From c1ddbf045d422ddb9a247b37e8d6abeacbe5c536 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Sun, 9 Jun 2024 11:29:16 +0200
Subject: [PATCH 01/34] refactors for ndtiff v3. Made RAM datasets work better
---
pycromanager/acquisition/RAMStorage.py | 138 ------------------
pycromanager/acquisition/RAMStorage_java.py | 69 +++++++++
.../acq_eng_py/main/AcqEngPy_Acquisition.py | 24 ++-
pycromanager/acquisition/java_RAMStorage.py | 135 -----------------
.../acquisition/java_backend_acquisitions.py | 6 +-
.../python_backend_acquisitions.py | 21 ++-
pycromanager/napari_util.py | 28 ++--
pycromanager/test/conftest.py | 6 +-
pycromanager/test/test_acquisition.py | 1 -
pycromanager/test/test_callback_functions.py | 5 +-
requirements.txt | 2 +-
11 files changed, 113 insertions(+), 322 deletions(-)
delete mode 100644 pycromanager/acquisition/RAMStorage.py
create mode 100644 pycromanager/acquisition/RAMStorage_java.py
delete mode 100644 pycromanager/acquisition/java_RAMStorage.py
diff --git a/pycromanager/acquisition/RAMStorage.py b/pycromanager/acquisition/RAMStorage.py
deleted file mode 100644
index 25b21967..00000000
--- a/pycromanager/acquisition/RAMStorage.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# A class for holding data in RAM
-
-from pycromanager.acquisition.acq_eng_py.main.acq_eng_metadata import AcqEngMetadata
-import numpy as np
-from sortedcontainers import SortedSet
-import threading
-
-
-class RAMDataStorage:
- """
- A class for holding data in RAM
- Implements the methods needed to be a DataSink for AcqEngPy
- """
-
- def __init__(self):
- self.finished = False
- self.images = {}
- self.image_metadata = {}
- self.axes = {}
- self.finished_event = threading.Event()
-
- def initialize(self, acq, summary_metadata: dict):
- self.summary_metadata = summary_metadata
-
- def block_until_finished(self, timeout=None):
- self.finished_event.wait(timeout=timeout)
-
- def finish(self):
- self.finished = True
-
- def is_finished(self) -> bool:
- return self.finished
-
- def put_image(self, tagged_image):
- self.bytes_per_pixel = tagged_image.pix.dtype.itemsize
- self.dtype = tagged_image.pix.dtype
- tags = tagged_image.tags
- axes = AcqEngMetadata.get_axes(tags)
- key = frozenset(axes.items())
- self.images[key] = tagged_image.pix
- self.image_metadata[key] = tags
- for axis in axes.keys():
- if axis not in self.axes:
- self.axes[axis] = SortedSet()
- self.axes[axis].add(axes[axis])
- self._new_image_arrived = True
-
- def anything_acquired(self) -> bool:
- return self.images != {}
-
- def has_image(self, channel: int or str, z: int, position: int,
- time: int, row: int, column: int, **kwargs):
- axes = self._consolidate_axes(channel, z, position, time, row, column, **kwargs)
- key = frozenset(axes.items())
- return key in self.images.keys()
-
- def read_image(self, channel=None, z=None, time=None, position=None, row=None, column=None, **kwargs):
- axes = self._consolidate_axes(channel, z, position, time, row, column, **kwargs)
- key = frozenset(axes.items())
- if key not in self.index:
- raise Exception("image with keys {} not present in data set".format(key))
- return self.images[key]
-
- def read_metadata(self, channel=None, z=None, time=None, position=None, row=None, column=None, **kwargs):
- axes = self._consolidate_axes(channel, z, position, time, row, column, **kwargs)
- key = frozenset(axes.items())
- if key not in self.index:
- raise Exception("image with keys {} not present in data set".format(key))
- return self.image_metadata[key]
-
- def _consolidate_axes(self, channel: int or str, z: int, position: int,
- time: int, row: int, column: int, **kwargs):
- """
- Pack axes into a convenient format
- """
- axis_positions = {'channel': channel, 'z': z, 'position': position,
- 'time': time, 'row': row, 'column': column, **kwargs}
- # ignore ones that are None
- axis_positions = {n: axis_positions[n] for n in axis_positions.keys() if axis_positions[n] is not None}
- for axis_name in axis_positions.keys():
- # convert any string-valued axes passed as ints into strings
- if self.axes_types[axis_name] == str and type(axis_positions[axis_name]) == int:
- axis_positions[axis_name] = self._string_axes_values[axis_name][axis_positions[axis_name]]
-
- return axis_positions
-
- def has_new_image(self):
- """
- For datasets currently being acquired, check whether a new image has arrived since this function
- was last called, so that a viewer displaying the data can be updated.
- """
- # pass through to full resolution, since only this is monitored in current implementation
- if not hasattr(self, '_new_image_arrived'):
- return False # pre-initilization
- new = self._new_image_arrived
- self._new_image_arrived = False
- return new
-
- def as_array(self, axes=None, **kwargs):
- """
- Read all data image data as one big numpy array with last two axes as y, x and preceeding axes depending on data.
- If the data doesn't fully fill out the array (e.g. not every z-slice collected at every time point), zeros will
- be added automatically.
-
- This function is modeled of the same one in the NDTiff library, but it uses numpy arrays instead of dask arrays
- because the data is already in RAM
-
- Parameters
- ----------
- axes : list
- list of axes names over which to iterate and merge into a stacked array. The order of axes supplied in this
- list will be the order of the axes of the returned dask array. If None, all axes will be used in PTCZYX order.
-
- **kwargs :
- names and integer positions of axes on which to slice data
- """
- if axes is None:
- axes = self.axes.keys()
-
- empty_image = np.zeros_like(list(self.images.values())[0])
- indices = [np.array(self.axes[axis_name]) for axis_name in list(axes)]
- gridded = np.meshgrid(*indices, indexing='ij')
- result = np.stack(gridded, axis=-1)
- flattened = result.reshape((-1, result.shape[-1]))
- images = []
- for coord in flattened:
- images_key = {key: coord[i] for i, key in enumerate(axes)}
- key = frozenset(images_key.items())
- if key in self.images.keys():
- images.append(self.images[key])
- else:
- images.append(empty_image)
- # reshape to Num axes + image size dimensions
- cube = np.array(images).reshape(tuple(len(i) for i in indices) + empty_image.shape)
- return cube
-
-
-
diff --git a/pycromanager/acquisition/RAMStorage_java.py b/pycromanager/acquisition/RAMStorage_java.py
new file mode 100644
index 00000000..d30345a4
--- /dev/null
+++ b/pycromanager/acquisition/RAMStorage_java.py
@@ -0,0 +1,69 @@
+from pyjavaz.wrappers import JavaObject
+from ndtiff import NDStorage
+
+class NDRAMDatasetJava(NDStorage):
+ """
+ A python class that wraps a Java-backend RAM data storage.
+
+ This class maintains an index of which images have been saved, but otherwise routes all calls to the Java
+ implementation of the RAM data storage.
+ """
+
+ def __init__(self, java_RAM_data_storage):
+ super().__init__()
+ self._java_RAM_data_storage = java_RAM_data_storage
+ self._index_keys = set()
+
+ def close(self):
+ pass
+
+ def add_available_axes(self, image_coordinates):
+ """
+ The Java RAM storage has received a new image with the given axes. Add these axes to the index.
+ """
+ self._index_keys.add(frozenset(image_coordinates.items()))
+ # update information about the available images
+ self._update_axes(image_coordinates)
+ self._new_image_event.set()
+
+ def get_image_coordinates_list(self):
+ """
+ Return a list of every combination of axes that has an image in this dataset
+ """
+ frozen_set_list = list(self._index_keys)
+ # convert to dict
+ return [{axis_name: position for axis_name, position in key} for key in frozen_set_list]
+
+ def is_finished(self) -> bool:
+ return self._java_RAM_data_storage.is_finished()
+
+ def has_image(self, channel=None, z=None, time=None, position=None, row=None, column=None, **kwargs):
+ axes = self._consolidate_axes(channel, z, position, time, row, column, **kwargs)
+ key = frozenset(axes.items())
+ return key in self._index_keys
+
+ def read_image(self, channel=None, z=None, time=None, position=None, row=None, column=None, **kwargs):
+ axes = self._consolidate_axes(channel, z, position, time, row, column, **kwargs)
+ key = frozenset(axes.items())
+ if key not in self._index_keys:
+ return None
+ java_hashmap = JavaObject('java.util.HashMap')
+ for k, v in axes.items():
+ java_hashmap.put(k, v)
+ tagged_image = self._java_RAM_data_storage.get_image(java_hashmap)
+ pixels = tagged_image.pix
+ metadata = tagged_image.tags
+ return pixels.reshape(metadata['Height'], metadata['Width'])
+
+ def read_metadata(self, channel=None, z=None, time=None, position=None, row=None, column=None, **kwargs):
+ axes = self._consolidate_axes(channel, z, position, time, row, column, **kwargs)
+ key = frozenset(axes.items())
+ if key not in self._index_keys:
+ return None
+ java_hashmap = JavaObject('java.util.HashMap')
+ for k, v in axes.items():
+ java_hashmap.put(k, v)
+ tagged_image = self._java_RAM_data_storage.get_image(java_hashmap)
+ return tagged_image.tags
+
+
diff --git a/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py b/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py
index 70e493b5..41e60307 100644
--- a/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py
+++ b/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py
@@ -9,7 +9,6 @@
from pycromanager.acquisition.acq_eng_py.internal.notification_handler import NotificationHandler
-
class Acquisition():
EVENT_GENERATION_HOOK = 0
@@ -169,15 +168,10 @@ def add_hook(self, h, type_):
self.after_exposure_hooks_.append(h)
def initialize(self):
- if self.core_:
- summary_metadata = AcqEngMetadata.make_summary_metadata(self.core_, self)
- self.add_to_summary_metadata(summary_metadata)
- try:
- self.summary_metadata_ = summary_metadata
- except json.JSONDecodeError:
- print("Couldn't copy summary metadata")
- if self.data_sink_:
- self.data_sink_.initialize(self, summary_metadata)
+ summary_metadata = AcqEngMetadata.make_summary_metadata(self.core_, self)
+ self.add_to_summary_metadata(summary_metadata)
+ if self.data_sink_:
+ self.data_sink_.initialize(summary_metadata)
def start(self):
if self.data_sink_:
@@ -190,8 +184,9 @@ def save_image(self, image):
self.data_sink_.finish()
self.post_notification(AcqNotification.create_data_sink_finished_notification())
else:
- self.data_sink_.put_image(image)
- axes = AcqEngMetadata.get_axes(image.tags)
+ pixels, metadata = image.pix, image.tags
+ axes = AcqEngMetadata.get_axes(metadata)
+ self.data_sink_.put_image(axes, pixels, metadata)
self.post_notification(AcqNotification.create_image_saved_notification(axes))
def get_start_time_ms(self):
@@ -212,8 +207,9 @@ def set_paused(self, pause):
def get_summary_metadata(self):
return self.summary_metadata_
- def anything_acquired(self):
- return not self.data_sink_ or self.data_sink_.anything_acquired()
+ # perhaps not needed in python like it is in java
+ # def anything_acquired(self):
+ # return not self.data_sink_ or self.data_sink_.anything_acquired()
def add_image_metadata_processor(self, processor):
if not self.image_metadata_processor_:
diff --git a/pycromanager/acquisition/java_RAMStorage.py b/pycromanager/acquisition/java_RAMStorage.py
deleted file mode 100644
index 439c5abd..00000000
--- a/pycromanager/acquisition/java_RAMStorage.py
+++ /dev/null
@@ -1,135 +0,0 @@
-from pyjavaz.wrappers import JavaObject
-
-
-class JavaRAMDataStorage:
- """
- A python class that wraps a Java-backend RAM data storage.
-
- This class maintains an index of which images have been saved, but otherwise routes all calls to the Java
- implementation of the RAM data storage.
- """
-
- def __init__(self, java_RAM_data_storage):
- self._java_RAM_data_storage = java_RAM_data_storage
- self._index_keys = set()
-
- def add_index_entry(self, data):
- self._index_keys.add(frozenset(data.items()))
-
- # def get_channel_names(self):
- # """
- # :return: list of channel names (strings)
- # """
- # return list(self._channels.keys())
-
- def get_index_keys(self):
- """
- Return a list of every combination of axes that has a image in this dataset
- """
- frozen_set_list = list(self._index_keys)
- # convert to dict
- return [{axis_name: position for axis_name, position in key} for key in frozen_set_list]
-
- def is_finished(self) -> bool:
- return self._java_RAM_data_storage.is_finished()
-
- def has_image(self, channel: int or str, z: int, position: int,
- time: int, row: int, column: int, **kwargs):
- axes = self._consolidate_axes(channel, z, position, time, row, column, **kwargs)
- key = frozenset(axes.items())
- return key in self._index_keys
-
- def read_image(self, channel=None, z=None, time=None, position=None, row=None, column=None, **kwargs):
- axes = self._consolidate_axes(channel, z, position, time, row, column, **kwargs)
- key = frozenset(axes.items())
- if key not in self._index_keys:
- return None
- java_hashmap = JavaObject('java.util.HashMap')
- for k, v in axes.items():
- java_hashmap.put(k, v)
- tagged_image = self._java_RAM_data_storage.get_image(java_hashmap)
- pixels = tagged_image.pix
- metadata = tagged_image.tags
- return pixels.reshape(metadata['Height'], metadata['Width'])
-
- def read_metadata(self, channel=None, z=None, time=None, position=None, row=None, column=None, **kwargs):
- axes = self._consolidate_axes(channel, z, position, time, row, column, **kwargs)
- key = frozenset(axes.items())
- if key not in self._index_keys:
- return None
- java_hashmap = JavaObject('java.util.HashMap')
- for k, v in axes.items():
- java_hashmap.put(k, v)
- tagged_image = self._java_RAM_data_storage.get_image(java_hashmap)
- return tagged_image.tags
-
- def _consolidate_axes(self, channel: int or str, z: int, position: int,
- time: int, row: int, column: int, **kwargs):
- """
- Pack axes into a convenient format
- """
- axis_positions = {'channel': channel, 'z': z, 'position': position,
- 'time': time, 'row': row, 'column': column, **kwargs}
- # ignore ones that are None
- axis_positions = {n: axis_positions[n] for n in axis_positions.keys() if axis_positions[n] is not None}
- # for axis_name in axis_positions.keys():
- # # convert any string-valued axes passed as ints into strings
- # if self.axes_types[axis_name] == str and type(axis_positions[axis_name]) == int:
- # axis_positions[axis_name] = self._string_axes_values[axis_name][axis_positions[axis_name]]
-
- return axis_positions
-
- def has_new_image(self):
- """
- For datasets currently being acquired, check whether a new image has arrived since this function
- was last called, so that a viewer displaying the data can be updated.
- """
- # pass through to full resolution, since only this is monitored in current implementation
- if not hasattr(self, '_new_image_arrived'):
- return False # pre-initilization
- new = self._new_image_arrived
- self._new_image_arrived = False
- return new
-
- def as_array(self, axes=None, **kwargs):
- """
- Read all data image data as one big numpy array with last two axes as y, x and preceeding axes depending on data.
- If the data doesn't fully fill out the array (e.g. not every z-slice collected at every time point), zeros will
- be added automatically.
-
- This function is modeled of the same one in the NDTiff library, but it uses numpy arrays instead of dask arrays
- because the data is already in RAM
-
- Parameters
- ----------
- axes : list
- list of axes names over which to iterate and merge into a stacked array. The order of axes supplied in this
- list will be the order of the axes of the returned dask array. If None, all axes will be used in PTCZYX order.
-
- **kwargs :
- names and integer positions of axes on which to slice data
- """
- raise NotImplementedError("This function is not yet implemented")
- # TODO
- # if axes is None:
- # axes = self.axes.keys()
- #
- # empty_image = np.zeros_like(list(self.images.values())[0])
- # indices = [np.array(self.axes[axis_name]) for axis_name in list(axes)]
- # gridded = np.meshgrid(*indices, indexing='ij')
- # result = np.stack(gridded, axis=-1)
- # flattened = result.reshape((-1, result.shape[-1]))
- # images = []
- # for coord in flattened:
- # images_key = {key: coord[i] for i, key in enumerate(axes)}
- # key = frozenset(images_key.items())
- # if key in self.images.keys():
- # images.append(self.images[key])
- # else:
- # images.append(empty_image)
- # # reshape to Num axes + image size dimensions
- # cube = np.array(images).reshape(tuple(len(i) for i in indices) + empty_image.shape)
- # return cube
-
-
-
diff --git a/pycromanager/acquisition/java_backend_acquisitions.py b/pycromanager/acquisition/java_backend_acquisitions.py
index 441fbb94..0e6d80f7 100644
--- a/pycromanager/acquisition/java_backend_acquisitions.py
+++ b/pycromanager/acquisition/java_backend_acquisitions.py
@@ -15,7 +15,7 @@
from pyjavaz import PullSocket, PushSocket, JavaObject, JavaClass
from pyjavaz import DEFAULT_BRIDGE_PORT as DEFAULT_PORT
from pycromanager.mm_java_classes import ZMQRemoteMMCoreJ, Magellan
-from pycromanager.acquisition.java_RAMStorage import JavaRAMDataStorage
+from pycromanager.acquisition.RAMStorage_java import NDRAMDatasetJava
from ndtiff import Dataset
import os.path
@@ -209,7 +209,7 @@ def _notification_handler_fn(acquisition, notification_push_port, connected_even
notification.payload = axes
else: # RAM storage
axes = json.loads(notification.payload)
- acquisition._dataset.add_index_entry(axes)
+ acquisition._dataset.add_available_axes(axes)
notification.payload = axes
acquisition._image_notification_queue.put(notification)
@@ -321,7 +321,7 @@ def __init__(
self._dataset = Dataset(dataset_path=self._dataset_disk_location, summary_metadata=summary_metadata)
else:
# Saved to RAM on Java side
- self._dataset = JavaRAMDataStorage(storage_java_class)
+ self._dataset = NDRAMDatasetJava(storage_java_class)
# Monitor image arrival so they can be loaded on python side, but with no callback function
# Need to do this regardless of whether you use it, so that it signals to shut down on Java side
self._storage_monitor_thread = self._add_storage_monitor_fn(image_saved_fn=image_saved_fn)
diff --git a/pycromanager/acquisition/python_backend_acquisitions.py b/pycromanager/acquisition/python_backend_acquisitions.py
index beba1475..959278ed 100644
--- a/pycromanager/acquisition/python_backend_acquisitions.py
+++ b/pycromanager/acquisition/python_backend_acquisitions.py
@@ -1,12 +1,13 @@
from docstring_inheritance import NumpyDocstringInheritanceMeta
from pycromanager.acquisition.acq_eng_py.main.AcqEngPy_Acquisition import Acquisition as pymmcore_Acquisition
-from pycromanager.acquisition.RAMStorage import RAMDataStorage
from pycromanager.acquisition.acquisition_superclass import _validate_acq_events, Acquisition
from pycromanager.acquisition.acq_eng_py.main.acquisition_event import AcquisitionEvent
from pycromanager.acq_future import AcqNotification
import threading
from inspect import signature
+from ndtiff.ndram_dataset import NDRAMDataset
+from ndtiff.ndtiff_dataset import NDTiffDataset
class PythonBackendAcquisition(Acquisition, metaclass=NumpyDocstringInheritanceMeta):
"""
@@ -19,6 +20,7 @@ class PythonBackendAcquisition(Acquisition, metaclass=NumpyDocstringInheritanceM
def __init__(
self,
+ directory: str=None,
name: str='default_acq_name',
image_process_fn: callable=None,
event_generation_hook_fn: callable = None,
@@ -29,8 +31,7 @@ def __init__(
napari_viewer=None,
image_saved_fn: callable=None,
debug: int=False,
- # Specificly so the directory arg can be absorbed and ignored without error,
- **kwargs
+
):
# Get a dict of all named argument values (or default values when nothing provided)
arg_names = [k for k in signature(PythonBackendAcquisition.__init__).parameters.keys() if k != 'self']
@@ -38,12 +39,8 @@ def __init__(
named_args = {arg_name: (l[arg_name] if arg_name in l else
dict(signature(PythonBackendAcquisition.__init__).parameters.items())[arg_name].default)
for arg_name in arg_names }
- if 'kwargs' in named_args:
- if 'directory' in named_args['kwargs'] and named_args['kwargs']['directory'] is not None:
- raise Exception('The directory argument is not supported in Python backend acquisitions')
- del named_args['kwargs']
super().__init__(**named_args)
- self._dataset = RAMDataStorage()
+ self._dataset = NDRAMDataset() if not directory else NDTiffDataset(directory, name, writable=True)
self._finished = False
self._notifications_finished = False
self._create_event_queue()
@@ -87,13 +84,13 @@ def post_notification(notification):
# add hooks and image processor
if pre_hardware_hook_fn is not None:
- self._acq.add_hook(AcquisitionHook(pre_hardware_hook_fn),self._acq.BEFORE_HARDWARE_HOOK)
+ self._acq.add_hook(AcquisitionHook(pre_hardware_hook_fn), self._acq.BEFORE_HARDWARE_HOOK)
if post_hardware_hook_fn is not None:
- self._acq.add_hook(AcquisitionHook(post_hardware_hook_fn),self._acq.AFTER_HARDWARE_HOOK)
+ self._acq.add_hook(AcquisitionHook(post_hardware_hook_fn), self._acq.AFTER_HARDWARE_HOOK)
if post_camera_hook_fn is not None:
- self._acq.add_hook(AcquisitionHook(post_camera_hook_fn),self._acq.AFTER_CAMERA_HOOK)
+ self._acq.add_hook(AcquisitionHook(post_camera_hook_fn), self._acq.AFTER_CAMERA_HOOK)
if event_generation_hook_fn is not None:
- self._acq.add_hook(AcquisitionHook(event_generation_hook_fn),self._acq.EVENT_GENERATION_HOOK)
+ self._acq.add_hook(AcquisitionHook(event_generation_hook_fn), self._acq.EVENT_GENERATION_HOOK)
if self._image_processor is not None:
self._acq.add_image_processor(self._image_processor)
diff --git a/pycromanager/napari_util.py b/pycromanager/napari_util.py
index 33b0341c..03aba724 100644
--- a/pycromanager/napari_util.py
+++ b/pycromanager/napari_util.py
@@ -29,22 +29,24 @@ def update_layer(image):
@thread_worker(connect={'yielded': update_layer})
def napari_signaller():
"""
- Monitor for signals that Acqusition has a new image ready, and when that happens
+ Monitor for signals that Acquisition has a new image ready, and when that happens
update napari appropriately
"""
+ # don't update faster than the display can handle
+ min_update_time = 1 / 30
+ last_update_time = time.time()
while True:
- time.sleep(1 / 60) # limit to 60 hz refresh
- image = None
-
- if dataset is not None and dataset.has_new_image():
- # A new image has arrived, this could be overwriting something existing or have a new combination of axes
- image = dataset.as_array()
- shape = np.array([len(dataset.axes[name]) for name in dataset.axes.keys()])
- if not hasattr(napari_signaller, 'old_shape') or \
- napari_signaller.old_shape.size != shape.size or \
- np.any(napari_signaller.old_shape != shape):
- napari_signaller.old_shape = shape
-
+ dataset_writing_complete = dataset.is_finished()
+ new_image_ready = dataset.await_new_image(timeout=.25)
+ if not new_image_ready:
+ continue
+ image = dataset.as_array()
+ update_time = time.time()
yield image
+ if dataset_writing_complete:
+ break
+ if update_time - last_update_time < min_update_time:
+ time.sleep(min_update_time - (update_time - last_update_time))
+ last_update_time = time.time()
napari_signaller()
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index 08c12fc1..e84831b1 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -179,8 +179,9 @@ def setup_data_folder():
shutil.rmtree(data_folder_path)
-@pytest.fixture(scope="session")
-def launch_mm_headless(install_mm):
+@pytest.fixture(scope="session", params=[True, False])
+def launch_mm_headless(request, install_mm):
+ python_backend = request.param
mm_install_dir = install_mm
if mm_install_dir is None:
yield # local manual testing where MM has been launched from source
@@ -195,6 +196,7 @@ def launch_mm_headless(install_mm):
start_headless(mm_install_dir, config_file, java_loc=java_loc,
buffer_size_mb=128, max_memory_mb=128, # set these low for github actions
+ python_backend=python_backend,
debug=True)
yield None
diff --git a/pycromanager/test/test_acquisition.py b/pycromanager/test/test_acquisition.py
index 313300d7..55904d41 100644
--- a/pycromanager/test/test_acquisition.py
+++ b/pycromanager/test/test_acquisition.py
@@ -30,7 +30,6 @@ def hook_fn(_events):
finally:
dataset.close()
-
def test_timelapse_seq_acq(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=10, time_interval_s=0)
diff --git a/pycromanager/test/test_callback_functions.py b/pycromanager/test/test_callback_functions.py
index 142d424f..32d0828e 100644
--- a/pycromanager/test/test_callback_functions.py
+++ b/pycromanager/test/test_callback_functions.py
@@ -37,12 +37,11 @@ def test_img_process_fn_no_save(launch_mm_headless):
def hook_fn(image, metadata):
return None
- with Acquisition(directory=None, name='acq', show_display=False,
- image_process_fn=hook_fn) as acq:
+ with Acquisition(directory=None, name='acq', show_display=False, image_process_fn=hook_fn) as acq:
acq.acquire(events)
dataset = acq.get_dataset() # Can this be moved out of the Acquisition context?
- assert len(dataset.get_index_keys()) == 0
+ assert len(dataset.get_image_coordinates_list()) == 0
def test_img_process_fn_image_saved_fn_consistency(launch_mm_headless, setup_data_folder):
diff --git a/requirements.txt b/requirements.txt
index c47a8683..1dc8dcf9 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
numpy
dask[array]>=2022.2.0
pyzmq
-ndtiff>=2.3.0
+ndtiff>=3.0.0
docstring-inheritance
pymmcore
sortedcontainers
From e5d1fa4ac709153a1bf223f201ffbc9f20f0bcb2 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Sun, 9 Jun 2024 22:30:40 +0200
Subject: [PATCH 02/34] partial refactor of auto install functions + partial
fixes for tests for AcqEngPy
---
pycromanager/acquisition/acq_constructor.py | 2 +-
.../acquisition/acq_eng_py/internal/engine.py | 37 ++--
.../acq_eng_py/main/AcqEngPy_Acquisition.py | 2 +-
.../acq_eng_py/main/acq_eng_metadata.py | 9 +-
.../acq_eng_py/main/acquisition_event.py | 29 +--
.../acquisition/acquisition_superclass.py | 72 ++++---
.../python_backend_acquisitions.py | 9 +-
pycromanager/headless.py | 4 +-
pycromanager/install.py | 76 +++++++
pycromanager/test/conftest.py | 200 +++++++-----------
10 files changed, 239 insertions(+), 201 deletions(-)
create mode 100644 pycromanager/install.py
diff --git a/pycromanager/acquisition/acq_constructor.py b/pycromanager/acquisition/acq_constructor.py
index 8a486419..6217a494 100644
--- a/pycromanager/acquisition/acq_constructor.py
+++ b/pycromanager/acquisition/acq_constructor.py
@@ -31,7 +31,7 @@ def __new__(cls,
if _PYMMCORES:
# Python backend detected, so create a python backend acquisition
- specific_arg_names = [k for k in signature(JavaBackendAcquisition.__init__).parameters.keys() if k != 'self']
+ specific_arg_names = [k for k in signature(PythonBackendAcquisition.__init__).parameters.keys() if k != 'self']
for name in specific_arg_names:
if name in kwargs:
named_args[name] = kwargs[name]
diff --git a/pycromanager/acquisition/acq_eng_py/internal/engine.py b/pycromanager/acquisition/acq_eng_py/internal/engine.py
index cb4509fd..21de6efd 100644
--- a/pycromanager/acquisition/acq_eng_py/internal/engine.py
+++ b/pycromanager/acquisition/acq_eng_py/internal/engine.py
@@ -105,7 +105,6 @@ def process_acquisition_event_inner():
self.check_for_default_devices(event)
if event.acquisition_.is_debug_mode():
self.core.logMessage("Processing event: " + event.to_string())
- if event.acquisition_.is_debug_mode():
self.core.logMessage("checking for sequencing")
if not self.sequenced_events and not event.is_acquisition_sequence_end_event():
self.sequenced_events.append(event)
@@ -182,7 +181,7 @@ def execute_acquisition_event(self, event: AcquisitionEvent):
event.acquisition_.post_notification(AcqNotification(
AcqNotification.Hardware, event.axisPositions_, AcqNotification.Hardware.PRE_Z_DRIVE))
- for h in event.acquisition_.get_before_z_drive_hooks():
+ for h in event.acquisition_.get_before_z_hooks():
event = h.run(event)
if event is None:
return # The hook cancelled this event
@@ -342,7 +341,7 @@ def acquire_images(self, event: AcquisitionEvent, hardware_sequences_in_progress
# This is a little different from the java version due to differences in metadata
# handling in the SWIG wrapper
camera_name = self.core.get_camera_device()
- ti = self.core.get_tagged_image(self, cam_index, camera_name, height, width)
+ ti = self.core.get_tagged_image(cam_index, camera_name, height, width)
except Exception as e:
# continue waiting
pass
@@ -492,9 +491,9 @@ def change_channels(event):
group = event.get_sequence()[0].get_config_group()
config = self.core.get_config_data(group, event.get_sequence()[0].get_config_preset())
for i in range(config.size()):
- ps = config.get_setting(i)
- device_name = ps.get_device_label()
- prop_name = ps.get_property_name()
+ ps = config.getSetting(i)
+ device_name = ps.getDeviceLabel()
+ prop_name = ps.getPropertyName()
if self.core.is_property_sequenceable(device_name, prop_name):
self.core.start_property_sequence(device_name, prop_name)
except Exception as ex:
@@ -588,16 +587,16 @@ def change_additional_properties(event):
# Set sequences for all channel properties
if prop_sequences is not None:
for i in range(config.size()):
- ps = config.get_setting(i)
- device_name = ps.get_device_label()
- prop_name = ps.get_property_name()
+ ps = config.getSetting(i)
+ device_name = ps.getDeviceLabel()
+ prop_name = ps.getPropertyName()
if e == event.get_sequence()[0]: # First property
# TODO: what is this in pymmcore
prop_sequences.add(StrVector())
channel_preset_config = self.core.get_config_data(group, e.get_config_preset())
- prop_value = channel_preset_config.get_setting(device_name, prop_name).get_property_value()
+ prop_value = channel_preset_config.getSetting(device_name, prop_name).getPropertyValue()
if self.core.is_property_sequenceable(device_name, prop_name):
prop_sequences.get(i).add(prop_value)
@@ -617,9 +616,9 @@ def change_additional_properties(event):
if event.is_config_group_sequenced():
for i in range(config.size()):
- ps = config.get_setting(i)
- device_name = ps.get_device_label()
- prop_name = ps.get_property_name()
+ ps = config.getSetting(i)
+ device_name = ps.getDeviceLabel()
+ prop_name = ps.getPropertyName()
if prop_sequences.get(i).size() > 0:
self.core.load_property_sequence(device_name, prop_name, prop_sequences.get(i))
@@ -726,12 +725,12 @@ def is_sequencable(self, previous_events, next_event, new_seq_length):
previous_event.get_config_preset())
config2 = self.core.get_config_data(next_event.get_config_group(), next_event.get_config_preset())
for i in range(config1.size()):
- ps1 = config1.get_setting(i)
- device_name = ps1.get_device_label()
- prop_name = ps1.get_property_name()
- prop_value1 = ps1.get_property_value()
- ps2 = config2.get_setting(i)
- prop_value2 = ps2.get_property_value()
+ ps1 = config1.getSetting(i)
+ device_name = ps1.getDeviceLabel()
+ prop_name = ps1.getPropertyName()
+ prop_value1 = ps1.getPropertyValue()
+ ps2 = config2.getSetting(i)
+ prop_value2 = ps2.getPropertyValue()
if prop_value1 != prop_value2:
if not self.core.is_property_sequenceable(device_name, prop_name):
return False
diff --git a/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py b/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py
index 41e60307..9f83b916 100644
--- a/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py
+++ b/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py
@@ -119,9 +119,9 @@ def saving_thread(acq):
img = acq.first_dequeue_.get()
if acq.debug_mode_:
acq.core_.log_message("got image to save")
+ acq.save_image(img)
if img.tags is None and img.pix is None:
break
- acq.save_image(img)
else:
img = acq.processor_output_queues_[acq.image_processors_[-1]].get()
if acq.data_sink_:
diff --git a/pycromanager/acquisition/acq_eng_py/main/acq_eng_metadata.py b/pycromanager/acquisition/acq_eng_py/main/acq_eng_metadata.py
index 9f10aaeb..aef8807e 100644
--- a/pycromanager/acquisition/acq_eng_py/main/acq_eng_metadata.py
+++ b/pycromanager/acquisition/acq_eng_py/main/acq_eng_metadata.py
@@ -563,7 +563,7 @@ def get_pixel_overlap_y(summary_md):
@staticmethod
def set_stage_x_intended(smd, x):
- smd[AcqEngMetadata.X_UM_INTENDED] = x
+ smd[AcqEngMetadata.X_UM_INTENDED] = float(x)
@staticmethod
def has_stage_x_intended(map):
@@ -578,7 +578,7 @@ def get_stage_x_intended(smd):
@staticmethod
def set_stage_y_intended(smd, y):
- smd[AcqEngMetadata.Y_UM_INTENDED] = y
+ smd[AcqEngMetadata.Y_UM_INTENDED] = float(y)
@staticmethod
def has_stage_y_intended(map):
@@ -593,7 +593,7 @@ def get_stage_y_intended(smd):
@staticmethod
def set_stage_z_intended(smd, y):
- smd[AcqEngMetadata.Z_UM_INTENDED] = y
+ smd[AcqEngMetadata.Z_UM_INTENDED] = float(y)
@staticmethod
def has_stage_z_intended(map):
@@ -756,6 +756,9 @@ def get_axes(tags):
axes_map = {}
for key in axes:
axes_map[key] = axes[key]
+ if not isinstance(axes_map[key], str):
+ # make sure they're not numpy int64s because they can't be serialized and this causes problems
+ axes_map[key] = int(axes_map[key])
return axes_map
except KeyError:
raise ValueError("Could not create axes")
diff --git a/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py b/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py
index 47c25b4b..c2dca402 100644
--- a/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py
+++ b/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py
@@ -83,17 +83,17 @@ def copy(self):
def event_to_json(e):
data = {}
- if e.isAcquisitionFinishedEvent():
+ if e.is_acquisition_finished_event():
data["special"] = "acquisition-end"
- return json.dumps(data)
- elif e.isAcquisitionSequenceEndEvent():
+ return data
+ elif e.is_acquisition_sequence_end_event():
data["special"] = "sequence-end"
- return json.dumps(data)
+ return data
if e.miniumumStartTime_ms_:
data["min_start_time"] = e.miniumumStartTime_ms_ / 1000
- if e.hasConfigGroup():
+ if e.has_config_group():
data["config_group"] = [e.configGroup_, e.configPreset_]
if e.exposure_:
@@ -109,7 +109,8 @@ def event_to_json(e):
if axes:
data["axes"] = axes
- stage_positions = [[stageDevice, e.getStageSingleAxisStagePosition(stageDevice)] for stageDevice in e.getStageDeviceNames()]
+ stage_positions = [[stageDevice, e.get_stage_single_axis_stage_position(stageDevice)]
+ for stageDevice in e.get_stage_device_names()]
if stage_positions:
data["stage_positions"] = stage_positions
@@ -125,22 +126,22 @@ def event_to_json(e):
if e.camera_:
data["camera"] = e.camera_
- if e.getTags() and e.getTags(): # Assuming getTags is a method in the class
+ if e.get_tags() and e.get_tags(): # Assuming getTags is a method in the class
data["tags"] = {key: value for key, value in e.getTags().items()}
props = [[t.dev, t.prop, t.val] for t in e.properties_]
if props:
data["properties"] = props
- return json.dumps(data)
+ return data
@staticmethod
def event_from_json(data, acq):
if "special" in data:
if data["special"] == "acquisition-end":
- return AcquisitionEvent.createAcquisitionFinishedEvent(acq)
+ return AcquisitionEvent.create_acquisition_finished_event(acq)
elif data["special"] == "sequence-end":
- return AcquisitionEvent.createAcquisitionSequenceEndEvent(acq)
+ return AcquisitionEvent.create_acquisition_sequence_end_event(acq)
event = AcquisitionEvent(acq)
@@ -214,17 +215,17 @@ def event_from_json(data, acq):
def to_json(self):
if self.sequence_:
events = [self.event_to_json(e) for e in self.sequence_]
- return json.dumps({"events": events})
+ return events
else:
return self.event_to_json(self)
@staticmethod
def from_json(data, acq):
- if "events" not in data:
+ if not isinstance(data, list):
return AcquisitionEvent.event_from_json(data, acq)
else:
- sequence = [AcquisitionEvent.event_from_json(item, acq) for item in data["events"]]
- return AcquisitionEvent(sequence)
+ sequence = [AcquisitionEvent.event_from_json(event, acq) for event in data]
+ return AcquisitionEvent(acq, sequence=sequence)
def get_camera_device_name(self):
return self.camera_
diff --git a/pycromanager/acquisition/acquisition_superclass.py b/pycromanager/acquisition/acquisition_superclass.py
index 8c523c5d..9b8dff9b 100644
--- a/pycromanager/acquisition/acquisition_superclass.py
+++ b/pycromanager/acquisition/acquisition_superclass.py
@@ -245,40 +245,44 @@ def acquire(self, event_or_events: dict or list or Generator) -> AcquisitionFutu
acquisition events.
"""
- if self._acq.are_events_finished():
- raise AcqAlreadyCompleteException(
- 'Cannot submit more events because this acquisition is already finished')
-
- if event_or_events is None:
- # manual shutdown
- self._event_queue.put(None)
- return
-
- if isinstance(event_or_events, GeneratorType):
- acq_future = AcquisitionFuture(self)
-
- def notifying_generator(original_generator):
- # store in a weakref so that if user code doesn't hange on to AcqFuture
- # it doesn't needlessly track events
- acq_future_weakref = weakref.ref(acq_future)
- for event in original_generator:
- future = acq_future_weakref()
- if future is not None:
- acq_future._monitor_axes(event['axes'])
- _validate_acq_events(event)
- yield event
- event_or_events = notifying_generator(event_or_events)
- else:
- _validate_acq_events(event_or_events)
- axes_or_axes_list = event_or_events['axes'] if type(event_or_events) == dict\
- else [e['axes'] for e in event_or_events]
- acq_future = AcquisitionFuture(self, axes_or_axes_list)
- self._acq_futures.append(weakref.ref(acq_future))
- # clear out old weakrefs
- self._acq_futures = [f for f in self._acq_futures if f() is not None]
+ try:
+ if self._acq.are_events_finished():
+ raise AcqAlreadyCompleteException(
+ 'Cannot submit more events because this acquisition is already finished')
+
+ if event_or_events is None:
+ # manual shutdown
+ self._event_queue.put(None)
+ return
+
+ if isinstance(event_or_events, GeneratorType):
+ acq_future = AcquisitionFuture(self)
+
+ def notifying_generator(original_generator):
+ # store in a weakref so that if user code doesn't hang on to AcqFuture
+ # it doesn't needlessly track events
+ acq_future_weakref = weakref.ref(acq_future)
+ for event in original_generator:
+ future = acq_future_weakref()
+ if future is not None:
+ acq_future._monitor_axes(event['axes'])
+ _validate_acq_events(event)
+ yield event
+ event_or_events = notifying_generator(event_or_events)
+ else:
+ _validate_acq_events(event_or_events)
+ axes_or_axes_list = event_or_events['axes'] if type(event_or_events) == dict\
+ else [e['axes'] for e in event_or_events]
+ acq_future = AcquisitionFuture(self, axes_or_axes_list)
+ self._acq_futures.append(weakref.ref(acq_future))
+ # clear out old weakrefs
+ self._acq_futures = [f for f in self._acq_futures if f() is not None]
- self._event_queue.put(event_or_events)
- return acq_future
+ self._event_queue.put(event_or_events)
+ return acq_future
+ except Exception as e:
+ self.abort(e)
+ raise e
@@ -507,7 +511,7 @@ def generate_events(event, order):
absolute_start_times = np.cumsum(time_interval_s)
for time_index in time_indices:
new_event = copy.deepcopy(event)
- new_event["axes"]["time"] = time_index
+ new_event["axes"]["time"] = int(time_index)
if isinstance(time_interval_s, list):
new_event["min_start_time"] = absolute_start_times[time_index]
else:
diff --git a/pycromanager/acquisition/python_backend_acquisitions.py b/pycromanager/acquisition/python_backend_acquisitions.py
index 959278ed..68bb9009 100644
--- a/pycromanager/acquisition/python_backend_acquisitions.py
+++ b/pycromanager/acquisition/python_backend_acquisitions.py
@@ -40,7 +40,7 @@ def __init__(
dict(signature(PythonBackendAcquisition.__init__).parameters.items())[arg_name].default)
for arg_name in arg_names }
super().__init__(**named_args)
- self._dataset = NDRAMDataset() if not directory else NDTiffDataset(directory, name, writable=True)
+ self._dataset = NDRAMDataset() if not directory else NDTiffDataset(directory, name=name, writable=True)
self._finished = False
self._notifications_finished = False
self._create_event_queue()
@@ -196,7 +196,12 @@ def __init__(self, hook_fn):
self._hook_fn = hook_fn
def run(self, event):
- self._hook_fn(event)
+ if AcquisitionEvent.is_acquisition_finished_event(event):
+ return event
+ acq = event.acquisition_
+ output = self._hook_fn(event.to_json())
+ if output is not None:
+ return AcquisitionEvent.from_json(output, acq)
def close(self):
pass # nothing to do here
diff --git a/pycromanager/headless.py b/pycromanager/headless.py
index 7fb290ef..84b3658a 100644
--- a/pycromanager/headless.py
+++ b/pycromanager/headless.py
@@ -60,12 +60,12 @@ def pop_next_tagged_image(self):
tags = {key: md.GetSingleTag(key).GetValue() for key in md.GetKeys()}
return TaggedImage(tags, pix)
- def get_tagged_image(self, cam_index, camera, height, width, binning=None, pixel_type=None, roi_x_start=None,
+ def get_tagged_image(core, cam_index, camera, height, width, binning=None, pixel_type=None, roi_x_start=None,
roi_y_start=None):
"""
Different signature than the Java version because of difference in metadata handling in the swig layers
"""
- pix = self.get_image()
+ pix = core.get_image()
md = pymmcore.Metadata()
# most of the same tags from pop_next_tagged_image, which may not be the same as the MMCoreJ version of this function
tags = {'Camera': camera, 'Height': height, 'Width': width, 'PixelType': pixel_type,
diff --git a/pycromanager/install.py b/pycromanager/install.py
new file mode 100644
index 00000000..c18e44fa
--- /dev/null
+++ b/pycromanager/install.py
@@ -0,0 +1,76 @@
+"""
+Utility functions for installing micro-manager
+"""
+import sys
+import os
+import re
+import requests
+import wget
+import subprocess
+import shutil
+
+MM_DOWNLOAD_URL_BASE = 'https://download.micro-manager.org'
+
+MM_DOWNLOAD_URL_MAC = MM_DOWNLOAD_URL_BASE + '/nightly/2.0/Mac'
+MM_DOWNLOAD_URL_WINDOWS = MM_DOWNLOAD_URL_BASE + '/nightly/2.0/Windows'
+
+def _find_versions(platform='Windows'):
+ """
+ Find all available versions of Micro-Manager nightly builds
+ """
+ # Get the webpage
+ if platform == 'Windows':
+ webpage = requests.get(MM_DOWNLOAD_URL_WINDOWS)
+ elif platform == 'Mac':
+ webpage = requests.get(MM_DOWNLOAD_URL_MAC)
+ else:
+ raise ValueError(f"Unsupported OS: {platform}")
+ return re.findall(r'class="rowDefault" href="([^"]+)', webpage.text)
+
+
+def download_and_install(windows=True, destination='auto'):
+ """
+ Download and install the latest nightly build of Micro-Manager
+
+ Parameters
+ ----------
+ windows : bool
+ Whether to download the Windows or Mac version
+ destination : str
+ The directory to install Micro-Manager to. If 'auto', it will install to the user's home directory.
+
+ Returns
+ -------
+ str
+ The path to the installed Micro-Manager directory
+ """
+ platform = 'Windows' if windows else 'Mac'
+ installer = 'mm_installer.exe' if windows else 'mm_installer.dmg'
+ latest_version = MM_DOWNLOAD_URL_BASE + _find_versions(platform)[0]
+ wget.download(latest_version, out=installer, bar=lambda curr, total, width: print(f"\rDownloading installer: {curr / total*100:.2f}%", end=''))
+
+ if windows:
+ # TODO: need to test this
+ cmd = f"{installer} /SP /VERYSILENT /SUPRESSMSGBOXES"
+ # /CURRENTUSER"
+ # f"/DIR={mm_install_dir} /LOG={mm_install_log_path}"
+ subprocess.run(cmd, shell=True)
+ return 'TODO: Windows install path'
+ else:
+ if destination == 'auto':
+ destination = os.path.expanduser('~') + '/Micro-Manager'
+ try:
+ # unmount if already mounted
+ subprocess.run(['hdiutil', 'detach', '/Volumes/Micro-Manager'])
+ except:
+ pass
+ process = subprocess.run(['hdiutil', 'attach', '-nobrowse', str(installer)])
+ latest_build = [name for name in os.listdir('/Volumes/Micro-Manager') if 'Micro-Manager' in name][0]
+ shutil.copytree('/Volumes/Micro-Manager/' + latest_build, destination, dirs_exist_ok=True)
+ # unmount
+ subprocess.run(['hdiutil', 'detach', '/Volumes/Micro-Manager'])
+ # delete this installer
+ os.remove(installer)
+ return destination
+
+ # For issues with M1 Macs: https://github.com/conda-forge/miniforge/issues/165#issuecomment-860233092
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index e84831b1..95e156c6 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -13,6 +13,7 @@
from pycromanager import start_headless
from pycromanager.headless import stop_headless
import socket
+from pycromanager.install import download_and_install
def is_port_in_use(port):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
@@ -52,120 +53,60 @@ def replace_jars(new_file_path, old_file_path, jar_names: list):
@pytest.fixture(scope="session")
-def download_mm_nightly():
+def install_mm():
if is_port_in_use(4827):
- yield
- else:
- # get latest mm nightly build
- mm_windows_downloads = "https://download.micro-manager.org/nightly/2.0/Windows/"
- webpage = requests.get(mm_windows_downloads)
-
- m = re.search(r'class="rowDefault" href="([^"]+)', webpage.text)
- url = "https://download.micro-manager.org" + m.group(1)
-
- # download
- print(f"\nDownloading Micro-manager nightly build: {url.split('/')[-1]}")
- mm_installer = os.path.join(os.getcwd(), 'mm_nightly_build.exe')
- if not os.path.exists(mm_installer):
- wget.download(url, out=mm_installer)
-
- yield mm_installer
-
- # cleanup
- if os.path.isfile(mm_installer):
- os.remove(mm_installer)
-
-
-@pytest.fixture(scope="session")
-def install_mm(download_mm_nightly):
- mm_installed = False
- mm_running = False
- mm_install_dir = os.path.join(os.path.expanduser('~'), "Micro-Manager-nightly")
-
-
- # check if there is currently a Micro-manager instance running (used for local testing)
- if is_port_in_use(4827):
- mm_running = True
print('Using Micro-manager running on port 4827 for testing')
yield
else:
- if os.path.isdir(mm_install_dir) and os.listdir(mm_install_dir):
- # Check if Micro-manager installation is present in mm_install_dir.
- # If so, the latest Micro-manager nightly build will not be installed.
- print(f'Existing Micro-manager installation found at {mm_install_dir}')
- else:
- # Install Micro-manager nightly build. Currently only supported on Windows platforms
- # To run tests on other platform, please place a working Micro-manager installation in "~/Micro-Manager-nightly"
- mm_installed = True
-
- if sys.platform.startswith('win'):
- mm_installer = download_mm_nightly
- mm_install_log_path = os.path.join(os.path.dirname(mm_installer), "mm_install.log")
- else:
- raise RuntimeError(
- '''Micro-manager nightly build installation is currently only supported on Windows platforms.
- To run tests on other platform, please place a working Micro-manager installation in
- "~/Micro-Manager-nightly"'''
- )
-
- # mkdir if not exists
- if not os.path.isdir(mm_install_dir):
- os.mkdir(mm_install_dir)
-
- print(f'Installing Micro-manager nightly build at: {mm_install_dir}')
- cmd = f"{mm_installer} /SP /VERYSILENT /SUPRESSMSGBOXES /CURRENTUSER /DIR={mm_install_dir} /LOG={mm_install_log_path}"
- subprocess.run(cmd, shell=True)
-
- # find pycro-manager/java path
- if os.path.isdir('java'):
- java_path = os.path.abspath('java')
- # in case cwd is '/pycromanager/test'
- elif os.path.isdir('../../java'):
- java_path = os.path.abspath('../../java')
- else:
- raise RuntimeError('Could not find pycro-manager/java path')
-
- # Delete the pycromanagerjava.jar file that is packaged with the nightly build
- pycromanager_jar_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava-*.jar')
- for file_path in glob.glob(pycromanager_jar_path):
- os.remove(file_path)
- print(f'Removed {file_path}')
-
- # Copy the pycromanagerjava.jar file that was compiled by the github action
- # into the nightly build so that it will test with the latest code
- compiled_jar_path = os.path.join(java_path, 'target', 'PycromanagerJava-*.jar')
- destination_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava.jar')
-
- # Find the actual file that matches the pattern and copy it to the destination
- matched_files = [file for file in glob.glob(compiled_jar_path)
- if not any(exclude in file for exclude in ['-javadoc', '-sources', '.asc', '.pom'])]
- if matched_files:
- file_path = matched_files[0]
- shutil.copy2(file_path, destination_path)
- print(f'Copied {file_path} to {destination_path}')
- else:
- print(f'No matching JAR file found at {compiled_jar_path}')
- raise FileNotFoundError(f'No matching JAR file found at {compiled_jar_path}')
-
- # Update pycromanager dependency jar files packaged with the Micro-manager nightly build
- # Files are updated only if they are larger version
- # Copy dependency jar files if present in target/dependency
- if os.path.isdir(os.path.join(java_path, 'target/dependency')):
- replace_jars(os.path.join(java_path, 'target/dependency'), os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'),
- ['AcqEngJ', 'NDTiffStorage', 'NDViewer'])
- # Copy dependency jar files if present in ../../REPO_NAME/target
- for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer']:
- if os.path.isdir(os.path.join(java_path, f'../../{repo_name}/target')):
- replace_jars(os.path.join(java_path, f'../../{repo_name}/target'),
- os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'), [repo_name])
-
- yield mm_install_dir
-
- # cleanup only if Micro-manager was installed in this session
- if not mm_running and mm_installed:
- os.remove(mm_install_log_path)
- # fails, because MM is still running, I think
- # shutil.rmtree(mm_install_dir)
+ yield '/Users/henrypinkard/Micro-Manager'
+ # # Download and install the latest nightly build
+ # mm_install_dir = download_and_install(windows=sys.platform.startswith('win'), destination='auto')
+ #
+ # #### Replace with newer versions of Java libraries ####
+ # # find pycro-manager/java path
+ # if os.path.isdir('java'):
+ # java_path = os.path.abspath('java')
+ # # in case cwd is '/pycromanager/test'
+ # elif os.path.isdir('../../java'):
+ # java_path = os.path.abspath('../../java')
+ # else:
+ # raise RuntimeError('Could not find pycro-manager/java path')
+ #
+ # # Delete the pycromanagerjava.jar file that is packaged with the nightly build
+ # pycromanager_jar_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava-*.jar')
+ # for file_path in glob.glob(pycromanager_jar_path):
+ # os.remove(file_path)
+ # print(f'Removed {file_path}')
+ #
+ # # Copy the pycromanagerjava.jar file that was compiled by the github action
+ # # into the nightly build so that it will test with the latest code
+ # compiled_jar_path = os.path.join(java_path, 'target', 'PycromanagerJava-*.jar')
+ # destination_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava.jar')
+ #
+ # # Find the actual file that matches the pattern and copy it to the destination
+ # matched_files = [file for file in glob.glob(compiled_jar_path)
+ # if not any(exclude in file for exclude in ['-javadoc', '-sources', '.asc', '.pom'])]
+ # if matched_files:
+ # file_path = matched_files[0]
+ # shutil.copy2(file_path, destination_path)
+ # print(f'Copied {file_path} to {destination_path}')
+ # else:
+ # print(f'No matching JAR file found at {compiled_jar_path}')
+ # raise FileNotFoundError(f'No matching JAR file found at {compiled_jar_path}')
+ #
+ # # Update pycromanager dependency jar files packaged with the Micro-manager nightly build
+ # # Files are updated only if they are a larger version
+ # # Copy dependency jar files if present in target/dependency
+ # if os.path.isdir(os.path.join(java_path, 'target/dependency')):
+ # replace_jars(os.path.join(java_path, 'target/dependency'), os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'),
+ # ['AcqEngJ', 'NDTiffStorage', 'NDViewer'])
+ # # Copy dependency jar files if present in ../../REPO_NAME/target
+ # for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer']:
+ # if os.path.isdir(os.path.join(java_path, f'../../{repo_name}/target')):
+ # replace_jars(os.path.join(java_path, f'../../{repo_name}/target'),
+ # os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'), [repo_name])
+ #
+ # yield mm_install_dir
@pytest.fixture(scope="session")
@@ -179,26 +120,35 @@ def setup_data_folder():
shutil.rmtree(data_folder_path)
-@pytest.fixture(scope="session", params=[True, False])
+@pytest.fixture(scope="session", params=[True])
+# @pytest.fixture(scope="session", params=[True, False])
def launch_mm_headless(request, install_mm):
python_backend = request.param
mm_install_dir = install_mm
- if mm_install_dir is None:
- yield # local manual testing where MM has been launched from source
- else:
- config_file = os.path.join(mm_install_dir, 'MMConfig_demo.cfg')
- print('Launching Micro-manager in headless mode.')
+ if not python_backend:
+ if mm_install_dir is None:
+ yield # local manual testing where MM has been launched from source
+ else:
+ config_file = os.path.join(mm_install_dir, 'MMConfig_demo.cfg')
+ print('Launching Micro-manager in headless mode.')
- # MM doesn't ship with Java on Mac so allow it to be defined here if using mac os
- java_loc = None
- if "JAVA" in os.environ and sys.platform == "darwin":
- java_loc = os.environ["JAVA"]
+ # MM doesn't ship with Java on Mac so allow it to be defined here if using mac os
+ java_loc = None
+ if "JAVA" in os.environ and sys.platform == "darwin":
+ java_loc = os.environ["JAVA"]
- start_headless(mm_install_dir, config_file, java_loc=java_loc,
- buffer_size_mb=128, max_memory_mb=128, # set these low for github actions
- python_backend=python_backend,
- debug=True)
+ start_headless(mm_install_dir, config_file, java_loc=java_loc,
+ buffer_size_mb=128, max_memory_mb=128, # set these low for github actions
+ debug=True)
- yield None
+ yield
+ stop_headless(debug=True)
+ else: # python backend
+ config_file = os.path.join(mm_install_dir, 'MMConfig_demo.cfg')
+ start_headless(mm_install_dir, config_file,
+ buffer_size_mb=128, max_memory_mb=128, # set these low for github actions
+ python_backend=True,
+ debug=True)
+ yield
stop_headless(debug=True)
From f64b0c1674b6a977896633ba2a1e2455448b9865 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Tue, 11 Jun 2024 05:10:30 +0200
Subject: [PATCH 03/34] fix remaining tests for acqengpy
---
.../remote/RemoteViewerStorageAdapter.java | 6 +-
pycromanager/acquisition/RAMStorage_java.py | 3 +
.../acquisition/acq_eng_py/internal/engine.py | 68 +++++----
.../internal/notification_handler.py | 11 +-
.../acq_eng_py/main/acquisition_event.py | 36 ++---
.../acquisition/acquisition_superclass.py | 38 ++++-
.../acquisition/java_backend_acquisitions.py | 38 +----
.../python_backend_acquisitions.py | 12 +-
pycromanager/install.py | 10 +-
pycromanager/test/conftest.py | 130 ++++++++++--------
pycromanager/test/test_acquisition.py | 16 ++-
11 files changed, 201 insertions(+), 167 deletions(-)
diff --git a/java/src/main/java/org/micromanager/remote/RemoteViewerStorageAdapter.java b/java/src/main/java/org/micromanager/remote/RemoteViewerStorageAdapter.java
index 6dd4b807..42368d36 100644
--- a/java/src/main/java/org/micromanager/remote/RemoteViewerStorageAdapter.java
+++ b/java/src/main/java/org/micromanager/remote/RemoteViewerStorageAdapter.java
@@ -234,7 +234,11 @@ public String getDiskLocation() {
public void close() {
try {
- storage_.closeAndWait();
+ if (!(storage_ instanceof NDRAMStorage)) {
+ // If it's RAM storage, the python side may want to hang onto it
+ storage_.closeAndWait();
+ storage_ = null;
+ }
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
diff --git a/pycromanager/acquisition/RAMStorage_java.py b/pycromanager/acquisition/RAMStorage_java.py
index d30345a4..5075b86c 100644
--- a/pycromanager/acquisition/RAMStorage_java.py
+++ b/pycromanager/acquisition/RAMStorage_java.py
@@ -25,6 +25,9 @@ def add_available_axes(self, image_coordinates):
# update information about the available images
self._update_axes(image_coordinates)
self._new_image_event.set()
+ if self.dtype is None:
+ image = self.read_image(**image_coordinates)
+ self._infer_image_properties(image)
def get_image_coordinates_list(self):
"""
diff --git a/pycromanager/acquisition/acq_eng_py/internal/engine.py b/pycromanager/acquisition/acq_eng_py/internal/engine.py
index 21de6efd..7b96a323 100644
--- a/pycromanager/acquisition/acq_eng_py/internal/engine.py
+++ b/pycromanager/acquisition/acq_eng_py/internal/engine.py
@@ -186,14 +186,13 @@ def execute_acquisition_event(self, event: AcquisitionEvent):
if event is None:
return # The hook cancelled this event
self.abort_if_requested(event, None)
- hardware_sequences_in_progress = HardwareSequences()
+
try:
self.start_z_drive(event, hardware_sequences_in_progress)
except HardwareControlException as e:
self.stop_hardware_sequences(hardware_sequences_in_progress)
raise e
- # TODO restore this
event.acquisition_.post_notification(AcqNotification(
AcqNotification.Hardware, event.axisPositions_, AcqNotification.Hardware.POST_HARDWARE))
for h in event.acquisition_.get_after_hardware_hooks():
@@ -415,11 +414,11 @@ def stop_hardware_sequences(self, hardware_sequences_in_progress: HardwareSequen
# Stop any hardware sequences
for device_name in hardware_sequences_in_progress.device_names:
try:
- if str(self.core.getDeviceType(device_name)) == "StageDevice":
- str(self.core.stopStageSequence(device_name))
- elif str(self.core.getDeviceType(device_name)) == "XYStageDevice":
+ if self.core.get_device_type(device_name) == 5:
+ self.core.stopStageSequence(device_name)
+ elif self.core.get_device_type(device_name) == 6:
self.core.stopXYStageSequence(device_name)
- elif (self.core.getDeviceType(device_name)) == "CameraDevice":
+ elif self.core.get_device_type(device_name) == 2: # camera device
self.core.stopSequenceAcquisition(self.core.getCameraDevice())
except Exception as ee:
traceback.print_exc()
@@ -576,13 +575,13 @@ def change_additional_properties(event):
for e in event.get_sequence():
if z_sequence is not None:
- z_sequence.add(e.get_z_position())
+ z_sequence.append(e.get_z_position())
if x_sequence is not None:
- x_sequence.add(e.get_x_position())
+ x_sequence.append(e.get_x_position())
if y_sequence is not None:
- y_sequence.add(e.get_y_position())
+ y_sequence.append(e.get_y_position())
if exposure_sequence_ms is not None:
- exposure_sequence_ms.add(e.get_exposure())
+ exposure_sequence_ms.append(e.get_exposure())
# Set sequences for all channel properties
if prop_sequences is not None:
@@ -592,6 +591,7 @@ def change_additional_properties(event):
prop_name = ps.getPropertyName()
if e == event.get_sequence()[0]: # First property
+ raise NotImplementedError("Property sequences not yet implemented")
# TODO: what is this in pymmcore
prop_sequences.add(StrVector())
@@ -601,35 +601,33 @@ def change_additional_properties(event):
if self.core.is_property_sequenceable(device_name, prop_name):
prop_sequences.get(i).add(prop_value)
- hardware_sequences_in_progress.device_names.append(self.core.get_camera_device())
-
- # Now have built up all the sequences, apply them
- if event.is_exposure_sequenced():
- self.core.load_exposure_sequence(self.core.get_camera_device(), exposure_sequence_ms)
- # Already added camera
-
- if event.is_xy_sequenced():
- self.core.load_xy_stage_sequence(xy_stage, x_sequence, y_sequence)
- hardware_sequences_in_progress.device_names.add(xy_stage)
+ hardware_sequences_in_progress.device_names.append(self.core.get_camera_device())
+ # Now have built up all the sequences, apply them
+ if event.is_exposure_sequenced():
+ self.core.load_exposure_sequence(self.core.get_camera_device(), exposure_sequence_ms)
+ # Already added camera
+ if event.is_xy_sequenced():
+ self.core.load_xy_stage_sequence(xy_stage, x_sequence, y_sequence)
+ hardware_sequences_in_progress.device_names.add(xy_stage)
- if event.is_config_group_sequenced():
- for i in range(config.size()):
- ps = config.getSetting(i)
- device_name = ps.getDeviceLabel()
- prop_name = ps.getPropertyName()
+ if event.is_config_group_sequenced():
+ for i in range(config.size()):
+ ps = config.getSetting(i)
+ device_name = ps.getDeviceLabel()
+ prop_name = ps.getPropertyName()
- if prop_sequences.get(i).size() > 0:
- self.core.load_property_sequence(device_name, prop_name, prop_sequences.get(i))
- hardware_sequences_in_progress.property_names.add(prop_name)
- hardware_sequences_in_progress.property_device_names.add(device_name)
+ if prop_sequences.get(i).size() > 0:
+ self.core.load_property_sequence(device_name, prop_name, prop_sequences.get(i))
+ hardware_sequences_in_progress.property_names.add(prop_name)
+ hardware_sequences_in_progress.property_device_names.add(device_name)
- self.core.prepare_sequence_acquisition(self.core.get_camera_device())
+ self.core.prepare_sequence_acquisition(self.core.get_camera_device())
- # Compare to last event to see what needs to change
- if self.last_event is not None and self.last_event.acquisition_ != event.acquisition_:
- self.last_event = None # Update all hardware if switching to a new acquisition
+ # Compare to last event to see what needs to change
+ if self.last_event is not None and self.last_event.acquisition_ != event.acquisition_:
+ self.last_event = None # Update all hardware if switching to a new acquisition
# Other stage devices
@@ -693,10 +691,10 @@ def move_z_device(event):
z_sequence = pymmcore.DoubleVector() if event.is_z_sequenced() else None
for e in event.get_sequence():
if z_sequence is not None:
- z_sequence.add(e.get_z_position())
+ z_sequence.append(e.get_z_position())
if event.is_z_sequenced():
self.core.load_stage_sequence(z_stage, z_sequence)
- hardware_sequences_in_progress.device_names.add(z_stage)
+ hardware_sequences_in_progress.device_names.append(z_stage)
# Z stage
loop_hardware_command_retries(lambda: move_z_device(event), "Moving Z device")
diff --git a/pycromanager/acquisition/acq_eng_py/internal/notification_handler.py b/pycromanager/acquisition/acq_eng_py/internal/notification_handler.py
index c84ccc07..4194225c 100644
--- a/pycromanager/acquisition/acq_eng_py/internal/notification_handler.py
+++ b/pycromanager/acquisition/acq_eng_py/internal/notification_handler.py
@@ -1,5 +1,8 @@
+import warnings
from queue import Queue
+import queue
import threading
+import traceback
class NotificationHandler:
def __init__(self):
@@ -23,9 +26,11 @@ def run(self):
break
def post_notification(self, notification):
- self.notification_queue.put(notification)
- if self.notification_queue.qsize() > 500:
- print(f"Warning: Acquisition notification queue size: {self.notification_queue.qsize()}")
+ # print(f"NotificationHandler.post_notification: {notification}")
+ self.notification_queue.put(notification)
+ # print("NotificationHandler.post_notification. size", self.notification_queue.qsize() )
+ if self.notification_queue.qsize() > 500:
+ warnings.warn(f"Acquisition notification queue size: {self.notification_queue.qsize()}")
def add_listener(self, listener):
self.listeners.append(listener)
diff --git a/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py b/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py
index c2dca402..03310725 100644
--- a/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py
+++ b/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py
@@ -42,15 +42,15 @@ def __init__(self, acq, sequence=None):
exposureSet = set()
configSet = set()
for event in self.sequence_:
- if event.zPosition_:
+ if event.zPosition_ is not None:
zPosSet.add(event.get_z_position())
- if event.xPosition_:
+ if event.xPosition_ is not None:
xPosSet.add(event.get_x_position())
- if event.yPosition_:
+ if event.yPosition_ is not None:
yPosSet.add(event.get_y_position())
- if event.exposure_:
+ if event.exposure_ is not None:
exposureSet.add(event.get_exposure())
- if event.configPreset_:
+ if event.configPreset_ is not None:
configSet.add(event.get_config_preset())
self.exposureSequenced_ = len(exposureSet) > 1
self.configGroupSequenced_ = len(configSet) > 1
@@ -96,13 +96,13 @@ def event_to_json(e):
if e.has_config_group():
data["config_group"] = [e.configGroup_, e.configPreset_]
- if e.exposure_:
+ if e.exposure_ is not None:
data["exposure"] = e.exposure_
if e.slmImage_:
data["slm_pattern"] = e.slmImage_
- if e.timeout_ms_:
+ if e.timeout_ms_ is not None:
data["timeout_ms"] = e.timeout_ms_
axes = {axis: e.axisPositions_[axis] for axis in e.axisPositions_}
@@ -114,13 +114,13 @@ def event_to_json(e):
if stage_positions:
data["stage_positions"] = stage_positions
- if e.zPosition_:
+ if e.zPosition_ is not None:
data["z"] = e.zPosition_
- if e.xPosition_:
+ if e.xPosition_ is not None:
data["x"] = e.xPosition_
- if e.yPosition_:
+ if e.yPosition_ is not None:
data["y"] = e.yPosition_
if e.camera_:
@@ -153,29 +153,29 @@ def event_from_json(data, acq):
event.miniumumStartTime_ms_ = int(data["min_start_time"] * 1000)
if "timeout" in data:
- event.timeout_ms_ = data["timeout"]
+ event.timeout_ms_ = float(data["timeout"])
if "config_group" in data:
event.configGroup_ = data["config_group"][0]
event.configPreset_ = data["config_group"][1]
if "exposure" in data:
- event.exposure_ = data["exposure"]
+ event.exposure_ = float(data["exposure"])
    if "timeout_ms" in data:
-        event.slmImage_ = data["timeout_ms"]
+        event.timeout_ms_ = float(data["timeout_ms"])
if "stage_positions" in data:
for stagePos in data["stage_positions"]:
event.setStageCoordinate(stagePos[0], stagePos[1])
if "z" in data:
- event.zPosition_ = data["z"]
+ event.zPosition_ = float(data["z"])
if "stage" in data:
deviceName = data["stage"]["device_name"]
position = data["stage"]["position"]
- event.axisPositions_[deviceName] = position
+ event.axisPositions_[deviceName] = float(position)
if "axis_name" in data["stage"]:
axisName = data["stage"]["axis_name"]
event.stageDeviceNamesToAxisNames_[deviceName] = axisName
@@ -190,10 +190,10 @@ def event_from_json(data, acq):
# event.yPosition_ = xyPos.y
if "x" in data:
- event.xPosition_ = data["x"]
+ event.xPosition_ = float(data["x"])
if "y" in data:
- event.yPosition_ = data["y"]
+ event.yPosition_ = float(data["y"])
if "slm_pattern" in data:
event.slmImage_ = data["slm_pattern"]
@@ -239,7 +239,7 @@ def get_additional_properties(self):
def should_acquire_image(self):
if self.sequence_:
return True
- return self.configPreset_ is not None or len(self.axisPositions_) > 0
+ return self.configPreset_ is not None or self.axisPositions_ is not None
def has_config_group(self):
return self.configPreset_ is not None and self.configGroup_ is not None
diff --git a/pycromanager/acquisition/acquisition_superclass.py b/pycromanager/acquisition/acquisition_superclass.py
index 9b8dff9b..b1db8fe2 100644
--- a/pycromanager/acquisition/acquisition_superclass.py
+++ b/pycromanager/acquisition/acquisition_superclass.py
@@ -284,8 +284,6 @@ def notifying_generator(original_generator):
self.abort(e)
raise e
-
-
def abort(self, exception=None):
"""
Cancel any pending events and shut down immediately
@@ -306,6 +304,42 @@ def abort(self, exception=None):
# checking the status of the acquisition
self._acq.abort()
+
+ def _add_storage_monitor_fn(self, image_saved_fn=None):
+ """
+        Add a callback function that gets called whenever a new image is written to disk (for acquisitions in
+ progress only)
+
+ Parameters
+ ----------
+ image_saved_fn : Callable
+ user function to be run whenever an image is ready on disk
+ """
+
+ callback = None
+ if image_saved_fn is not None:
+ params = signature(image_saved_fn).parameters
+ if len(params) == 2:
+ callback = image_saved_fn
+ elif len(params) == 3:
+ callback = lambda axes, dataset: image_saved_fn(axes, dataset, self._event_queue)
+ else:
+ raise Exception('Image saved callbacks must have either 2 or three parameters')
+
+ def _storage_monitor_fn():
+ dataset = self.get_dataset()
+ while True:
+ image_notification = self._image_notification_queue.get()
+ if AcqNotification.is_data_sink_finished_notification(image_notification):
+ break
+ dataset._new_image_arrived = True
+ if callback is not None:
+ callback(image_notification.payload, dataset)
+
+ t = threading.Thread(target=_storage_monitor_fn, name='StorageMonitorThread')
+ t.start()
+ return t
+
def _create_event_queue(self):
"""Create thread safe queue for events so they can be passed from multiple processes"""
self._event_queue = EventQueue()
diff --git a/pycromanager/acquisition/java_backend_acquisitions.py b/pycromanager/acquisition/java_backend_acquisitions.py
index 0e6d80f7..e0f1041d 100644
--- a/pycromanager/acquisition/java_backend_acquisitions.py
+++ b/pycromanager/acquisition/java_backend_acquisitions.py
@@ -211,7 +211,11 @@ def _notification_handler_fn(acquisition, notification_push_port, connected_even
axes = json.loads(notification.payload)
acquisition._dataset.add_available_axes(axes)
notification.payload = axes
+
acquisition._image_notification_queue.put(notification)
+ # check size
+ if acquisition._image_notification_queue.qsize() > acquisition._image_notification_queue.maxsize * 0.9:
+ warnings.warn(f"Acquisition image notification queue size: {acquisition._image_notification_queue.qsize()}")
acquisition._notification_queue.put(notification)
if AcqNotification.is_acquisition_finished_notification(notification):
@@ -413,40 +417,6 @@ def _start_receiving_notifications(self):
self._remote_notification_handler.start()
return notification_thread
- def _add_storage_monitor_fn(self, image_saved_fn=None):
- """
- Add a callback function that gets called whenever a new image is writtern to disk (for acquisitions in
- progress only)
-
- Parameters
- ----------
- image_saved_fn : Callable
- user function to be run whenever an image is ready on disk
- """
-
- callback = None
- if image_saved_fn is not None:
- params = signature(image_saved_fn).parameters
- if len(params) == 2:
- callback = image_saved_fn
- elif len(params) == 3:
- callback = lambda axes, dataset: image_saved_fn(axes, dataset, self._event_queue)
- else:
- raise Exception('Image saved callbacks must have either 2 or three parameters')
-
- def _storage_monitor_fn():
- dataset = self.get_dataset()
- while True:
- image_notification = self._image_notification_queue.get()
- if AcqNotification.is_data_sink_finished_notification(image_notification):
- break
- dataset._new_image_arrived = True
- if callback is not None:
- callback(image_notification.payload, dataset)
- t = threading.Thread(target=_storage_monitor_fn, name='StorageMonitorThread')
- t.start()
- return t
-
def _check_for_exceptions(self):
"""
Check for exceptions on the python side (i.e. hooks and processors)
diff --git a/pycromanager/acquisition/python_backend_acquisitions.py b/pycromanager/acquisition/python_backend_acquisitions.py
index 68bb9009..22acb40b 100644
--- a/pycromanager/acquisition/python_backend_acquisitions.py
+++ b/pycromanager/acquisition/python_backend_acquisitions.py
@@ -1,3 +1,4 @@
+import warnings
from docstring_inheritance import NumpyDocstringInheritanceMeta
from pycromanager.acquisition.acq_eng_py.main.AcqEngPy_Acquisition import Acquisition as pymmcore_Acquisition
from pycromanager.acquisition.acquisition_superclass import _validate_acq_events, Acquisition
@@ -75,8 +76,11 @@ def submit_events():
def post_notification(notification):
self._notification_queue.put(notification)
# these are processed seperately to handle image saved callback
- if AcqNotification.is_image_saved_notification(notification):
+ if AcqNotification.is_image_saved_notification(notification) or \
+ AcqNotification.is_data_sink_finished_notification(notification):
self._image_notification_queue.put(notification)
+ if self._image_notification_queue.qsize() > self._image_notification_queue.maxsize * 0.9:
+ warnings.warn(f"Acquisition image notification queue size: {self._image_notification_queue.qsize()}")
self._acq.add_acq_notification_listener(NotificationListener(post_notification))
@@ -94,6 +98,10 @@ def post_notification(notification):
if self._image_processor is not None:
self._acq.add_image_processor(self._image_processor)
+ # Monitor image arrival so they can be loaded on python side, but with no callback function
+        # Need to do this regardless of whether you use it, so that notification handling shuts down
+ self._storage_monitor_thread = self._add_storage_monitor_fn(image_saved_fn=image_saved_fn)
+
if napari_viewer is not None:
# using napari viewer
@@ -105,6 +113,7 @@ def post_notification(notification):
assert isinstance(napari_viewer, napari.Viewer), 'napari_viewer must be an instance of napari.Viewer'
self._napari_viewer = napari_viewer
start_napari_signalling(self._napari_viewer, self.get_dataset())
+ self._acq.start()
######## Public API ###########
@@ -120,6 +129,7 @@ def await_completion(self):
self._check_for_exceptions()
self._event_thread.join()
self._notification_dispatch_thread.join()
+ self._storage_monitor_thread.join()
self._acq = None
self._finished = True
diff --git a/pycromanager/install.py b/pycromanager/install.py
index c18e44fa..1801ba91 100644
--- a/pycromanager/install.py
+++ b/pycromanager/install.py
@@ -50,12 +50,12 @@ def download_and_install(windows=True, destination='auto'):
wget.download(latest_version, out=installer, bar=lambda curr, total, width: print(f"\rDownloading installer: {curr / total*100:.2f}%", end=''))
if windows:
- # TODO: need to test this
- cmd = f"{installer} /SP /VERYSILENT /SUPRESSMSGBOXES"
- # /CURRENTUSER"
- # f"/DIR={mm_install_dir} /LOG={mm_install_log_path}"
+ if destination == 'auto':
+ destination = r'C:\Program Files\Micro-Manager'
+ cmd = f"{installer} /SP /VERYSILENT /SUPRESSMSGBOXES /DIR={destination}"
+ # /LOG={mm_install_log_path}"
subprocess.run(cmd, shell=True)
- return 'TODO: Windows install path'
+ return destination
else:
if destination == 'auto':
destination = os.path.expanduser('~') + '/Micro-Manager'
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index 95e156c6..10debb17 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -15,6 +15,11 @@
import socket
from pycromanager.install import download_and_install
+# def pytest_runtest_protocol(item, nextitem):
+# """Add a delay between each test."""
+# time.sleep(0.5) # Adjust the delay time (in seconds) as needed
+# return None # Continue with the default test execution
+
def is_port_in_use(port):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
return s.connect_ex(('localhost', port)) == 0
@@ -58,72 +63,75 @@ def install_mm():
print('Using Micro-manager running on port 4827 for testing')
yield
else:
- yield '/Users/henrypinkard/Micro-Manager'
- # # Download an install latest nightly build
- # mm_install_dir = download_and_install(windows=sys.platform.startswith('win'), destination='auto')
- #
- # #### Replace with newer versions of Java libraries ####
- # # find pycro-manager/java path
- # if os.path.isdir('java'):
- # java_path = os.path.abspath('java')
- # # in case cwd is '/pycromanager/test'
- # elif os.path.isdir('../../java'):
- # java_path = os.path.abspath('../../java')
- # else:
- # raise RuntimeError('Could not find pycro-manager/java path')
- #
- # # Delete the pycromanagerjava.jar file that is packaged with the nightly build
- # pycromanager_jar_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava-*.jar')
- # for file_path in glob.glob(pycromanager_jar_path):
- # os.remove(file_path)
- # print(f'Removed {file_path}')
- #
- # # Copy the pycromanagerjava.jar file that was compiled by the github action
- # # into the nightly build so that it will test with the latest code
- # compiled_jar_path = os.path.join(java_path, 'target', 'PycromanagerJava-*.jar')
- # destination_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava.jar')
- #
- # # Find the actual file that matches the pattern and copy it to the destination
- # matched_files = [file for file in glob.glob(compiled_jar_path)
- # if not any(exclude in file for exclude in ['-javadoc', '-sources', '.asc', '.pom'])]
- # if matched_files:
- # file_path = matched_files[0]
- # shutil.copy2(file_path, destination_path)
- # print(f'Copied {file_path} to {destination_path}')
- # else:
- # print(f'No matching JAR file found at {compiled_jar_path}')
- # raise FileNotFoundError(f'No matching JAR file found at {compiled_jar_path}')
- #
- # # Update pycromanager dependency jar files packaged with the Micro-manager nightly build
- # # Files are updated only if they are larger version
- # # Copy dependency jar files if present in target/dependency
- # if os.path.isdir(os.path.join(java_path, 'target/dependency')):
- # replace_jars(os.path.join(java_path, 'target/dependency'), os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'),
- # ['AcqEngJ', 'NDTiffStorage', 'NDViewer'])
- # # Copy dependency jar files if present in ../../REPO_NAME/target
- # for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer']:
- # if os.path.isdir(os.path.join(java_path, f'../../{repo_name}/target')):
- # replace_jars(os.path.join(java_path, f'../../{repo_name}/target'),
- # os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'), [repo_name])
- #
- # yield mm_install_dir
-
-
-@pytest.fixture(scope="session")
-def setup_data_folder():
- data_folder_path = os.path.join(os.getcwd(), 'temp_data')
- if not os.path.isdir(data_folder_path):
- os.mkdir(data_folder_path)
+        # Download and install latest nightly build
+ mm_install_dir = download_and_install(windows=sys.platform.startswith('win'), destination='auto')
+
+ #### Replace with newer versions of Java libraries ####
+ # find pycro-manager/java path
+ if os.path.isdir('java'):
+ java_path = os.path.abspath('java')
+ # in case cwd is '/pycromanager/test'
+ elif os.path.isdir('../../java'):
+ java_path = os.path.abspath('../../java')
+ else:
+ raise RuntimeError('Could not find pycro-manager/java path')
+
+ # Delete the pycromanagerjava.jar file that is packaged with the nightly build
+ pycromanager_jar_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava-*.jar')
+ for file_path in glob.glob(pycromanager_jar_path):
+ os.remove(file_path)
+ print(f'Removed {file_path}')
+
+ # Copy the pycromanagerjava.jar file that was compiled by the github action
+ # into the nightly build so that it will test with the latest code
+ compiled_jar_path = os.path.join(java_path, 'target', 'PycromanagerJava-*.jar')
+ destination_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava.jar')
+
+ # Find the actual file that matches the pattern and copy it to the destination
+ matched_files = [file for file in glob.glob(compiled_jar_path)
+ if not any(exclude in file for exclude in ['-javadoc', '-sources', '.asc', '.pom'])]
+ if matched_files:
+ file_path = matched_files[0]
+ shutil.copy2(file_path, destination_path)
+ print(f'Copied {file_path} to {destination_path}')
+ else:
+ print(f'No matching JAR file found at {compiled_jar_path}')
+ raise FileNotFoundError(f'No matching JAR file found at {compiled_jar_path}')
+
+ # Update pycromanager dependency jar files packaged with the Micro-manager nightly build
+ # Files are updated only if they are larger version
+ # Copy dependency jar files if present in target/dependency
+ if os.path.isdir(os.path.join(java_path, 'target/dependency')):
+ replace_jars(os.path.join(java_path, 'target/dependency'), os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'),
+ ['AcqEngJ', 'NDTiffStorage', 'NDViewer'])
+ # Copy dependency jar files if present in ../../REPO_NAME/target
+ for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer']:
+ if os.path.isdir(os.path.join(java_path, f'../../{repo_name}/target')):
+ replace_jars(os.path.join(java_path, f'../../{repo_name}/target'),
+ os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'), [repo_name])
+
+ yield mm_install_dir
+
+
+@pytest.fixture(scope="session", params=['save_to_disk', 'RAM'])
+# @pytest.fixture(scope="session", params=['RAM'])
+def setup_data_folder(request):
+ if request.param != 'save_to_disk':
+ yield None
+ else:
+ data_folder_path = os.path.join(os.getcwd(), 'temp_data')
+ if not os.path.isdir(data_folder_path):
+ os.mkdir(data_folder_path)
- yield data_folder_path
+ yield data_folder_path
- shutil.rmtree(data_folder_path)
+ shutil.rmtree(data_folder_path)
-@pytest.fixture(scope="session", params=[True])
-# @pytest.fixture(scope="session", params=[True, False])
+# @pytest.fixture(scope="session", params=['python_backend', 'java_backend'])
+@pytest.fixture(scope="session", params=['java_backend'])
def launch_mm_headless(request, install_mm):
- python_backend = request.param
+ python_backend = request.param == 'python_backend'
mm_install_dir = install_mm
if not python_backend:
if mm_install_dir is None:
diff --git a/pycromanager/test/test_acquisition.py b/pycromanager/test/test_acquisition.py
index 55904d41..5b5923dd 100644
--- a/pycromanager/test/test_acquisition.py
+++ b/pycromanager/test/test_acquisition.py
@@ -443,19 +443,21 @@ def hook_fn(_events):
mmc = Core()
mmc.set_exposure(1000)
- with Acquisition(setup_data_folder, 'acq', show_display=False,
- pre_hardware_hook_fn=hook_fn) as acq:
+ with Acquisition(setup_data_folder, 'acq', show_display=False, pre_hardware_hook_fn=hook_fn) as acq:
events = multi_d_acquisition_events(1000)
acq.acquire(events)
time.sleep(10)
acq.abort()
+ assert not mmc.is_sequence_running()
+
# reset exposure time
mmc.set_exposure(10)
dataset = acq.get_dataset()
try:
- assert(0 < len(dataset.index) < 100)
+ assert(0 < len(dataset.axes['time']) < 100)
+
finally:
dataset.close()
@@ -463,10 +465,10 @@ def test_abort_with_no_events(launch_mm_headless, setup_data_folder):
"""
Test that aborting before any events processed doesnt cause hang or exception
"""
+ mmc = Core()
with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
acq.abort()
- assert True
-
+ assert not mmc.is_sequence_running()
def test_abort_from_external(launch_mm_headless, setup_data_folder):
"""
@@ -508,7 +510,7 @@ def hook_fn(_events):
dataset = acq.get_dataset()
try:
- assert(len(dataset.index) < 1000)
+ assert(len(dataset.axes['z']) < 1000)
finally:
dataset.close()
@@ -648,7 +650,7 @@ def test_multi_channel_parsing(launch_mm_headless, setup_data_folder):
dataset = acq.get_dataset()
try:
- assert all([channel in dataset.get_channel_names() for channel in ["DAPI", "FITC"]])
+ assert all([channel in dataset.axes['channel'] for channel in ["DAPI", "FITC"]])
finally:
dataset.close()
From 3b9e81252407ac28f35e0e02b178de363ae157ad Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Tue, 11 Jun 2024 05:13:20 +0200
Subject: [PATCH 04/34] bump ndtiff version
---
requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements.txt b/requirements.txt
index 1dc8dcf9..ad9de86e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
numpy
dask[array]>=2022.2.0
pyzmq
-ndtiff>=3.0.0
+ndtiff>=3.0.1
docstring-inheritance
pymmcore
sortedcontainers
From 4228d9fd8f22676a8ae5192cf4aee31e2166d1cc Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Thu, 13 Jun 2024 11:39:40 +0200
Subject: [PATCH 05/34] add tests for python backend Acquisition Engine and RAM
storage; Update to using new NDStorage package
---
.../remote/RemoteNotificationHandler.java | 1 -
pycromanager/__init__.py | 2 +-
pycromanager/acquisition/RAMStorage_java.py | 11 +++-
.../acquisition/acquisition_superclass.py | 2 +
.../acquisition/java_backend_acquisitions.py | 60 +++++++++----------
.../python_backend_acquisitions.py | 32 +++++-----
pycromanager/test/conftest.py | 9 +--
pycromanager/test/test_acquisition.py | 1 -
requirements.txt | 4 +-
9 files changed, 61 insertions(+), 61 deletions(-)
diff --git a/java/src/main/java/org/micromanager/remote/RemoteNotificationHandler.java b/java/src/main/java/org/micromanager/remote/RemoteNotificationHandler.java
index e9db6757..4a47bd61 100644
--- a/java/src/main/java/org/micromanager/remote/RemoteNotificationHandler.java
+++ b/java/src/main/java/org/micromanager/remote/RemoteNotificationHandler.java
@@ -64,7 +64,6 @@ public void start() {
ex.printStackTrace();
throw new RuntimeException(ex);
}
-
pushSocket_.push(e);
if (e.isAcquisitionEventsFinishedNotification()) {
eventsFinished = true;
diff --git a/pycromanager/__init__.py b/pycromanager/__init__.py
index c2e7d09c..a6aafabf 100644
--- a/pycromanager/__init__.py
+++ b/pycromanager/__init__.py
@@ -8,5 +8,5 @@
from pycromanager.core import Core
from pyjavaz import JavaObject, JavaClass, PullSocket, PushSocket
from pycromanager.acquisition.acq_eng_py.main.acq_notification import AcqNotification
-from ndtiff import Dataset
+from ndstorage import Dataset
from ._version import __version__, version_info
diff --git a/pycromanager/acquisition/RAMStorage_java.py b/pycromanager/acquisition/RAMStorage_java.py
index 5075b86c..fc73156f 100644
--- a/pycromanager/acquisition/RAMStorage_java.py
+++ b/pycromanager/acquisition/RAMStorage_java.py
@@ -1,7 +1,7 @@
from pyjavaz.wrappers import JavaObject
-from ndtiff import NDStorage
+from ndtiff.ndstorage_base import NDStorageBase
-class NDRAMDatasetJava(NDStorage):
+class NDRAMDatasetJava(NDStorageBase):
"""
A python class that wraps a Java-backend RAM data storage.
@@ -14,8 +14,12 @@ def __init__(self, java_RAM_data_storage):
self._java_RAM_data_storage = java_RAM_data_storage
self._index_keys = set()
+ def __del__(self):
+ print('ram storage descutructor called')
+ self.close()
+
def close(self):
- pass
+ self._java_RAM_data_storage = None # allow the Java side to be garbage collected
def add_available_axes(self, image_coordinates):
"""
@@ -51,6 +55,7 @@ def read_image(self, channel=None, z=None, time=None, position=None, row=None, c
if key not in self._index_keys:
return None
java_hashmap = JavaObject('java.util.HashMap')
+ # raise Exception
for k, v in axes.items():
java_hashmap.put(k, v)
tagged_image = self._java_RAM_data_storage.get_image(java_hashmap)
diff --git a/pycromanager/acquisition/acquisition_superclass.py b/pycromanager/acquisition/acquisition_superclass.py
index b1db8fe2..ccf00db9 100644
--- a/pycromanager/acquisition/acquisition_superclass.py
+++ b/pycromanager/acquisition/acquisition_superclass.py
@@ -17,6 +17,7 @@
from inspect import signature
from typing import Generator
from types import GeneratorType
+import time
from queue import Queue
from typing import Generator, Dict, Union
@@ -336,6 +337,7 @@ def _storage_monitor_fn():
if callback is not None:
callback(image_notification.payload, dataset)
+
t = threading.Thread(target=_storage_monitor_fn, name='StorageMonitorThread')
t.start()
return t
diff --git a/pycromanager/acquisition/java_backend_acquisitions.py b/pycromanager/acquisition/java_backend_acquisitions.py
index e0f1041d..6589bf7c 100644
--- a/pycromanager/acquisition/java_backend_acquisitions.py
+++ b/pycromanager/acquisition/java_backend_acquisitions.py
@@ -17,7 +17,7 @@
from pycromanager.mm_java_classes import ZMQRemoteMMCoreJ, Magellan
from pycromanager.acquisition.RAMStorage_java import NDRAMDatasetJava
-from ndtiff import Dataset
+from ndstorage import Dataset
import os.path
import queue
from docstring_inheritance import NumpyDocstringInheritanceMeta
@@ -345,43 +345,43 @@ def __init__(
self._napari_viewer = napari_viewer
start_napari_signalling(self._napari_viewer, self.get_dataset())
-
######## Public API methods with unique implementations for Java backend ###########
def await_completion(self):
- while not self._acq.are_events_finished() or (
- self._acq.get_data_sink() is not None and not self._acq.get_data_sink().is_finished()):
+ try:
+ while not self._acq.are_events_finished() or (
+ self._acq.get_data_sink() is not None and not self._acq.get_data_sink().is_finished()):
+ self._check_for_exceptions()
+ self._acq.block_until_events_finished(0.01)
+ # This will block until saving is finished, if there is a data sink
+ self._acq.wait_for_completion()
self._check_for_exceptions()
- self._acq.block_until_events_finished(0.01)
- # This will block until saving is finished, if there is a data sink
- self._acq.wait_for_completion()
- self._check_for_exceptions()
-
- for hook_thread in self._hook_threads:
- hook_thread.join()
+ finally:
+ for hook_thread in self._hook_threads:
+ hook_thread.join()
- if hasattr(self, '_event_thread'):
- self._event_thread.join()
+ if hasattr(self, '_event_thread'):
+ self._event_thread.join()
- # need to do this so its _Bridge can be garbage collected and a reference to the JavaBackendAcquisition
- # does not prevent Bridge cleanup and process exiting
- self._remote_acq = None
-
- # Wait on all the other threads to shut down properly
- if hasattr(self, '_storage_monitor_thread'):
- self._storage_monitor_thread.join()
-
- if hasattr(self, '_acq_notification_recieving_thread'):
- # for backwards compatiblitiy with older versions of Pycromanager java before this added
- self._acq_notification_recieving_thread.join()
- self._remote_notification_handler.notification_handling_complete()
# need to do this so its _Bridge can be garbage collected and a reference to the JavaBackendAcquisition
# does not prevent Bridge cleanup and process exiting
- self._remote_notification_handler = None
- self._acq_notification_dispatcher_thread.join()
-
- self._acq = None
- self._finished = True
+ self._remote_acq = None
+
+ # Wait on all the other threads to shut down properly
+ if hasattr(self, '_storage_monitor_thread'):
+ self._storage_monitor_thread.join()
+
+ if hasattr(self, '_acq_notification_recieving_thread'):
+            # for backwards compatibility with older versions of Pycromanager java before this was added
+ self._acq_notification_recieving_thread.join()
+ self._remote_notification_handler.notification_handling_complete()
+ # need to do this so its _Bridge can be garbage collected and a reference to the JavaBackendAcquisition
+ # does not prevent Bridge cleanup and process exiting
+ self._remote_notification_handler = None
+ self._acq_notification_dispatcher_thread.join()
+
+ self._acq = None
+ self._finished = True
def get_viewer(self):
diff --git a/pycromanager/acquisition/python_backend_acquisitions.py b/pycromanager/acquisition/python_backend_acquisitions.py
index 22acb40b..690cf2f2 100644
--- a/pycromanager/acquisition/python_backend_acquisitions.py
+++ b/pycromanager/acquisition/python_backend_acquisitions.py
@@ -7,8 +7,8 @@
import threading
from inspect import signature
-from ndtiff.ndram_dataset import NDRAMDataset
-from ndtiff.ndtiff_dataset import NDTiffDataset
+from ndstorage.ndram_dataset import NDRAMDataset
+from ndstorage.ndtiff_dataset import NDTiffDataset
class PythonBackendAcquisition(Acquisition, metaclass=NumpyDocstringInheritanceMeta):
"""
@@ -120,19 +120,21 @@ def post_notification(notification):
def await_completion(self):
"""Wait for acquisition to finish and resources to be cleaned up"""
- while not self._acq.are_events_finished() or (
- self._acq.get_data_sink() is not None and not self._acq.get_data_sink().is_finished()):
- self._check_for_exceptions()
- self._acq.block_until_events_finished(0.05)
- if self._acq.get_data_sink() is not None:
- self._acq.get_data_sink().block_until_finished(0.05)
- self._check_for_exceptions()
- self._event_thread.join()
- self._notification_dispatch_thread.join()
- self._storage_monitor_thread.join()
-
- self._acq = None
- self._finished = True
+ try:
+ while not self._acq.are_events_finished() or (
+ self._acq.get_data_sink() is not None and not self._acq.get_data_sink().is_finished()):
+ self._check_for_exceptions()
+ self._acq.block_until_events_finished(0.05)
+ if self._acq.get_data_sink() is not None:
+ self._acq.get_data_sink().block_until_finished(0.05)
+ self._check_for_exceptions()
+ finally:
+ self._event_thread.join()
+ self._notification_dispatch_thread.join()
+ self._storage_monitor_thread.join()
+
+ self._acq = None
+ self._finished = True
def get_viewer(self):
"""
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index 10debb17..2d31c4a8 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -15,11 +15,6 @@
import socket
from pycromanager.install import download_and_install
-# def pytest_runtest_protocol(item, nextitem):
-# """Add a delay between each test."""
-# time.sleep(0.5) # Adjust the delay time (in seconds) as needed
-# return None # Continue with the default test execution
-
def is_port_in_use(port):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
return s.connect_ex(('localhost', port)) == 0
@@ -114,7 +109,6 @@ def install_mm():
@pytest.fixture(scope="session", params=['save_to_disk', 'RAM'])
-# @pytest.fixture(scope="session", params=['RAM'])
def setup_data_folder(request):
if request.param != 'save_to_disk':
yield None
@@ -128,8 +122,7 @@ def setup_data_folder(request):
shutil.rmtree(data_folder_path)
-# @pytest.fixture(scope="session", params=['python_backend', 'java_backend'])
-@pytest.fixture(scope="session", params=['java_backend'])
+@pytest.fixture(scope="session", params=['python_backend', 'java_backend'])
def launch_mm_headless(request, install_mm):
python_backend = request.param == 'python_backend'
mm_install_dir = install_mm
diff --git a/pycromanager/test/test_acquisition.py b/pycromanager/test/test_acquisition.py
index 5b5923dd..77a979ad 100644
--- a/pycromanager/test/test_acquisition.py
+++ b/pycromanager/test/test_acquisition.py
@@ -12,7 +12,6 @@ def check_acq_sequenced(events, expected_num_events):
def check_acq_not_sequenced(events):
return isinstance(events, dict)
-
def test_timelapse_acq(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=10, time_interval_s=0.1)
diff --git a/requirements.txt b/requirements.txt
index ad9de86e..fd13c200 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,8 +1,8 @@
numpy
dask[array]>=2022.2.0
pyzmq
-ndtiff>=3.0.1
+ndstorage
docstring-inheritance
pymmcore
sortedcontainers
-pyjavaz==1.1.1
+pyjavaz>=1.2
From a322c8fe4d6105a2b9823d8ff0cb69505616cc09 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Thu, 13 Jun 2024 11:40:02 +0200
Subject: [PATCH 06/34] bump version
---
pycromanager/_version.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pycromanager/_version.py b/pycromanager/_version.py
index a5ba57d7..f5f5122b 100644
--- a/pycromanager/_version.py
+++ b/pycromanager/_version.py
@@ -1,2 +1,2 @@
-version_info = (0, 33, 0)
+version_info = (0, 34, 0)
__version__ = ".".join(map(str, version_info))
From ca7d46ef93f84866cd1704f633cd622885cb4620 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Thu, 13 Jun 2024 13:02:40 +0200
Subject: [PATCH 07/34] new package name
---
pycromanager/acquisition/RAMStorage_java.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pycromanager/acquisition/RAMStorage_java.py b/pycromanager/acquisition/RAMStorage_java.py
index fc73156f..64e9eb5e 100644
--- a/pycromanager/acquisition/RAMStorage_java.py
+++ b/pycromanager/acquisition/RAMStorage_java.py
@@ -1,5 +1,5 @@
from pyjavaz.wrappers import JavaObject
-from ndtiff.ndstorage_base import NDStorageBase
+from ndstorage.ndstorage_base import NDStorageBase
class NDRAMDatasetJava(NDStorageBase):
"""
From 75c02bbe7311cb1389643917ad1ed307c303b687 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Thu, 13 Jun 2024 15:34:03 +0200
Subject: [PATCH 08/34] fix windows installer problems
---
pycromanager/install.py | 33 ++++++++++++++++++++++++++-------
1 file changed, 26 insertions(+), 7 deletions(-)
diff --git a/pycromanager/install.py b/pycromanager/install.py
index 1801ba91..ae8676d3 100644
--- a/pycromanager/install.py
+++ b/pycromanager/install.py
@@ -14,10 +14,27 @@
MM_DOWNLOAD_URL_MAC = MM_DOWNLOAD_URL_BASE + '/nightly/2.0/Mac'
MM_DOWNLOAD_URL_WINDOWS = MM_DOWNLOAD_URL_BASE + '/nightly/2.0/Windows'
-def _find_versions(platform='Windows'):
+def _get_platform():
+ """
+ Get the platform of the system
+
+ Returns
+ -------
+ str
+ "Windows" or "Mac"
+ """
+ if sys.platform.startswith('win'):
+ return 'Windows'
+ elif sys.platform.startswith('darwin'):
+ return 'Mac'
+ else:
+ raise ValueError(f"Unsupported OS: {sys.platform}")
+
+def _find_versions():
"""
Find all available versions of Micro-Manager nightly builds
"""
+ platform = _get_platform()
# Get the webpage
if platform == 'Windows':
webpage = requests.get(MM_DOWNLOAD_URL_WINDOWS)
@@ -28,14 +45,12 @@ def _find_versions(platform='Windows'):
return re.findall(r'class="rowDefault" href="([^"]+)', webpage.text)
-def download_and_install(windows=True, destination='auto'):
+def download_and_install(destination='auto', mm_install_log_path=None):
"""
Download and install the latest nightly build of Micro-Manager
Parameters
----------
- windows : bool
- Whether to download the Windows or Mac version
destination : str
The directory to install Micro-Manager to. If 'auto', it will install to the user's home directory.
@@ -44,17 +59,21 @@ def download_and_install(windows=True, destination='auto'):
str
The path to the installed Micro-Manager directory
"""
+ windows = _get_platform() == 'Windows'
platform = 'Windows' if windows else 'Mac'
installer = 'mm_installer.exe' if windows else 'mm_installer.dmg'
- latest_version = MM_DOWNLOAD_URL_BASE + _find_versions(platform)[0]
+ latest_version = MM_DOWNLOAD_URL_BASE + _find_versions()[0]
wget.download(latest_version, out=installer, bar=lambda curr, total, width: print(f"\rDownloading installer: {curr / total*100:.2f}%", end=''))
if windows:
if destination == 'auto':
destination = r'C:\Program Files\Micro-Manager'
- cmd = f"{installer} /SP /VERYSILENT /SUPRESSMSGBOXES /DIR={destination}"
- # /LOG={mm_install_log_path}"
+ cmd = f"{installer} /SP /VERYSILENT /SUPRESSMSGBOXES /CURRENTUSER /DIR=\"{destination}\""
+
+ if mm_install_log_path:
+ cmd += f" /LOG={mm_install_log_path}"
subprocess.run(cmd, shell=True)
+
return destination
else:
if destination == 'auto':
From b860b8734273ec1325c0c753fbb6eb1be53f82f7 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Thu, 13 Jun 2024 15:41:17 +0200
Subject: [PATCH 09/34] fix test configuration
---
pycromanager/test/conftest.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index 2d31c4a8..c2ce1c5f 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -59,7 +59,7 @@ def install_mm():
yield
else:
# Download an install latest nightly build
- mm_install_dir = download_and_install(windows=sys.platform.startswith('win'), destination='auto')
+ mm_install_dir = download_and_install(destination='auto')
#### Replace with newer versions of Java libraries ####
# find pycro-manager/java path
From 63a9b381b414b4eb32f8e464e05963cfa9469340 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Thu, 13 Jun 2024 15:55:32 +0200
Subject: [PATCH 10/34] remove extraneous print
---
pycromanager/acquisition/RAMStorage_java.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/pycromanager/acquisition/RAMStorage_java.py b/pycromanager/acquisition/RAMStorage_java.py
index 64e9eb5e..c60b750c 100644
--- a/pycromanager/acquisition/RAMStorage_java.py
+++ b/pycromanager/acquisition/RAMStorage_java.py
@@ -15,7 +15,6 @@ def __init__(self, java_RAM_data_storage):
self._index_keys = set()
def __del__(self):
- print('ram storage descutructor called')
self.close()
def close(self):
From 621b1db2dd939b40a0a66c0063afc906d894e834 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Thu, 13 Jun 2024 21:18:25 +0200
Subject: [PATCH 11/34] make it easier to run both python and java tests
---
pycromanager/test/conftest.py | 73 +++++++++++++++++++----------------
1 file changed, 40 insertions(+), 33 deletions(-)
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index c2ce1c5f..1ae39b80 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -2,6 +2,8 @@
import sys
import shutil
import subprocess
+import warnings
+
import pytest
import wget
import requests
@@ -72,42 +74,47 @@ def install_mm():
raise RuntimeError('Could not find pycro-manager/java path')
# Delete the pycromanagerjava.jar file that is packaged with the nightly build
- pycromanager_jar_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava-*.jar')
- for file_path in glob.glob(pycromanager_jar_path):
- os.remove(file_path)
- print(f'Removed {file_path}')
-
- # Copy the pycromanagerjava.jar file that was compiled by the github action
- # into the nightly build so that it will test with the latest code
- compiled_jar_path = os.path.join(java_path, 'target', 'PycromanagerJava-*.jar')
- destination_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava.jar')
-
- # Find the actual file that matches the pattern and copy it to the destination
- matched_files = [file for file in glob.glob(compiled_jar_path)
- if not any(exclude in file for exclude in ['-javadoc', '-sources', '.asc', '.pom'])]
- if matched_files:
- file_path = matched_files[0]
- shutil.copy2(file_path, destination_path)
- print(f'Copied {file_path} to {destination_path}')
- else:
- print(f'No matching JAR file found at {compiled_jar_path}')
- raise FileNotFoundError(f'No matching JAR file found at {compiled_jar_path}')
-
- # Update pycromanager dependency jar files packaged with the Micro-manager nightly build
- # Files are updated only if they are larger version
- # Copy dependency jar files if present in target/dependency
- if os.path.isdir(os.path.join(java_path, 'target/dependency')):
- replace_jars(os.path.join(java_path, 'target/dependency'), os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'),
- ['AcqEngJ', 'NDTiffStorage', 'NDViewer'])
- # Copy dependency jar files if present in ../../REPO_NAME/target
- for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer']:
- if os.path.isdir(os.path.join(java_path, f'../../{repo_name}/target')):
- replace_jars(os.path.join(java_path, f'../../{repo_name}/target'),
- os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'), [repo_name])
-
+ try:
+ pycromanager_jar_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava-*.jar')
+ for file_path in glob.glob(pycromanager_jar_path):
+ os.remove(file_path)
+ print(f'Removed {file_path}')
+
+ # Copy the pycromanagerjava.jar file that was compiled by the github action
+ # into the nightly build so that it will test with the latest code
+ compiled_jar_path = os.path.join(java_path, 'target', 'PycromanagerJava-*.jar')
+ destination_path = os.path.join(mm_install_dir, 'plugins', 'Micro-Manager', 'PycromanagerJava.jar')
+
+ # Find the actual file that matches the pattern and copy it to the destination
+ matched_files = [file for file in glob.glob(compiled_jar_path)
+ if not any(exclude in file for exclude in ['-javadoc', '-sources', '.asc', '.pom'])]
+ if matched_files:
+ file_path = matched_files[0]
+ shutil.copy2(file_path, destination_path)
+ print(f'Copied {file_path} to {destination_path}')
+ else:
+ print(f'No matching JAR file found at {compiled_jar_path}')
+ raise FileNotFoundError(f'No matching JAR file found at {compiled_jar_path}')
+
+ # Update pycromanager dependency jar files packaged with the Micro-manager nightly build
+ # Files are updated only if they are larger version
+ # Copy dependency jar files if present in target/dependency
+ if os.path.isdir(os.path.join(java_path, 'target/dependency')):
+ replace_jars(os.path.join(java_path, 'target/dependency'), os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'),
+ ['AcqEngJ', 'NDTiffStorage', 'NDViewer'])
+ # Copy dependency jar files if present in ../../REPO_NAME/target
+ for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer']:
+ if os.path.isdir(os.path.join(java_path, f'../../{repo_name}/target')):
+ replace_jars(os.path.join(java_path, f'../../{repo_name}/target'),
+ os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'), [repo_name])
+
+ except Exception as e:
+ warnings.warn(f'Failed to replace JAR files: {e}')
+ # let this continue so python tests can still run
yield mm_install_dir
+
@pytest.fixture(scope="session", params=['save_to_disk', 'RAM'])
def setup_data_folder(request):
if request.param != 'save_to_disk':
From 1d44a9455750f4b467a33e12cd895fe0a495306f Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Thu, 13 Jun 2024 21:24:11 +0200
Subject: [PATCH 12/34] remove print
---
pycromanager/acquisition/RAMStorage_java.py | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/pycromanager/acquisition/RAMStorage_java.py b/pycromanager/acquisition/RAMStorage_java.py
index 64e9eb5e..90529e3e 100644
--- a/pycromanager/acquisition/RAMStorage_java.py
+++ b/pycromanager/acquisition/RAMStorage_java.py
@@ -15,8 +15,7 @@ def __init__(self, java_RAM_data_storage):
self._index_keys = set()
def __del__(self):
- print('ram storage descutructor called')
- self.close()
+ self.close()
def close(self):
self._java_RAM_data_storage = None # allow the Java side to be garbage collected
From 8be98909ec08a4af8dc2a0d90a54d79605a1940e Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Thu, 13 Jun 2024 21:25:10 +0200
Subject: [PATCH 13/34] update requirements
---
requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements.txt b/requirements.txt
index fd13c200..b93de60f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
numpy
dask[array]>=2022.2.0
pyzmq
-ndstorage
+ndstorage>=0.1.4
docstring-inheritance
pymmcore
sortedcontainers
From 8080635d9f2ce66efa41e09485536308930eec98 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 07:57:04 +0200
Subject: [PATCH 14/34] update pyjavaz dep as well
---
pycromanager/test/conftest.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index 1ae39b80..a52e9301 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -103,7 +103,7 @@ def install_mm():
replace_jars(os.path.join(java_path, 'target/dependency'), os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'),
['AcqEngJ', 'NDTiffStorage', 'NDViewer'])
# Copy dependency jar files if present in ../../REPO_NAME/target
- for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer']:
+ for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer', 'PyJavaZ']:
if os.path.isdir(os.path.join(java_path, f'../../{repo_name}/target')):
replace_jars(os.path.join(java_path, f'../../{repo_name}/target'),
os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'), [repo_name])
From c348ebc50796b676215d8d0ef076b8a37dcec4f2 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 08:29:11 +0200
Subject: [PATCH 15/34] replace pyjavaz and better logging
---
pycromanager/install.py | 9 ++++++++-
pycromanager/test/conftest.py | 10 +++++++++-
2 files changed, 17 insertions(+), 2 deletions(-)
diff --git a/pycromanager/install.py b/pycromanager/install.py
index ae8676d3..e11e3d79 100644
--- a/pycromanager/install.py
+++ b/pycromanager/install.py
@@ -63,7 +63,14 @@ def download_and_install(destination='auto', mm_install_log_path=None):
platform = 'Windows' if windows else 'Mac'
installer = 'mm_installer.exe' if windows else 'mm_installer.dmg'
latest_version = MM_DOWNLOAD_URL_BASE + _find_versions()[0]
- wget.download(latest_version, out=installer, bar=lambda curr, total, width: print(f"\rDownloading installer: {curr / total*100:.2f}%", end=''))
+ # make a progress bar that updates every 0.5 seconds
+ def bar(curr, total, width):
+ if not hasattr(bar, 'last_update'):
+ bar.last_update = 0
+ if curr / total*100 - bar.last_update > 0.5:
+ print(f"\rDownloading installer: {curr / total*100:.2f}%", end='')
+ bar.last_update = curr / total*100
+ wget.download(latest_version, out=installer, bar=bar)
if windows:
if destination == 'auto':
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index a52e9301..b0bef5b3 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -100,10 +100,17 @@ def install_mm():
# Files are updated only if they are larger version
# Copy dependency jar files if present in target/dependency
if os.path.isdir(os.path.join(java_path, 'target/dependency')):
+ # print jars present here
+ print('JAR files present in target/dependency:')
+ for f in os.listdir(os.path.join(java_path, 'target/dependency')):
+ print(f)
replace_jars(os.path.join(java_path, 'target/dependency'), os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'),
- ['AcqEngJ', 'NDTiffStorage', 'NDViewer'])
+ ['AcqEngJ', 'NDTiffStorage', 'NDViewer', 'PyJavaZ'])
# Copy dependency jar files if present in ../../REPO_NAME/target
for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer', 'PyJavaZ']:
+ print(f'JAR files present in {repo_name}/target:')
+ for f in os.listdir(os.path.join(java_path, f'../../{repo_name}/target')):
+ print(f)
if os.path.isdir(os.path.join(java_path, f'../../{repo_name}/target')):
replace_jars(os.path.join(java_path, f'../../{repo_name}/target'),
os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'), [repo_name])
@@ -111,6 +118,7 @@ def install_mm():
except Exception as e:
warnings.warn(f'Failed to replace JAR files: {e}')
# let this continue so python tests can still run
+
yield mm_install_dir
From 491b00d52756f39d946d0e1800f5adf35339573c Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 08:41:22 +0200
Subject: [PATCH 16/34] bump pm java version
---
java/pom.xml | 4 ++--
pycromanager/test/conftest.py | 17 ++++++++++-------
2 files changed, 12 insertions(+), 9 deletions(-)
diff --git a/java/pom.xml b/java/pom.xml
index bf999a65..237ef8ef 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -2,7 +2,7 @@
4.0.0
org.micro-manager.pycro-manager
PycroManagerJava
- 0.46.8
+ 0.46.9
jar
Pycro-Manager Java
The Java components of Pycro-Manager
@@ -154,4 +154,4 @@
-
\ No newline at end of file
+
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index b0bef5b3..22c462e4 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -43,6 +43,8 @@ def replace_jars(new_file_path, old_file_path, jar_names: list):
new_jar_name, new_jar_version = find_jar(new_file_path, jar_name)
old_jar_name, old_jar_version = find_jar(old_file_path, jar_name)
+ print('Old version:', old_jar_name, old_jar_version)
+ print('New version:', new_jar_name, new_jar_version)
if new_jar_name is not None:
# Only replace jar file if newly compiled file version is larger
if new_jar_version[0] > old_jar_version[0] or \
@@ -106,14 +108,15 @@ def install_mm():
print(f)
replace_jars(os.path.join(java_path, 'target/dependency'), os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'),
['AcqEngJ', 'NDTiffStorage', 'NDViewer', 'PyJavaZ'])
+ # Not needed because deps of deps are already included in the JARs?
# Copy dependency jar files if present in ../../REPO_NAME/target
- for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer', 'PyJavaZ']:
- print(f'JAR files present in {repo_name}/target:')
- for f in os.listdir(os.path.join(java_path, f'../../{repo_name}/target')):
- print(f)
- if os.path.isdir(os.path.join(java_path, f'../../{repo_name}/target')):
- replace_jars(os.path.join(java_path, f'../../{repo_name}/target'),
- os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'), [repo_name])
+ # for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer', 'PyJavaZ']:
+ # print(f'JAR files present in {repo_name}/target:')
+ # for f in os.listdir(os.path.join(java_path, f'../../{repo_name}/target')):
+ # print(f)
+ # if os.path.isdir(os.path.join(java_path, f'../../{repo_name}/target')):
+ # replace_jars(os.path.join(java_path, f'../../{repo_name}/target'),
+ # os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'), [repo_name])
except Exception as e:
warnings.warn(f'Failed to replace JAR files: {e}')
From e21472b97fa680c47ac6326de36f562db9a8abc2 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 09:14:48 +0200
Subject: [PATCH 17/34] update NDTiff etc
---
.github/workflows/Java_dependency_update.yml | 2 +-
build_automation/update_PycroManagerJava.py | 4 ++--
build_automation/update_dependency.py | 4 ++--
build_automation/update_mm_ivy.py | 4 ++--
4 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/.github/workflows/Java_dependency_update.yml b/.github/workflows/Java_dependency_update.yml
index 8c386aa5..f5212bbb 100644
--- a/.github/workflows/Java_dependency_update.yml
+++ b/.github/workflows/Java_dependency_update.yml
@@ -1,4 +1,4 @@
-# When NDTiff, AcqEngJ, or NDViewer update, a automatic push
+# When NDStorage, AcqEngJ, or NDViewer update, a automatic push
# to the dependency-update branch of pycro-manager will be generated
# that updates the version in their POM.xml files
# This script should then:
diff --git a/build_automation/update_PycroManagerJava.py b/build_automation/update_PycroManagerJava.py
index 374ff5fe..c82d1027 100644
--- a/build_automation/update_PycroManagerJava.py
+++ b/build_automation/update_PycroManagerJava.py
@@ -6,12 +6,12 @@
def read_versions(root):
versions = {}
versions['PycroManagerJava'] = Version(root.find("version").text)
- # iterate through the dependencies and get NDTiff, NDViewer, and AcqEngJ
+ # iterate through the dependencies and get NDStorage, NDViewer, and AcqEngJ
dependencies = root.findall(".//dependency")
for dependency in dependencies:
artifactId = dependency.find("artifactId").text
version = dependency.find("version").text
- if artifactId in ["NDTiffStorage", "NDViewer", "AcqEngJ"]:
+ if artifactId in ["NDStorage", "NDViewer", "AcqEngJ"]:
versions[artifactId] = Version(version)
return versions
diff --git a/build_automation/update_dependency.py b/build_automation/update_dependency.py
index 14ccedce..3f65a1df 100644
--- a/build_automation/update_dependency.py
+++ b/build_automation/update_dependency.py
@@ -1,6 +1,6 @@
"""
-Script for updating NDTiff/AcqEngJ/NDViewer to latest version
-It runs on the *-update branch and is called by an action in NDTiff/AcqEngJ/NDViewer repo
+Script for updating NDStorage/AcqEngJ/NDViewer to latest version
+It runs on the *-update branch and is called by an action in NDStorage/AcqEngJ/NDViewer repo
"""
import xml.etree.ElementTree as ET
diff --git a/build_automation/update_mm_ivy.py b/build_automation/update_mm_ivy.py
index c65f5980..654acd26 100644
--- a/build_automation/update_mm_ivy.py
+++ b/build_automation/update_mm_ivy.py
@@ -15,12 +15,12 @@
def read_versions(root):
versions = {}
versions['PycroManagerJava'] = Version(root.find("version").text)
- # iterate through the dependencies and get NDTiff, NDViewer, and AcqEngJ
+ # iterate through the dependencies and get NDStorage, NDViewer, and AcqEngJ
dependencies = root.findall(".//dependency")
for dependency in dependencies:
artifactId = dependency.find("artifactId").text
version = dependency.find("version").text
- if artifactId in ["NDTiffStorage", "NDViewer", "AcqEngJ"]:
+ if artifactId in ["NDStorage", "NDViewer", "AcqEngJ"]:
versions[artifactId] = Version(version)
return versions
From b0d0a5f4a8d916fa8af2098ecab490e517cd8798 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 09:17:29 +0200
Subject: [PATCH 18/34] manually update ndstorage version
---
java/pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/java/pom.xml b/java/pom.xml
index 237ef8ef..f4b35cd9 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -64,7 +64,7 @@
org.micro-manager.ndtiffstorage
- NDTiffStorage
+ NDStorage
2.17.0
From bfc9aa90ccb5c11ca5f7a66b98cf78bfcf220c30 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 09:38:35 +0200
Subject: [PATCH 19/34] revert back JAR name
---
build_automation/update_mm_ivy.py | 2 +-
pycromanager/test/conftest.py | 2 ++
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/build_automation/update_mm_ivy.py b/build_automation/update_mm_ivy.py
index 654acd26..1be2379d 100644
--- a/build_automation/update_mm_ivy.py
+++ b/build_automation/update_mm_ivy.py
@@ -20,7 +20,7 @@ def read_versions(root):
for dependency in dependencies:
artifactId = dependency.find("artifactId").text
version = dependency.find("version").text
- if artifactId in ["NDStorage", "NDViewer", "AcqEngJ"]:
+ if artifactId in ["NDTiffStorage", "NDViewer", "AcqEngJ"]:
versions[artifactId] = Version(version)
return versions
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index 22c462e4..22657d46 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -108,6 +108,8 @@ def install_mm():
print(f)
replace_jars(os.path.join(java_path, 'target/dependency'), os.path.join(mm_install_dir, 'plugins', 'Micro-Manager'),
['AcqEngJ', 'NDTiffStorage', 'NDViewer', 'PyJavaZ'])
+
+
# Not needed because deps of deps are already included in the JARs?
# Copy dependency jar files if present in ../../REPO_NAME/target
# for repo_name in ['AcqEngJ', 'NDTiffStorage', 'NDViewer', 'PyJavaZ']:
From bf9c28c0b898900125db74fde1003937bfcca2a5 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 10:14:07 +0200
Subject: [PATCH 20/34] change back NDTiffStorage.jar name
---
java/pom.xml | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/java/pom.xml b/java/pom.xml
index f4b35cd9..54a6268e 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -64,8 +64,8 @@
org.micro-manager.ndtiffstorage
- NDStorage
- 2.17.0
+ NDTiffStorage
+ 2.18.0
From 4622764cd631c0ec473c13ce3c931f79c6948a11 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 10:27:32 +0200
Subject: [PATCH 21/34] bump ndtiff version
---
requirements.txt | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/requirements.txt b/requirements.txt
index b93de60f..24771f8a 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
numpy
dask[array]>=2022.2.0
pyzmq
-ndstorage>=0.1.4
+ndstorage>=0.1.5
docstring-inheritance
pymmcore
sortedcontainers
From 6607cd0c250080260c30a180b6c50e19b29557e1 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 11:04:16 +0200
Subject: [PATCH 22/34] fix python image processor test
---
.../acq_eng_py/main/AcqEngPy_Acquisition.py | 2 +-
.../python_backend_acquisitions.py | 12 +++++++++--
pycromanager/install.py | 20 ++++++++++++++++++-
pycromanager/test/conftest.py | 5 ++++-
pycromanager/test/test_callback_functions.py | 4 ++--
5 files changed, 36 insertions(+), 7 deletions(-)
diff --git a/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py b/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py
index 9f83b916..6bc6fa90 100644
--- a/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py
+++ b/pycromanager/acquisition/acq_eng_py/main/AcqEngPy_Acquisition.py
@@ -127,7 +127,7 @@ def saving_thread(acq):
if acq.data_sink_:
if acq.debug_mode_:
acq.core_.log_message("Saving image")
- if not img.pix and not img.tags:
+ if img.tags is None and img.pix is None:
break
acq.save_image(img)
if acq.debug_mode_:
diff --git a/pycromanager/acquisition/python_backend_acquisitions.py b/pycromanager/acquisition/python_backend_acquisitions.py
index 690cf2f2..3be9b154 100644
--- a/pycromanager/acquisition/python_backend_acquisitions.py
+++ b/pycromanager/acquisition/python_backend_acquisitions.py
@@ -194,9 +194,17 @@ def _process(self):
# this is a signal to stop
self.output_queue.put(tagged_image)
break
- process_fn_result = self._pycromanager_acq._call_image_process_fn(tagged_image.tags, tagged_image.pix)
+ process_fn_result = self._pycromanager_acq._call_image_process_fn(tagged_image.pix, tagged_image.tags)
+ try:
+ self._pycromanager_acq._check_for_exceptions()
+ except Exception as e:
+ # unclear if this is functioning properly, check later
+ self._acq.abort()
if process_fn_result is not None:
- self.output_queue.put(process_fn_result)
+ # turn it into the expected tagged_image
+ # TODO: change this on later unification of acq engines
+ tagged_image.pix, tagged_image.tags = process_fn_result
+ self.output_queue.put(tagged_image)
# otherwise the image processor intercepted the image and nothing to do here
class AcquisitionHook:
diff --git a/pycromanager/install.py b/pycromanager/install.py
index e11e3d79..d860c4f0 100644
--- a/pycromanager/install.py
+++ b/pycromanager/install.py
@@ -44,6 +44,24 @@ def _find_versions():
raise ValueError(f"Unsupported OS: {platform}")
return re.findall(r'class="rowDefault" href="([^"]+)', webpage.text)
+def find_existing_mm_install():
+ """
+ Check if Micro-Manager is installed in the default auto-download paths
+
+ Returns
+ -------
+ str
+ The path to the installed Micro-Manager directory, or None if not found
+ """
+ platform = _get_platform()
+ if platform == 'Windows':
+ if os.path.isdir(r'C:\Program Files\Micro-Manager'):
+ return r'C:\Program Files\Micro-Manager'
+ elif platform == 'Mac':
+ if os.path.isdir(str(os.path.expanduser('~')) + '/Micro-Manager'):
+ return str(os.path.expanduser('~')) + '/Micro-Manager'
+ else:
+ raise ValueError(f"Unsupported OS: {platform}")
def download_and_install(destination='auto', mm_install_log_path=None):
"""
@@ -84,7 +102,7 @@ def bar(curr, total, width):
return destination
else:
if destination == 'auto':
- destination = os.path.expanduser('~') + '/Micro-Manager'
+ destination = str(os.path.expanduser('~')) + '/Micro-Manager'
try:
# unmount if already mounted
subprocess.run(['hdiutil', 'detach', '/Volumes/Micro-Manager'])
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index 22657d46..716040eb 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -15,7 +15,7 @@
from pycromanager import start_headless
from pycromanager.headless import stop_headless
import socket
-from pycromanager.install import download_and_install
+from pycromanager.install import download_and_install, find_existing_mm_install
def is_port_in_use(port):
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
@@ -61,6 +61,9 @@ def install_mm():
if is_port_in_use(4827):
print('Using Micro-manager running on port 4827 for testing')
yield
+ elif find_existing_mm_install():
+ print('Micro-Manager is already installed, skipping installation')
+ yield find_existing_mm_install()
else:
# Download an install latest nightly build
mm_install_dir = download_and_install(destination='auto')
diff --git a/pycromanager/test/test_callback_functions.py b/pycromanager/test/test_callback_functions.py
index 32d0828e..e6da2ab0 100644
--- a/pycromanager/test/test_callback_functions.py
+++ b/pycromanager/test/test_callback_functions.py
@@ -8,7 +8,7 @@
def test_img_process_fn(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=3)
- def hook_fn(image, metadata):
+ def image_proc_fn(image, metadata):
assert np.sum(image) > 0
assert isinstance(metadata, dict)
@@ -18,7 +18,7 @@ def hook_fn(image, metadata):
return image, metadata
with Acquisition(setup_data_folder, 'acq', show_display=False,
- image_process_fn=hook_fn) as acq:
+ image_process_fn=image_proc_fn) as acq:
acq.acquire(events)
dataset = acq.get_dataset()
From 7ce58adb71298049385e78997812b0f8cc1338d2 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 11:38:06 +0200
Subject: [PATCH 23/34] better error handling (but still haven't fixed test)
---
pycromanager/acquisition/RAMStorage_java.py | 3 +-
.../acquisition/java_backend_acquisitions.py | 28 +++++++++++--------
pycromanager/test/conftest.py | 8 ++++--
pycromanager/test/test_acquisition.py | 1 +
4 files changed, 26 insertions(+), 14 deletions(-)
diff --git a/pycromanager/acquisition/RAMStorage_java.py b/pycromanager/acquisition/RAMStorage_java.py
index c60b750c..6266471c 100644
--- a/pycromanager/acquisition/RAMStorage_java.py
+++ b/pycromanager/acquisition/RAMStorage_java.py
@@ -54,10 +54,11 @@ def read_image(self, channel=None, z=None, time=None, position=None, row=None, c
if key not in self._index_keys:
return None
java_hashmap = JavaObject('java.util.HashMap')
- # raise Exception
for k, v in axes.items():
java_hashmap.put(k, v)
tagged_image = self._java_RAM_data_storage.get_image(java_hashmap)
+ print('java ram storage class:', str(self._java_RAM_data_storage))
+ print('tagged image: ', str(tagged_image))
pixels = tagged_image.pix
metadata = tagged_image.tags
return pixels.reshape(metadata['Height'], metadata['Width'])
diff --git a/pycromanager/acquisition/java_backend_acquisitions.py b/pycromanager/acquisition/java_backend_acquisitions.py
index 6589bf7c..3bff99e1 100644
--- a/pycromanager/acquisition/java_backend_acquisitions.py
+++ b/pycromanager/acquisition/java_backend_acquisitions.py
@@ -190,10 +190,10 @@ def _notification_handler_fn(acquisition, notification_push_port, connected_even
monitor_socket = PullSocket(notification_push_port)
connected_event.set()
- try:
- events_finished = False
- data_sink_finished = False
- while True:
+ events_finished = False
+ data_sink_finished = False
+ while True:
+ try:
message = monitor_socket.receive()
notification = AcqNotification.from_json(message)
@@ -226,11 +226,12 @@ def _notification_handler_fn(acquisition, notification_push_port, connected_even
if events_finished and data_sink_finished:
break
- except Exception as e:
- traceback.print_exc()
- acquisition.abort(e)
- finally:
- monitor_socket.close()
+ except Exception as e:
+ traceback.print_exc()
+ acquisition.abort(e)
+ continue # perform an orderly shutdown
+
+ monitor_socket.close()
class JavaBackendAcquisition(Acquisition, metaclass=NumpyDocstringInheritanceMeta):
@@ -319,6 +320,7 @@ def __init__(
# while the acquisition is still running, and (optionally )so that a image_saved_fn can be called
# when images are written to disk/RAM storage
storage_java_class = data_sink.get_storage()
+ print(storage_java_class)
summary_metadata = storage_java_class.get_summary_metadata()
if directory is not None:
# NDTiff dataset saved to disk on Java side
@@ -380,8 +382,12 @@ def await_completion(self):
self._remote_notification_handler = None
self._acq_notification_dispatcher_thread.join()
- self._acq = None
- self._finished = True
+ try:
+ # one final check for exceptions for stuff that may have happened during shutdown
+ self._check_for_exceptions()
+ finally:
+ self._acq = None
+ self._finished = True
def get_viewer(self):
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index 716040eb..6ac79565 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -131,7 +131,9 @@ def install_mm():
-@pytest.fixture(scope="session", params=['save_to_disk', 'RAM'])
+# @pytest.fixture(scope="session", params=['save_to_disk', 'RAM'])
+@pytest.fixture(scope="session", params=['RAM'])
+
def setup_data_folder(request):
if request.param != 'save_to_disk':
yield None
@@ -145,7 +147,9 @@ def setup_data_folder(request):
shutil.rmtree(data_folder_path)
-@pytest.fixture(scope="session", params=['python_backend', 'java_backend'])
+# @pytest.fixture(scope="session", params=['python_backend', 'java_backend'])
+@pytest.fixture(scope="session", params=['java_backend'])
+
def launch_mm_headless(request, install_mm):
python_backend = request.param == 'python_backend'
mm_install_dir = install_mm
diff --git a/pycromanager/test/test_acquisition.py b/pycromanager/test/test_acquisition.py
index 77a979ad..1f7f5df4 100644
--- a/pycromanager/test/test_acquisition.py
+++ b/pycromanager/test/test_acquisition.py
@@ -77,6 +77,7 @@ def test_empty_mda_acq(launch_mm_headless, setup_data_folder):
with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
acq.acquire(events)
+
dataset = acq.get_dataset()
try:
assert dataset.axes == {}
From 59ece1bee1ff539d848a69bbd5e9434541da4440 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 14:54:33 +0200
Subject: [PATCH 24/34] update to pyjavaz with fixed concurrency issues
---
pycromanager/acquisition/RAMStorage_java.py | 2 --
pycromanager/acquisition/java_backend_acquisitions.py | 1 -
pycromanager/test/conftest.py | 8 ++------
requirements.txt | 2 +-
4 files changed, 3 insertions(+), 10 deletions(-)
diff --git a/pycromanager/acquisition/RAMStorage_java.py b/pycromanager/acquisition/RAMStorage_java.py
index 6266471c..6f54cbec 100644
--- a/pycromanager/acquisition/RAMStorage_java.py
+++ b/pycromanager/acquisition/RAMStorage_java.py
@@ -57,8 +57,6 @@ def read_image(self, channel=None, z=None, time=None, position=None, row=None, c
for k, v in axes.items():
java_hashmap.put(k, v)
tagged_image = self._java_RAM_data_storage.get_image(java_hashmap)
- print('java ram storage class:', str(self._java_RAM_data_storage))
- print('tagged image: ', str(tagged_image))
pixels = tagged_image.pix
metadata = tagged_image.tags
return pixels.reshape(metadata['Height'], metadata['Width'])
diff --git a/pycromanager/acquisition/java_backend_acquisitions.py b/pycromanager/acquisition/java_backend_acquisitions.py
index 3bff99e1..7e4d4d9c 100644
--- a/pycromanager/acquisition/java_backend_acquisitions.py
+++ b/pycromanager/acquisition/java_backend_acquisitions.py
@@ -320,7 +320,6 @@ def __init__(
# while the acquisition is still running, and (optionally )so that a image_saved_fn can be called
# when images are written to disk/RAM storage
storage_java_class = data_sink.get_storage()
- print(storage_java_class)
summary_metadata = storage_java_class.get_summary_metadata()
if directory is not None:
# NDTiff dataset saved to disk on Java side
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index 6ac79565..716040eb 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -131,9 +131,7 @@ def install_mm():
-# @pytest.fixture(scope="session", params=['save_to_disk', 'RAM'])
-@pytest.fixture(scope="session", params=['RAM'])
-
+@pytest.fixture(scope="session", params=['save_to_disk', 'RAM'])
def setup_data_folder(request):
if request.param != 'save_to_disk':
yield None
@@ -147,9 +145,7 @@ def setup_data_folder(request):
shutil.rmtree(data_folder_path)
-# @pytest.fixture(scope="session", params=['python_backend', 'java_backend'])
-@pytest.fixture(scope="session", params=['java_backend'])
-
+@pytest.fixture(scope="session", params=['python_backend', 'java_backend'])
def launch_mm_headless(request, install_mm):
python_backend = request.param == 'python_backend'
mm_install_dir = install_mm
diff --git a/requirements.txt b/requirements.txt
index 24771f8a..c84a097c 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,4 +5,4 @@ ndstorage>=0.1.5
docstring-inheritance
pymmcore
sortedcontainers
-pyjavaz>=1.2
+pyjavaz>=1.2.1
From 8e14f13e98e2b3951fcc05699c91b44a6af38ba4 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 15:16:00 +0200
Subject: [PATCH 25/34] increase memory
---
pycromanager/test/conftest.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pycromanager/test/conftest.py b/pycromanager/test/conftest.py
index 716040eb..e59fd501 100644
--- a/pycromanager/test/conftest.py
+++ b/pycromanager/test/conftest.py
@@ -162,7 +162,7 @@ def launch_mm_headless(request, install_mm):
java_loc = os.environ["JAVA"]
start_headless(mm_install_dir, config_file, java_loc=java_loc,
- buffer_size_mb=128, max_memory_mb=128, # set these low for github actions
+ buffer_size_mb=2048, max_memory_mb=2048, # raised from 128 so RAM-storage tests fit on github actions
debug=True)
yield
@@ -171,7 +171,7 @@ def launch_mm_headless(request, install_mm):
else: # python backend
config_file = os.path.join(mm_install_dir, 'MMConfig_demo.cfg')
start_headless(mm_install_dir, config_file,
- buffer_size_mb=128, max_memory_mb=128, # set these low for github actions
+ buffer_size_mb=2048, max_memory_mb=2048, # raised from 128 so RAM-storage tests fit on github actions
python_backend=True,
debug=True)
yield
From 707e88e1042abcb89eade52894b3c74334e1e0e9 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 15:17:04 +0200
Subject: [PATCH 26/34] small change to test
---
pycromanager/test/test_callback_functions.py | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pycromanager/test/test_callback_functions.py b/pycromanager/test/test_callback_functions.py
index e6da2ab0..45e0d915 100644
--- a/pycromanager/test/test_callback_functions.py
+++ b/pycromanager/test/test_callback_functions.py
@@ -39,7 +39,7 @@ def hook_fn(image, metadata):
with Acquisition(directory=None, name='acq', show_display=False, image_process_fn=hook_fn) as acq:
acq.acquire(events)
- dataset = acq.get_dataset() # Can this be moved out of the Acquisition context?
+ dataset = acq.get_dataset()
assert len(dataset.get_image_coordinates_list()) == 0
From 79fcc92cfaf89f4bf5c3764bad5d29f7f3d8f0c4 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 15:36:47 +0200
Subject: [PATCH 27/34] fix test and add better logging
---
pycromanager/test/test_acquisition.py | 62 ++++++++++----------
pycromanager/test/test_callback_functions.py | 4 +-
pycromanager/test/test_startup.py | 4 +-
3 files changed, 34 insertions(+), 36 deletions(-)
diff --git a/pycromanager/test/test_acquisition.py b/pycromanager/test/test_acquisition.py
index 1f7f5df4..f11a149d 100644
--- a/pycromanager/test/test_acquisition.py
+++ b/pycromanager/test/test_acquisition.py
@@ -19,7 +19,7 @@ def hook_fn(_events):
assert check_acq_not_sequenced(_events)
return _events
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_timelapse_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -36,7 +36,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, 10), 'Sequenced acquisition is not built correctly'
return _events
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_timelapse_seq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -51,7 +51,7 @@ def test_empty_list_acq(launch_mm_headless, setup_data_folder):
events = []
with pytest.raises(Exception):
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_empty_list_acq', show_display=False) as acq:
acq.acquire(events)
@@ -59,7 +59,7 @@ def test_empty_dict_acq(launch_mm_headless, setup_data_folder):
events = {}
with pytest.raises(Exception):
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_empty_dict_acq', show_display=False) as acq:
acq.acquire(events)
@@ -67,14 +67,14 @@ def test_empty_dict_list_acq(launch_mm_headless, setup_data_folder):
events = [{}, {}]
with pytest.raises(Exception):
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_empty_dict_list_acq', show_display=False) as acq:
acq.acquire(events)
def test_empty_mda_acq(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events()
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_empty_mda_acq', show_display=False) as acq:
acq.acquire(events)
@@ -88,7 +88,7 @@ def test_empty_mda_acq(launch_mm_headless, setup_data_folder):
def test_single_snap_acq(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=1)
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_single_snap_acq', show_display=False) as acq:
acq.acquire(events)
dataset = acq.get_dataset()
@@ -110,7 +110,7 @@ def test_multi_d_acq(launch_mm_headless, setup_data_folder):
order="tcz",
)
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_multi_d_acq', show_display=False) as acq:
acq.acquire(events)
dataset = acq.get_dataset()
@@ -141,7 +141,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, len(events)), 'Sequenced acquisition is not built correctly'
return None # no need to actually acquire the data
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_zstack_seq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -163,7 +163,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, len(events)), 'Sequenced acquisition is not built correctly'
return None # no need to actually acquire the data
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_channel_seq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -189,7 +189,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, len(events)), 'Sequenced acquisition is not built correctly'
return None # no need to actually acquire the data
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_channel_exp_seq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -214,7 +214,7 @@ def hook_fn(_events):
assert check_acq_not_sequenced(_events), 'Sequenced acquisition is not built correctly'
return _events
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_channel_noseq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -246,7 +246,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, len(events)), 'Sequenced acquisition is not built correctly'
return None # no need to actually acquire the data
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_channel_z_seq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -269,7 +269,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, len(events)), 'Sequenced acquisition is not built correctly'
return None # no need to actually acquire the data
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_z_channel_seq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -292,7 +292,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, 4), 'Sequenced acquisition is not built correctly'
return None # no need to actually acquire the data
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_channel_seq_z_noseq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -322,7 +322,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, 5), 'Sequenced acquisition is not built correctly'
return _events
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_channel_noseq_z_seq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -355,7 +355,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, len(events)), 'Sequenced acquisition is not built correctly'
return None # no need to actually acquire the data
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_time_channel_z_seq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -379,7 +379,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, len(events)), 'Sequenced acquisition is not built correctly'
return None # no need to actually acquire the data
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_time_z_channel_seq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -403,7 +403,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, 20), 'Sequenced acquisition is not built correctly'
return None # no need to actually acquire the data
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_time_noseq_z_channel_seq_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -423,7 +423,7 @@ def hook_fn(_events):
return _events
t_start = time.time()
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_time_noseq_z_seq_interval_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
t_end = time.time()
@@ -443,7 +443,7 @@ def hook_fn(_events):
mmc = Core()
mmc.set_exposure(1000)
- with Acquisition(setup_data_folder, 'acq', show_display=False, pre_hardware_hook_fn=hook_fn) as acq:
+ with Acquisition(setup_data_folder, 'test_abort_sequenced_timelapse', show_display=False, pre_hardware_hook_fn=hook_fn) as acq:
events = multi_d_acquisition_events(1000)
acq.acquire(events)
time.sleep(10)
@@ -466,7 +466,7 @@ def test_abort_with_no_events(launch_mm_headless, setup_data_folder):
Test that aborting before any events processed doesnt cause hang or exception
"""
mmc = Core()
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_abort_with_no_events', show_display=False) as acq:
acq.abort()
assert not mmc.is_sequence_running()
@@ -475,7 +475,7 @@ def test_abort_from_external(launch_mm_headless, setup_data_folder):
Simulates the acquisition being shutdown from a remote source (e.g. Xing out the viewer)
"""
with pytest.raises(AcqAlreadyCompleteException):
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_abort_from_external', show_display=False) as acq:
events = multi_d_acquisition_events(num_time_points=6)
acq.acquire(events[0])
# this simulates an abort from the java side unbeknownst to python side
@@ -498,7 +498,7 @@ def hook_fn(_events):
assert check_acq_sequenced(_events, 1000), 'Sequenced acquisition is not built correctly'
return _events
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_abort_sequenced_zstack', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
events = multi_d_acquisition_events(z_start=0, z_end=999, z_step=1)
acq.acquire(events)
@@ -523,7 +523,7 @@ def test_change_image_size(launch_mm_headless, setup_data_folder):
mmc.set_property('Camera', 'OnCameraCCDXSize', '1024')
mmc.set_property('Camera', 'OnCameraCCDYSize', '1024')
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_change_image_size', show_display=False) as acq:
events = multi_d_acquisition_events(num_time_points=5)
acq.acquire(events)
@@ -545,7 +545,7 @@ def test_change_roi(launch_mm_headless, setup_data_folder):
mmc = Core()
mmc.set_roi(*(0, 0, 100, 100))
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_change_roi', show_display=False) as acq:
events = multi_d_acquisition_events(num_time_points=5)
acq.acquire(events)
@@ -568,7 +568,7 @@ def test_change_binning(launch_mm_headless, setup_data_folder):
mmc.set_property('Camera', 'OnCameraCCDYSize', '512')
mmc.set_property('Camera', 'Binning', '2')
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_change_binning', show_display=False) as acq:
events = multi_d_acquisition_events(num_time_points=5)
acq.acquire(events)
@@ -595,7 +595,7 @@ def hook_fn(_events):
assert check_acq_not_sequenced(_events), 'Sequenced acquisition is not built correctly'
return _events
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_multiple_positions_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -622,7 +622,7 @@ def hook_fn(_events):
assert check_acq_not_sequenced(_events), 'Sequenced acquisition is not built correctly'
return _events
- with Acquisition(setup_data_folder, 'acq', show_display=False,
+ with Acquisition(setup_data_folder, 'test_multiple_labeled_positions_acq', show_display=False,
pre_hardware_hook_fn=hook_fn) as acq:
acq.acquire(events)
@@ -645,7 +645,7 @@ def test_multi_channel_parsing(launch_mm_headless, setup_data_folder):
channels=["DAPI", "FITC"],
)
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_multi_channel_parsing', show_display=False) as acq:
acq.acquire(events)
dataset = acq.get_dataset()
@@ -660,7 +660,7 @@ def test_empty_axes(launch_mm_headless, setup_data_folder):
Test that images with empty axes are correctly saved
"""
- with Acquisition(setup_data_folder, 'acq', show_display=False) as acq:
+ with Acquisition(setup_data_folder, 'test_empty_axes', show_display=False) as acq:
acq.acquire({'axes': {}})
dataset = acq.get_dataset()
diff --git a/pycromanager/test/test_callback_functions.py b/pycromanager/test/test_callback_functions.py
index 45e0d915..d4710b18 100644
--- a/pycromanager/test/test_callback_functions.py
+++ b/pycromanager/test/test_callback_functions.py
@@ -37,7 +37,7 @@ def test_img_process_fn_no_save(launch_mm_headless):
def hook_fn(image, metadata):
return None
- with Acquisition(directory=None, name='acq', show_display=False, image_process_fn=hook_fn) as acq:
+ with Acquisition(directory=None, name='test_img_process_fn_no_save', show_display=False, image_process_fn=hook_fn) as acq:
acq.acquire(events)
dataset = acq.get_dataset()
@@ -54,7 +54,7 @@ def saved(_axis, _dataset):
saved.num_saved += 1
saved.num_saved = 0
- with Acquisition(directory=setup_data_folder, name="acq",
+ with Acquisition(directory=setup_data_folder, name="test_img_process_fn_image_saved_fn_consistency",
image_saved_fn=saved, image_process_fn=processed,
show_display=False) as acq:
acq.acquire(multi_d_acquisition_events(num_time_points=200))
diff --git a/pycromanager/test/test_startup.py b/pycromanager/test/test_startup.py
index 7d4fbbae..9dafe6fd 100644
--- a/pycromanager/test/test_startup.py
+++ b/pycromanager/test/test_startup.py
@@ -2,6 +2,4 @@
from pycromanager import Core
def test_connect_to_core(launch_mm_headless):
- mmc = Core()
-
- assert mmc._java_class == 'mmcorej.CMMCore'
+ mmc = Core()
\ No newline at end of file
From b658269187d737141321111a526487cef27c00e2 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 15:54:19 +0200
Subject: [PATCH 28/34] logging
---
pycromanager/test/test_notifications.py | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/pycromanager/test/test_notifications.py b/pycromanager/test/test_notifications.py
index deb106f2..d2e21d66 100644
--- a/pycromanager/test/test_notifications.py
+++ b/pycromanager/test/test_notifications.py
@@ -18,21 +18,21 @@
def test_async_image_read(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=10, time_interval_s=0.5)
- with Acquisition(directory=setup_data_folder, show_display=False) as acq:
+ with Acquisition(directory=setup_data_folder, name='test_async_image_read', show_display=False) as acq:
future = acq.acquire(events)
image = future.await_image_saved({'time': 5}, return_image=True)
assert np.all(image == acq.get_dataset().read_image(time=5))
def test_async_image_read_sequence(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=10, time_interval_s=0)
- with Acquisition(directory=setup_data_folder, show_display=False) as acq:
+ with Acquisition(directory=setup_data_folder, name='test_async_image_read_sequence', show_display=False) as acq:
future = acq.acquire(events)
image = future.await_image_saved({'time': 5}, return_image=True)
assert np.all(image == acq.get_dataset().read_image(time=5))
def test_async_images_read(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=10, time_interval_s=0.5)
- with Acquisition(directory=setup_data_folder, show_display=False) as acq:
+ with Acquisition(directory=setup_data_folder, name='test_async_images_read', show_display=False) as acq:
future = acq.acquire(events)
images = future.await_image_saved([{'time': 7}, {'time': 8}, {'time': 9}], return_image=True)
assert (len(images) == 3)
@@ -43,7 +43,7 @@ def test_async_images_read(launch_mm_headless, setup_data_folder):
def test_async_images_read_sequence(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=10, time_interval_s=0)
- with Acquisition(directory=setup_data_folder, show_display=False) as acq:
+ with Acquisition(directory=setup_data_folder, name='test_async_images_read_sequence', show_display=False) as acq:
future = acq.acquire(events)
images = future.await_image_saved([{'time': 7}, {'time': 8}, {'time': 9}], return_image=True)
assert (len(images) == 3)
From fc76a86a4e26a48d45edca48134d28d3b008f898 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 16:08:25 +0200
Subject: [PATCH 29/34] add explicit closing to fix test
---
pycromanager/test/test_notifications.py | 1 +
1 file changed, 1 insertion(+)
diff --git a/pycromanager/test/test_notifications.py b/pycromanager/test/test_notifications.py
index d2e21d66..53b5b9fc 100644
--- a/pycromanager/test/test_notifications.py
+++ b/pycromanager/test/test_notifications.py
@@ -51,4 +51,5 @@ def test_async_images_read_sequence(launch_mm_headless, setup_data_folder):
# Make sure the returned images were the correct ones
on_disk = [acq.get_dataset().read_image(time=t) for t in [7, 8, 9]]
assert all([np.all(on_disk[i] == images[i]) for i in range(3)])
+ acq.get_dataset().close()
From c4f325da704099142de59e65ebc7308535990234 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 22:15:50 +0200
Subject: [PATCH 30/34] close datasets
---
pycromanager/test/test_callback_functions.py | 3 +++
pycromanager/test/test_notifications.py | 2 +-
2 files changed, 4 insertions(+), 1 deletion(-)
diff --git a/pycromanager/test/test_callback_functions.py b/pycromanager/test/test_callback_functions.py
index d4710b18..e6e802b9 100644
--- a/pycromanager/test/test_callback_functions.py
+++ b/pycromanager/test/test_callback_functions.py
@@ -42,6 +42,7 @@ def hook_fn(image, metadata):
dataset = acq.get_dataset()
assert len(dataset.get_image_coordinates_list()) == 0
+ dataset.close()
def test_img_process_fn_image_saved_fn_consistency(launch_mm_headless, setup_data_folder):
@@ -61,6 +62,7 @@ def saved(_axis, _dataset):
assert(processed.num_processed == 200)
assert(saved.num_saved == 200)
+ acq.get_dataset().close()
def test_event_serialize_and_deserialize(launch_mm_headless):
"""
@@ -94,5 +96,6 @@ def hook_fn(event):
events_copy = [e for e in events]
for test_event in events:
acq.acquire(test_event)
+ acq.get_dataset().close()
diff --git a/pycromanager/test/test_notifications.py b/pycromanager/test/test_notifications.py
index 53b5b9fc..481b58a6 100644
--- a/pycromanager/test/test_notifications.py
+++ b/pycromanager/test/test_notifications.py
@@ -51,5 +51,5 @@ def test_async_images_read_sequence(launch_mm_headless, setup_data_folder):
# Make sure the returned images were the correct ones
on_disk = [acq.get_dataset().read_image(time=t) for t in [7, 8, 9]]
assert all([np.all(on_disk[i] == images[i]) for i in range(3)])
- acq.get_dataset().close()
+ # acq.get_dataset().close()
From 78eaee0dbe75ba8498669e73f00977f51694ab40 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 22:37:51 +0200
Subject: [PATCH 31/34] close datasets
---
pycromanager/test/test_notifications.py | 5 ++++-
1 file changed, 4 insertions(+), 1 deletion(-)
diff --git a/pycromanager/test/test_notifications.py b/pycromanager/test/test_notifications.py
index 481b58a6..0b725f19 100644
--- a/pycromanager/test/test_notifications.py
+++ b/pycromanager/test/test_notifications.py
@@ -22,6 +22,7 @@ def test_async_image_read(launch_mm_headless, setup_data_folder):
future = acq.acquire(events)
image = future.await_image_saved({'time': 5}, return_image=True)
assert np.all(image == acq.get_dataset().read_image(time=5))
+ acq.get_dataset().close()
def test_async_image_read_sequence(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=10, time_interval_s=0)
@@ -29,6 +30,7 @@ def test_async_image_read_sequence(launch_mm_headless, setup_data_folder):
future = acq.acquire(events)
image = future.await_image_saved({'time': 5}, return_image=True)
assert np.all(image == acq.get_dataset().read_image(time=5))
+ acq.get_dataset().close()
def test_async_images_read(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=10, time_interval_s=0.5)
@@ -40,6 +42,7 @@ def test_async_images_read(launch_mm_headless, setup_data_folder):
# Make sure the returned images were the correct ones
on_disk = [acq.get_dataset().read_image(time=t) for t in [7, 8, 9]]
assert all([np.all(on_disk[i] == images[i]) for i in range(3)])
+ acq.get_dataset().close()
def test_async_images_read_sequence(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=10, time_interval_s=0)
@@ -51,5 +54,5 @@ def test_async_images_read_sequence(launch_mm_headless, setup_data_folder):
# Make sure the returned images were the correct ones
on_disk = [acq.get_dataset().read_image(time=t) for t in [7, 8, 9]]
assert all([np.all(on_disk[i] == images[i]) for i in range(3)])
- # acq.get_dataset().close()
+ acq.get_dataset().close()
From 75d70e89612f36715644d59877e5104f8178e329 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Fri, 14 Jun 2024 23:34:46 +0200
Subject: [PATCH 32/34] possible test fix
---
pycromanager/test/test_callback_functions.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pycromanager/test/test_callback_functions.py b/pycromanager/test/test_callback_functions.py
index e6e802b9..764d2869 100644
--- a/pycromanager/test/test_callback_functions.py
+++ b/pycromanager/test/test_callback_functions.py
@@ -31,7 +31,7 @@ def image_proc_fn(image, metadata):
dataset.close()
-def test_img_process_fn_no_save(launch_mm_headless):
+def test_img_process_fn_no_save(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=3)
def hook_fn(image, metadata):
@@ -64,7 +64,7 @@ def saved(_axis, _dataset):
assert(saved.num_saved == 200)
acq.get_dataset().close()
-def test_event_serialize_and_deserialize(launch_mm_headless):
+def test_event_serialize_and_deserialize(launch_mm_headless, setup_data_folder):
"""
Test for cycle consistency of event serialization and deserialization.
"""
From f60d5e1b9ca41e59d5db4919f00163b656f0e267 Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Sat, 15 Jun 2024 19:19:13 +0200
Subject: [PATCH 33/34] fix test
---
.../acquisition/acq_eng_py/internal/engine.py | 2 +-
.../acq_eng_py/main/acquisition_event.py | 14 +++++++-------
.../acquisition/python_backend_acquisitions.py | 8 +++++++-
pycromanager/test/test_callback_functions.py | 12 +++++-------
4 files changed, 20 insertions(+), 16 deletions(-)
diff --git a/pycromanager/acquisition/acq_eng_py/internal/engine.py b/pycromanager/acquisition/acq_eng_py/internal/engine.py
index 7b96a323..6018d783 100644
--- a/pycromanager/acquisition/acq_eng_py/internal/engine.py
+++ b/pycromanager/acquisition/acq_eng_py/internal/engine.py
@@ -104,7 +104,7 @@ def process_acquisition_event_inner():
try:
self.check_for_default_devices(event)
if event.acquisition_.is_debug_mode():
- self.core.logMessage("Processing event: " + event.to_string())
+ self.core.logMessage("Processing event: " + str(event))
self.core.logMessage("checking for sequencing")
if not self.sequenced_events and not event.is_acquisition_sequence_end_event():
self.sequenced_events.append(event)
diff --git a/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py b/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py
index 03310725..22d7601d 100644
--- a/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py
+++ b/pycromanager/acquisition/acq_eng_py/main/acquisition_event.py
@@ -152,8 +152,8 @@ def event_from_json(data, acq):
if "min_start_time" in data:
event.miniumumStartTime_ms_ = int(data["min_start_time"] * 1000)
- if "timeout" in data:
- event.timeout_ms_ = float(data["timeout"])
+ if "timeout_ms" in data:
+ event.timeout_ms_ = float(data["timeout_ms"])
if "config_group" in data:
event.configGroup_ = data["config_group"][0]
@@ -162,12 +162,12 @@ def event_from_json(data, acq):
if "exposure" in data:
event.exposure_ = float(data["exposure"])
- if "timeout_ms" in data:
- event.slmImage_ = float(data["timeout_ms"])
+ # if "timeout_ms" in data:
+ # event.slmImage_ = float(data["timeout_ms"])
if "stage_positions" in data:
for stagePos in data["stage_positions"]:
- event.setStageCoordinate(stagePos[0], stagePos[1])
+ event.set_stage_coordinate(stagePos[0], stagePos[1])
if "z" in data:
event.zPosition_ = float(data["z"])
@@ -423,9 +423,9 @@ def get_tags(self):
return dict(self.tags_)
def __str__(self):
- if self.specialFlag_ == AcquisitionEvent.SpecialFlag.AcquisitionFinished:
+ if self.specialFlag_ == AcquisitionEvent.SpecialFlag.ACQUISITION_FINISHED:
return "Acq finished event"
- elif self.specialFlag_ == AcquisitionEvent.SpecialFlag.AcquisitionSequenceEnd:
+ elif self.specialFlag_ == AcquisitionEvent.SpecialFlag.ACQUISITION_SEQUENCE_END:
return "Acq sequence end event"
builder = []
diff --git a/pycromanager/acquisition/python_backend_acquisitions.py b/pycromanager/acquisition/python_backend_acquisitions.py
index 3be9b154..070adc93 100644
--- a/pycromanager/acquisition/python_backend_acquisitions.py
+++ b/pycromanager/acquisition/python_backend_acquisitions.py
@@ -6,6 +6,7 @@
from pycromanager.acq_future import AcqNotification
import threading
from inspect import signature
+import traceback
from ndstorage.ndram_dataset import NDRAMDataset
from ndstorage.ndtiff_dataset import NDTiffDataset
@@ -219,7 +220,12 @@ def run(self, event):
if AcquisitionEvent.is_acquisition_finished_event(event):
return event
acq = event.acquisition_
- output = self._hook_fn(event.to_json())
+ try:
+ output = self._hook_fn(event.to_json())
+ except Exception as e:
+ acq.abort()
+ traceback.print_exc()
+ return # cancel event and let the shutdown process handle the exception
if output is not None:
return AcquisitionEvent.from_json(output, acq)
diff --git a/pycromanager/test/test_callback_functions.py b/pycromanager/test/test_callback_functions.py
index 764d2869..993f1439 100644
--- a/pycromanager/test/test_callback_functions.py
+++ b/pycromanager/test/test_callback_functions.py
@@ -31,13 +31,13 @@ def image_proc_fn(image, metadata):
dataset.close()
-def test_img_process_fn_no_save(launch_mm_headless, setup_data_folder):
+def test_img_process_fn_no_save(launch_mm_headless):
events = multi_d_acquisition_events(num_time_points=3)
def hook_fn(image, metadata):
return None
- with Acquisition(directory=None, name='test_img_process_fn_no_save', show_display=False, image_process_fn=hook_fn) as acq:
+ with Acquisition(name='test_img_process_fn_no_save', show_display=False, image_process_fn=hook_fn) as acq:
acq.acquire(events)
dataset = acq.get_dataset()
@@ -64,7 +64,7 @@ def saved(_axis, _dataset):
assert(saved.num_saved == 200)
acq.get_dataset().close()
-def test_event_serialize_and_deserialize(launch_mm_headless, setup_data_folder):
+def test_event_serialize_and_deserialize(launch_mm_headless):
"""
Test for cycle consistency of event serialization and deserialization.
"""
@@ -82,13 +82,11 @@ def test_event_serialize_and_deserialize(launch_mm_headless, setup_data_folder):
'properties': [['DeviceName', 'PropertyName', 'PropertyValue']]},
{'axes': {'z': 1},
'stage_positions': [['ZDeviceName', 123.45]]},
- {'axes': {'time': 2},
- 'timeout': 1000},
]
def hook_fn(event):
- test_event = events.pop(0)
- assert (event == test_event)
+ test_event = events_copy.pop(0)
+ assert event == test_event
return None # cancel the event
with Acquisition(show_display=False, pre_hardware_hook_fn=hook_fn) as acq:
From 4714cb7623ceda0d6b90c77dd33820fd6e95339c Mon Sep 17 00:00:00 2001
From: Henry Pinkard <7969470+henrypinkard@users.noreply.github.com>
Date: Sat, 15 Jun 2024 19:34:46 +0200
Subject: [PATCH 34/34] try restoring folder
---
pycromanager/test/test_callback_functions.py | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/pycromanager/test/test_callback_functions.py b/pycromanager/test/test_callback_functions.py
index 993f1439..123a227b 100644
--- a/pycromanager/test/test_callback_functions.py
+++ b/pycromanager/test/test_callback_functions.py
@@ -31,7 +31,7 @@ def image_proc_fn(image, metadata):
dataset.close()
-def test_img_process_fn_no_save(launch_mm_headless):
+def test_img_process_fn_no_save(launch_mm_headless, setup_data_folder):
events = multi_d_acquisition_events(num_time_points=3)
def hook_fn(image, metadata):
@@ -64,7 +64,7 @@ def saved(_axis, _dataset):
assert(saved.num_saved == 200)
acq.get_dataset().close()
-def test_event_serialize_and_deserialize(launch_mm_headless):
+def test_event_serialize_and_deserialize(launch_mm_headless, setup_data_folder):
"""
Test for cycle consistency of event serialization and deserialization.
"""