diff --git a/build_automation/update_dependency.py b/build_automation/update_dependency.py
index 3f65a1df..145516e3 100644
--- a/build_automation/update_dependency.py
+++ b/build_automation/update_dependency.py
@@ -11,13 +11,17 @@
import time
dep_name = sys.argv[1]
+if len(sys.argv) == 3: # NDTiffStorage.jar comes from NDStorage repo
+ repo_name = sys.argv[2]
+else:
+ repo_name = dep_name
git_repos_dir = str(Path(__file__).parent.parent.parent) + '/'
-if('java' in os.listdir(git_repos_dir + dep_name)):
- pom_path = git_repos_dir + dep_name + '/java/pom.xml'
+if('java' in os.listdir(git_repos_dir + repo_name)):
+ pom_path = git_repos_dir + repo_name + '/java/pom.xml'
else:
- pom_path = git_repos_dir + dep_name + '/pom.xml'
+ pom_path = git_repos_dir + repo_name + '/pom.xml'
# Get the latest version number
diff --git a/java/pom.xml b/java/pom.xml
index e1c7d700..6a0bb800 100644
--- a/java/pom.xml
+++ b/java/pom.xml
@@ -2,7 +2,7 @@
4.0.0
org.micro-manager.pycro-manager
PycroManagerJava
- 0.46.13
+ 0.46.16
jar
Pycro-Manager Java
The Java components of Pycro-Manager
@@ -55,7 +55,7 @@
org.micro-manager.acqengj
AcqEngJ
- 0.38.0
+ 0.38.1
org.micro-manager.ndviewer
@@ -65,7 +65,7 @@
org.micro-manager.ndtiffstorage
NDTiffStorage
- 2.18.2
+ 2.18.4
@@ -133,13 +133,7 @@
true
20
-
+
@@ -154,4 +148,4 @@
-
+
\ No newline at end of file
diff --git a/pycromanager/_version.py b/pycromanager/_version.py
index db276572..d7681796 100644
--- a/pycromanager/_version.py
+++ b/pycromanager/_version.py
@@ -1,2 +1,2 @@
-version_info = (0, 34, 4)
+version_info = (0, 34, 8)
__version__ = ".".join(map(str, version_info))
diff --git a/pycromanager/acquisition/java_backend_acquisitions.py b/pycromanager/acquisition/java_backend_acquisitions.py
index 9f24e232..da5c0503 100644
--- a/pycromanager/acquisition/java_backend_acquisitions.py
+++ b/pycromanager/acquisition/java_backend_acquisitions.py
@@ -44,7 +44,7 @@ def _run_acq_event_source(acquisition, event_port, event_queue, debug=False):
# Initiate the normal shutdown process
if not acquisition._acq.is_finished():
# if it has been finished through something happening on the other side
- event_socket.send({"event_implementations": [{"special": "acquisition-end"}]})
+ event_socket.send({"events": [{"special": "acquisition-end"}]})
# wait for signal that acquisition has received the end signal
while not acquisition._acq.is_finished():
acquisition._acq.block_until_events_finished(0.01)
@@ -55,9 +55,9 @@ def _run_acq_event_source(acquisition, event_port, event_queue, debug=False):
break
# TODO in theory it could be aborted in between the check above and sending below,
# maybe consider putting a timeout on the send?
- event_socket.send({"event_implementations": events if type(events) == list else [events]})
+ event_socket.send({"events": events if type(events) == list else [events]})
if debug:
- logger.debug("sent event_implementations")
+ logger.debug("sent events")
except Exception as e:
acquisition.abort(e)
finally:
@@ -81,8 +81,8 @@ def _run_acq_hook(acquisition, pull_port,
pull_socket.close()
return
else:
- if "event_implementations" in event_msg.keys():
- event_msg = event_msg["event_implementations"] # convert from sequence
+ if "events" in event_msg.keys():
+ event_msg = event_msg["events"] # convert from sequence
params = signature(hook_fn).parameters
if len(params) == 1 or len(params) == 2:
try:
@@ -100,7 +100,7 @@ def _run_acq_hook(acquisition, pull_port,
if isinstance(new_event_msg, list):
new_event_msg = {
- "event_implementations": new_event_msg
+ "events": new_event_msg
} # convert back to the expected format for a sequence
push_socket.send(new_event_msg)
@@ -159,7 +159,7 @@ def process_and_sendoff(image_tags_tuple, original_dtype):
while True:
message = None
while message is None:
- message = pull_socket.receive(timeout=30, suppress_debug_message=True) # check for execution_engine message
+ message = pull_socket.receive(timeout=30, suppress_debug_message=True) # check for new message
if "special" in message and message["special"] == "finished":
pull_socket.close()
@@ -198,16 +198,17 @@ def _notification_handler_fn(acquisition, notification_push_port, connected_even
notification = AcqNotification.from_json(message)
# these are processed seperately to handle image saved callback
- # Decode the Data storage_implementations class-specific notification
+ # Decode the Data storage class-specific notification
if AcqNotification.is_image_saved_notification(notification): # it was saved to RAM, not disk
if not notification.is_data_sink_finished_notification():
- # check if NDTiff data storage_implementations used
- if acquisition._directory is not None:
+ # check if NDTiff data storage used
+ if acquisition._directory is not None or isinstance(acquisition, MagellanAcquisition) or \
+ isinstance(acquisition, XYTiledAcquisition):
index_entry = notification.payload.encode('ISO-8859-1')
axes = acquisition._dataset.add_index_entry(index_entry)
# swap the notification.payload from the byte array of index information to axes
notification.payload = axes
- else: # RAM storage_implementations
+ else: # RAM storage
axes = json.loads(notification.payload)
acquisition._dataset.add_available_axes(axes)
notification.payload = axes
@@ -300,12 +301,7 @@ def __init__(
warnings.warn('Could not create acquisition notification handler. '
'Update Micro-Manager and Pyrcro-Manager to the latest versions to fix this')
- # Start remote acquisition
- # Acquistition.start is now deprecated, so this can be removed later
- # Acquisitions now get started automatically when the first event_implementations submitted
- # but Magellan acquisitons (and probably others that generate their own event_implementations)
- # will need some execution_engine method to submit event_implementations only after image processors etc have been added
- self._acq.start()
+
self._dataset_disk_location = (
self._acq.get_data_sink().get_storage().get_disk_location()
if self._acq.get_data_sink() is not None
@@ -314,14 +310,14 @@ def __init__(
self._start_events()
- # Load remote storage_implementations
+ # Load remote storage
data_sink = self._acq.get_data_sink()
# load a view of the dataset in progress. This is used so that acq.get_dataset() can be called
# while the acquisition is still running, and (optionally )so that a image_saved_fn can be called
- # when images are written to disk/RAM storage_implementations
+ # when images are written to disk/RAM storage
storage_java_class = data_sink.get_storage()
summary_metadata = storage_java_class.get_summary_metadata()
- if directory is not None:
+ if directory is not None or isinstance(self, MagellanAcquisition) or isinstance(self, XYTiledAcquisition):
# NDTiff dataset saved to disk on Java side
self._dataset = Dataset(dataset_path=self._dataset_disk_location, summary_metadata=summary_metadata)
else:
@@ -350,7 +346,7 @@ def __init__(
def await_completion(self):
try:
- while not self._acq._are_events_finished() or (
+ while not self._acq.are_events_finished() or (
self._acq.get_data_sink() is not None and not self._acq.get_data_sink().is_finished()):
self._check_for_exceptions()
self._acq.block_until_events_finished(0.01)
@@ -364,10 +360,6 @@ def await_completion(self):
if hasattr(self, '_event_thread'):
self._event_thread.join()
- # need to do this so its _Bridge can be garbage collected and a reference to the JavaBackendAcquisition
- # does not prevent Bridge cleanup and process exiting
- self._remote_acq = None
-
# Wait on all the other threads to shut down properly
if hasattr(self, '_storage_monitor_thread'):
self._storage_monitor_thread.join()
@@ -395,10 +387,6 @@ def get_viewer(self):
else:
return self._napari_viewer
- def abort(self, exception=None):
- self._exception = exception
- self._acq.abort()
-
######## Private methods ###########
def _start_receiving_notifications(self):
"""
@@ -436,9 +424,6 @@ def _check_for_exceptions(self):
if self._exception is not None:
raise self._exception
- def _are_events_finished(self):
- return self._acq.are_events_finished()
-
def _start_events(self, **kwargs):
self.event_port = self._acq.get_event_port()
@@ -459,7 +444,7 @@ def _initialize_image_processor(self, **kwargs):
self._acq.add_image_processor(java_processor)
self._processor_thread = self._start_processor(
java_processor, kwargs['image_process_fn'],
- # Some acquisitions (e.g. Explore acquisitions) create event_implementations on Java side
+ # Some acquisitions (e.g. Explore acquisitions) create events on Java side
self._event_queue if hasattr(self, '_event_queue') else None,
process=False)
@@ -498,8 +483,8 @@ def _initialize_hooks(self, **kwargs):
def _create_remote_acquisition(self, **kwargs):
core = ZMQRemoteMMCoreJ(port=self._port, timeout=self._timeout, debug=self._debug)
acq_factory = JavaObject("org.micromanager.remote.RemoteAcquisitionFactory",
- # create a execution_engine socket for it to run on so that it can have blocking calls without interfering with
- # the main socket or other kernel sockets
+ # create a new socket for it to run on so that it can have blocking calls without interfering with
+ # the main socket or other internal sockets
new_socket=True,
port=self._port, args=[core], debug=self._debug, timeout=self._timeout)
show_viewer = kwargs['show_display'] is True and kwargs['napari_viewer'] is None
@@ -640,6 +625,7 @@ def __init__(
l = locals()
named_args = {arg_name: l[arg_name] for arg_name in arg_names}
super().__init__(**named_args)
+ self._acq.start()
def _create_remote_acquisition(self, port, **kwargs):
core = ZMQRemoteMMCoreJ(port=self._port, timeout=self._timeout)
@@ -655,7 +641,7 @@ def _create_remote_acquisition(self, port, **kwargs):
x_overlap = self.tile_overlap
y_overlap = self.tile_overlap
- self._remote_acq = acq_factory.create_tiled_acquisition(
+ self._acq = acq_factory.create_tiled_acquisition(
kwargs['directory'],
kwargs['name'],
show_viewer,
@@ -670,7 +656,7 @@ def _create_remote_acquisition(self, port, **kwargs):
class ExploreAcquisition(JavaBackendAcquisition):
"""
Launches a user interface for an "Explore Acquisition"--a type of XYTiledAcquisition
- in which acquisition event_implementations come from the user dynamically driving the stage and selecting
+ in which acquisition events come from the user dynamically driving the stage and selecting
areas to image
"""
@@ -717,6 +703,7 @@ def __init__(
l = locals()
named_args = {arg_name: l[arg_name] for arg_name in arg_names}
super().__init__(**named_args)
+ self._acq.start()
def _create_remote_acquisition(self, port, **kwargs):
if type(self.tile_overlap) is tuple:
@@ -727,7 +714,7 @@ def _create_remote_acquisition(self, port, **kwargs):
ui_class = JavaClass('org.micromanager.explore.ExploreAcqUIAndStorage')
ui = ui_class.create(kwargs['directory'], kwargs['name'], x_overlap, y_overlap, self.z_step_um, self.channel_group)
- self._remote_acq = ui.get_acquisition()
+ self._acq = ui.get_acquisition()
def _start_events(self, **kwargs):
pass # These come from the user
@@ -774,6 +761,7 @@ def __init__(
l = locals()
named_args = {arg_name: l[arg_name] for arg_name in arg_names}
super().__init__(**named_args)
+ self._acq.start()
def _start_events(self, **kwargs):
pass # Magellan handles this on Java side
@@ -784,7 +772,7 @@ def _create_event_queue(self, **kwargs):
def _create_remote_acquisition(self, **kwargs):
magellan_api = Magellan()
if self.magellan_acq_index is not None:
- self._remote_acq = magellan_api.create_acquisition(self.magellan_acq_index, False)
+ self._acq = magellan_api.create_acquisition(self.magellan_acq_index, False)
elif self.magellan_explore:
- self._remote_acq = magellan_api.create_explore_acquisition(False)
+ self._acq = magellan_api.create_explore_acquisition(False)
self._event_queue = None
diff --git a/pycromanager/install.py b/pycromanager/install.py
index d860c4f0..addb478d 100644
--- a/pycromanager/install.py
+++ b/pycromanager/install.py
@@ -14,6 +14,26 @@
MM_DOWNLOAD_URL_MAC = MM_DOWNLOAD_URL_BASE + '/nightly/2.0/Mac'
MM_DOWNLOAD_URL_WINDOWS = MM_DOWNLOAD_URL_BASE + '/nightly/2.0/Windows'
+def _get_download_url(ci_build=False):
+ """
+ Get the download URL for the latest nightly build of Micro-Manager
+
+ Returns
+ -------
+ str
+ The URL to the latest nightly build
+ """
+ platform = _get_platform()
+ if platform == 'Windows':
+ url = MM_DOWNLOAD_URL_WINDOWS
+ elif platform == 'Mac':
+ url = MM_DOWNLOAD_URL_MAC
+ else:
+ raise ValueError(f"Unsupported OS: {platform}")
+ if ci_build:
+ url = url.replace('nightly', 'ci')
+ return url
+
def _get_platform():
"""
Get the platform of the system
@@ -30,18 +50,12 @@ def _get_platform():
else:
raise ValueError(f"Unsupported OS: {sys.platform}")
-def _find_versions():
+def _find_versions(ci_build=False):
"""
- Find all available versions of Micro-Manager nightly builds
+ Find all available versions of Micro-Manager builds
"""
- platform = _get_platform()
# Get the webpage
- if platform == 'Windows':
- webpage = requests.get(MM_DOWNLOAD_URL_WINDOWS)
- elif platform == 'Mac':
- webpage = requests.get(MM_DOWNLOAD_URL_MAC)
- else:
- raise ValueError(f"Unsupported OS: {platform}")
+ webpage = requests.get(_get_download_url(ci_build))
return re.findall(r'class="rowDefault" href="([^"]+)', webpage.text)
def find_existing_mm_install():
@@ -63,7 +77,7 @@ def find_existing_mm_install():
else:
raise ValueError(f"Unsupported OS: {platform}")
-def download_and_install(destination='auto', mm_install_log_path=None):
+def download_and_install(destination='auto', mm_install_log_path=None, ci_build=False):
"""
Download and install the latest nightly build of Micro-Manager
@@ -71,6 +85,10 @@ def download_and_install(destination='auto', mm_install_log_path=None):
----------
destination : str
The directory to install Micro-Manager to. If 'auto', it will install to the user's home directory.
+ mm_install_log_path : str
+ Path to save the installation log to
+ ci_build : bool
+ If True, download the latest CI build instead of nightly build
Returns
-------
@@ -80,7 +98,7 @@ def download_and_install(destination='auto', mm_install_log_path=None):
windows = _get_platform() == 'Windows'
platform = 'Windows' if windows else 'Mac'
installer = 'mm_installer.exe' if windows else 'mm_installer.dmg'
- latest_version = MM_DOWNLOAD_URL_BASE + _find_versions()[0]
+ latest_version = _get_download_url(ci_build) + '/' + _find_versions(ci_build)[0].split('/')[-1]
# make a progress bar that updates every 0.5 seconds
def bar(curr, total, width):
if not hasattr(bar, 'last_update'):
@@ -88,6 +106,7 @@ def bar(curr, total, width):
if curr / total*100 - bar.last_update > 0.5:
print(f"\rDownloading installer: {curr / total*100:.2f}%", end='')
bar.last_update = curr / total*100
+ print('Downloading: ', latest_version)
wget.download(latest_version, out=installer, bar=bar)
if windows:
diff --git a/pycromanager/test/test_acquisition.py b/pycromanager/test/test_acquisition.py
index 9398f7dd..890797f4 100644
--- a/pycromanager/test/test_acquisition.py
+++ b/pycromanager/test/test_acquisition.py
@@ -300,7 +300,7 @@ def hook_fn(_events):
def test_channel_noseq_z_seq_acq(launch_mm_headless, setup_data_folder):
"""
Test that z-steps can be sequenced even if channels are not sequenced in TPCZ order acquisitions.
- Also integration_tests that channels exposure times are set correctly
+ Also test that channels exposure times are set correctly
"""
channels = ['DAPI', 'FITC', 'Rhodamine', 'Cy5']
@@ -463,27 +463,31 @@ def hook_fn(_events):
def test_abort_with_no_events(launch_mm_headless, setup_data_folder):
"""
- Test that aborting before any event_implementations processed doesnt cause hang or exception
+ Test that aborting before any events processed doesnt cause hang or exception
"""
mmc = Core()
with Acquisition(setup_data_folder, 'test_abort_with_no_events', show_display=False) as acq:
acq.abort()
assert not mmc.is_sequence_running()
-
-def test_abort_from_external(launch_mm_headless, setup_data_folder):
- """
- Simulates the acquisition being shutdown from a remote source (e.g. Xing out the viewer)
- """
- with pytest.raises(AcqAlreadyCompleteException):
- with Acquisition(setup_data_folder, 'test_abort_from_external', show_display=False) as acq:
- events = multi_d_acquisition_events(num_time_points=6)
- acq.acquire(events[0])
- # this simulates an abort from the java side unbeknownst to python side
- # it comes from a execution_engine thread so it is non-blocking to the port
- acq._acq.abort()
- for event in events[1:]:
- acq.acquire(event)
- time.sleep(5)
+ acq.get_dataset().close()
+
+# def test_abort_from_external(launch_mm_headless, setup_data_folder):
+# """
+# Simulates the acquisition being shutdown from a remote source (e.g. Xing out the viewer)
+# """
+# with pytest.raises(AcqAlreadyCompleteException):
+# try:
+# with Acquisition(setup_data_folder, 'test_abort_from_external', show_display=False) as acq:
+# events = multi_d_acquisition_events(num_time_points=6)
+# acq.acquire(events[0])
+# # this simulates an abort from the java side unbeknownst to python side
+# # it comes from a new thread so it is non-blocking to the port
+# acq._acq.abort()
+# for event in events[1:]:
+# acq.acquire(event)
+# time.sleep(5)
+# finally:
+# acq.get_dataset().close()
def test_abort_sequenced_zstack(launch_mm_headless, setup_data_folder):
"""
diff --git a/requirements.txt b/requirements.txt
index 6801d394..c8236a2b 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,6 +5,5 @@ ndstorage>=0.1.6
docstring-inheritance
pymmcore
sortedcontainers
-pyjavaz>=1.2.1
wget
-pydantic>=2.0.0
\ No newline at end of file
+pyjavaz==1.2.4