From 7b43fc790d666eecfc929523feaadad4d45d6f57 Mon Sep 17 00:00:00 2001
From: tazlin
Date: Wed, 4 Oct 2023 10:14:31 -0400
Subject: [PATCH] fix: refuse to start if no models loaded

---
 .../process_management/inference_process.py | 16 ++++++++++++++++
 .../process_management/process_manager.py   | 11 +++++++++++
 2 files changed, 27 insertions(+)

diff --git a/horde_worker_regen/process_management/inference_process.py b/horde_worker_regen/process_management/inference_process.py
index 80e1b457..7cf7c47e 100644
--- a/horde_worker_regen/process_management/inference_process.py
+++ b/horde_worker_regen/process_management/inference_process.py
@@ -101,6 +101,22 @@ def __init__(
         except Exception as e:
             logger.critical(f"Failed to initialise HordeCheckpointLoader: {type(e).__name__} {e}")
 
+        if SharedModelManager.manager.compvis is None:
+            logger.critical("Failed to initialise SharedModelManager")
+            self.send_process_state_change_message(
+                process_state=HordeProcessState.PROCESS_ENDED,
+                info="Failed to initialise compvis in SharedModelManager",
+            )
+            return
+
+        if SharedModelManager.manager.compvis.available_models == 0:
+            logger.critical("No models available in SharedModelManager")
+            self.send_process_state_change_message(
+                process_state=HordeProcessState.PROCESS_ENDED,
+                info="No models available in SharedModelManager",
+            )
+            return
+
         logger.info("HordeInferenceProcess initialised")
 
         self.send_process_state_change_message(
diff --git a/horde_worker_regen/process_management/process_manager.py b/horde_worker_regen/process_management/process_manager.py
index 63fecdbe..ab3b5aff 100644
--- a/horde_worker_regen/process_management/process_manager.py
+++ b/horde_worker_regen/process_management/process_manager.py
@@ -490,6 +490,17 @@ def __init__(
             time.sleep(5)
 
     def is_time_for_shutdown(self) -> bool:
+        if all(
+            inference_process.last_process_state == HordeProcessState.PROCESS_ENDING
+            or inference_process.last_process_state == HordeProcessState.PROCESS_ENDED
+            for inference_process in [
+                inference_process
+                for inference_process in self._process_map.values()
+                if inference_process.process_type == HordeProcessKind.INFERENCE
+            ]
+        ):
+            return True
+
         if len(self.completed_jobs) > 0:
             return False
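Two hedged observations on the patch above, both depending on APIs not shown here: first, Python's `all()` returns `True` for an empty iterable, so the new `is_time_for_shutdown` check would report that all inference processes have ended even when `_process_map` contains no inference processes at all; second, if `available_models` on the compvis model manager is a list rather than a count (the name suggests a list), the `== 0` comparison would never match and a `len(...) == 0` check may be what is intended. The sketch below is not the repository's code; it uses stand-in types to show how the shutdown condition could be written without the redundant inner list comprehension and with the empty case handled explicitly.

```python
# Minimal, self-contained sketch (stand-in types, not the repository's classes)
# illustrating the shutdown condition added in the patch above.
from dataclasses import dataclass
from enum import Enum, auto


class HordeProcessState(Enum):
    PROCESS_STARTING = auto()
    PROCESS_ENDING = auto()
    PROCESS_ENDED = auto()


class HordeProcessKind(Enum):
    INFERENCE = auto()
    SAFETY = auto()


@dataclass
class ProcessInfo:
    process_type: HordeProcessKind
    last_process_state: HordeProcessState


def all_inference_processes_done(process_map: dict[int, ProcessInfo]) -> bool:
    """Return True only if at least one inference process exists and every
    one of them is ending or has ended.

    Note: a bare ``all(...)`` over an empty generator returns True, which
    would signal shutdown when no inference processes are tracked at all,
    so the empty case is guarded explicitly here.
    """
    inference_processes = [
        p for p in process_map.values() if p.process_type == HordeProcessKind.INFERENCE
    ]
    if not inference_processes:
        return False
    return all(
        p.last_process_state in (HordeProcessState.PROCESS_ENDING, HordeProcessState.PROCESS_ENDED)
        for p in inference_processes
    )


if __name__ == "__main__":
    # Example: one process still starting, one ended -> not yet time to shut down.
    process_map = {
        0: ProcessInfo(HordeProcessKind.INFERENCE, HordeProcessState.PROCESS_STARTING),
        1: ProcessInfo(HordeProcessKind.INFERENCE, HordeProcessState.PROCESS_ENDED),
    }
    print(all_inference_processes_done(process_map))  # False
```

Whether an empty process map should count as "time for shutdown" is a design choice; the sketch returns `False` there, while the patch's `all(...)` form returns `True`.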