From 2aa4d46577f94b17e0573a4db458e379414d97ed Mon Sep 17 00:00:00 2001
From: Eva Lott
Date: Thu, 13 Jun 2024 11:39:30 +0100
Subject: [PATCH] allowed `_pos_out` to have the old `on_update` alongside the new

---
 src/pandablocks_ioc/_hdf_ioc.py |  8 +++-
 src/pandablocks_ioc/ioc.py      | 66 ++++++++++++++++++++++++---------
 tests/test_hdf_ioc.py           |  4 +-
 3 files changed, 57 insertions(+), 21 deletions(-)

diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py
index b4e5b22b..cfbc854b 100644
--- a/src/pandablocks_ioc/_hdf_ioc.py
+++ b/src/pandablocks_ioc/_hdf_ioc.py
@@ -335,6 +335,7 @@ def __init__(
         self,
         client: AsyncioClient,
         dataset_name_cache: Dict[str, Dict[str, str]],
+        datasets_record_updater: Callable,
         record_prefix: str,
     ):
         if find_spec("h5py") is None:
@@ -343,6 +344,7 @@ def __init__(
 
         self._client = client
         self.dataset_name_cache = dataset_name_cache
+        self.datasets_record_updater = datasets_record_updater
 
         path_length = os.pathconf("/", "PC_PATH_MAX")
         filename_length = os.pathconf("/", "PC_NAME_MAX")
@@ -658,8 +660,10 @@ async def _handle_hdf5_data(self) -> None:
                 self._num_captured_record.set
             )
 
-            # Get the dataset names, or use the record name if no
-            # dataset name is provided
+            # Update `DATA:DATASETS` to match the names of the datasets
+            # in the HDF5 file
+            self.datasets_record_updater()
+
             buffer = HDF5Buffer(
                 capture_mode,
                 filepath,
diff --git a/src/pandablocks_ioc/ioc.py b/src/pandablocks_ioc/ioc.py
index 595a31f6..c22a382a 100644
--- a/src/pandablocks_ioc/ioc.py
+++ b/src/pandablocks_ioc/ioc.py
@@ -537,8 +537,6 @@ def update_cache(
         else:
             self._record_name_to_dataset_name.pop(record_name, None)
 
-        self.update_dataset_name_to_type()
-
     def update_dataset_name_to_type(self):
         dataset_name_list = list(self._record_name_to_dataset_name.values())
         self._datasets_table.update_row("Name", dataset_name_list)
@@ -913,6 +911,17 @@ def _make_pos_out(
         labels, capture_index = self._process_labels(
             field_info.capture_labels, values[capture_record_name]
         )
+
+        capture_record_updater: _RecordUpdater
+
+        def capture_record_on_update(new_capture_mode):
+            self._dataset_name_cache.update_cache(
+                record_name,
+                record_dict[dataset_record_name].record.get(),
+                labels[new_capture_mode],
+            )
+            return capture_record_updater.update(new_capture_mode)
+
         record_dict[capture_record_name] = self._create_record_info(
             capture_record_name,
             "Capture options",
@@ -921,14 +930,20 @@ def _make_pos_out(
             PviGroup.CAPTURE,
             labels=labels,
             initial_value=capture_index,
-            on_update=lambda new_capture_mode: (
-                self._dataset_name_cache.update_cache(
-                    record_name,
-                    record_dict[dataset_record_name].record.get(),
-                    labels[new_capture_mode],
-                )
-            ),
+            on_update=capture_record_on_update,
+        )
+
+        capture_record_info = RecordInfo(
+            data_type_func=builder.mbbOut, labels=labels, is_in_record=False
+        )
+        capture_record_updater = _RecordUpdater(
+            capture_record_info,
+            self._record_prefix,
+            self._client,
+            self._all_values_dict,
+            labels if labels else None,
         )
+
         record_dict[dataset_record_name] = self._create_record_info(
             dataset_record_name,
             "Used to adjust the dataset name to one more scientifically relevant",
@@ -1082,6 +1097,16 @@ def _make_ext_out(
             ),
         )
 
+        capture_record_updater: _RecordUpdater
+
+        def capture_record_on_update(new_capture_mode):
+            self._dataset_name_cache.update_cache(
+                record_name,
+                record_dict[dataset_record_name].record.get(),
+                labels[new_capture_mode],
+            )
+            return capture_record_updater.update(new_capture_mode)
+
         record_dict[capture_record_name] = self._create_record_info(
             capture_record_name,
             field_info.description,
@@ -1090,13 +1115,17 @@ def _make_ext_out(
             PviGroup.OUTPUTS,
             labels=labels,
             initial_value=capture_index,
-            on_update=lambda new_capture_mode: (
-                self._dataset_name_cache.update_cache(
-                    record_name,
-                    record_dict[dataset_record_name].record.get(),
-                    labels[new_capture_mode],
-                )
-            ),
+            on_update=capture_record_on_update,
+        )
+        capture_record_info = RecordInfo(
+            data_type_func=builder.mbbOut, labels=labels, is_in_record=False
+        )
+        capture_record_updater = _RecordUpdater(
+            capture_record_info,
+            self._record_prefix,
+            self._client,
+            self._all_values_dict,
+            labels if labels else None,
         )
 
         return record_dict
@@ -1860,7 +1889,10 @@ def create_block_records(
         add_pcap_arm_pvi_info(PviGroup.INPUTS, pcap_arm_record)
 
         HDF5RecordController(
-            self._client, self._dataset_name_cache.cache, self._record_prefix
+            self._client,
+            self._dataset_name_cache.cache,
+            self._dataset_name_cache.update_dataset_name_to_type,
+            self._record_prefix,
         )
 
         return record_dict
diff --git a/tests/test_hdf_ioc.py b/tests/test_hdf_ioc.py
index 83414d68..ae63fbc2 100644
--- a/tests/test_hdf_ioc.py
+++ b/tests/test_hdf_ioc.py
@@ -234,7 +234,7 @@ async def hdf5_controller(
     }
 
     hdf5_controller = HDF5RecordController(
-        AsyncioClient("localhost"), dataset_name_cache, test_prefix
+        AsyncioClient("localhost"), dataset_name_cache, lambda: None, test_prefix
     )
 
     # When using tests w/o CA, need to manually set _directory_exists to 1
@@ -254,7 +254,7 @@ def subprocess_func(
     async def wrapper():
         builder.SetDeviceName(namespace_prefix)
         client = MockedAsyncioClient(standard_responses)
-        HDF5RecordController(client, {}, namespace_prefix)
+        HDF5RecordController(client, {}, lambda: None, namespace_prefix)
         dispatcher = asyncio_dispatcher.AsyncioDispatcher()
         builder.LoadDatabase()
         softioc.iocInit(dispatcher)
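
The patch replaces the Capture record's lambda `on_update` with a named callback that first refreshes the dataset-name cache and then forwards the value through the pre-existing `_RecordUpdater`, keeping the old behaviour alongside the new. Below is a minimal, self-contained sketch of that chained-callback pattern; `DatasetCache`, `RecordUpdater`, `make_capture_on_update`, and the record name in the usage example are illustrative stand-ins, not the pandablocks-ioc classes themselves.

# Sketch of the chained on_update pattern used in the patch (stand-in classes,
# not the real pandablocks-ioc implementation).
import asyncio
from typing import Callable, Dict, List


class DatasetCache:
    """Stand-in for the cache that backs the DATA:DATASETS table."""

    def __init__(self) -> None:
        self.entries: Dict[str, str] = {}

    def update_cache(self, record_name: str, dataset_name: str, capture: str) -> None:
        # Track the dataset name while capture is enabled, drop it otherwise.
        if capture != "No":
            self.entries[record_name] = dataset_name or record_name
        else:
            self.entries.pop(record_name, None)


class RecordUpdater:
    """Stand-in for the updater that sends the Capture value to the PandA."""

    async def update(self, new_value: str) -> None:
        print(f"forwarding Capture={new_value} to hardware")


def make_capture_on_update(
    cache: DatasetCache,
    updater: RecordUpdater,
    record_name: str,
    get_dataset_name: Callable[[], str],
    labels: List[str],
):
    # Chain both actions, mirroring capture_record_on_update in the patch:
    # refresh the cache, then delegate to the original updater.
    async def on_update(new_capture_index: int) -> None:
        capture = labels[new_capture_index]
        cache.update_cache(record_name, get_dataset_name(), capture)
        await updater.update(capture)

    return on_update


if __name__ == "__main__":
    labels = ["No", "Value", "Diff"]
    cache, updater = DatasetCache(), RecordUpdater()
    on_update = make_capture_on_update(
        cache, updater, "INENC1:VAL", lambda: "x_position", labels
    )
    asyncio.run(on_update(1))
    print(cache.entries)  # {'INENC1:VAL': 'x_position'}

Using a named closure instead of a lambda lets the callback be defined before the `_RecordUpdater` exists and bound to it afterwards, which is the ordering the patch relies on when it creates the Capture record first and the updater second.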