diff --git a/docs/how-to/capture-hdf.md b/docs/how-to/capture-hdf.md
index f68e644e..df57ea0b 100644
--- a/docs/how-to/capture-hdf.md
+++ b/docs/how-to/capture-hdf.md
@@ -9,6 +9,8 @@
 These can be viewed from the DATA screen.
 ```
 - The file directory and name are chosen with `:DATA:HDFDirectory` and `:DATA:HDFFileName`.
+- `:DATA:CreateDirectory` determines how many levels of missing directories the IOC is allowed to create. This signal behaves the same as the identically named PV in [`areaDetector`](https://areadetector.github.io/areaDetector/ADCore/NDPluginFile.html).
+- `:DATA:DirectoryExists` reports whether the specified directory exists and is writable by the user the IOC is running as.
 - `:DATA:NumCapture` is the number of frames to capture in the file.
 - `:DATA:NumCaptured` is the number of frames written to file.
 - `:DATA:NumReceived` is the number of frames received from the panda.
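Reviewer note: the depth semantics of `:DATA:CreateDirectory` follow areaDetector's convention. The sketch below is illustrative only (not code from this PR; `permitted_dir_creations` is a hypothetical name) and mirrors the rules implemented in `_update_directory_path` further down:

```python
from pathlib import Path

def permitted_dir_creations(create_depth: int, target: Path) -> int:
    """How many missing directory levels a given CreateDirectory value permits."""
    if create_depth < 0:
        # Negative: up to abs(create_depth) levels, counted from the target.
        return abs(create_depth)
    if 0 < create_depth <= len(target.parents):
        # Positive: only levels deeper than create_depth components from
        # the filesystem root may be created.
        return len(target.parents) - create_depth
    # Zero, or a positive depth exceeding the path length: create nothing.
    return 0

# Path("/data/2024/run1") has parents /data/2024, /data and /:
assert permitted_dir_creations(0, Path("/data/2024/run1")) == 0
assert permitted_dir_creations(-2, Path("/data/2024/run1")) == 2
assert permitted_dir_creations(1, Path("/data/2024/run1")) == 2
```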
diff --git a/src/pandablocks_ioc/_hdf_ioc.py b/src/pandablocks_ioc/_hdf_ioc.py
index 3c2edc39..46d4c6bf 100644
--- a/src/pandablocks_ioc/_hdf_ioc.py
+++ b/src/pandablocks_ioc/_hdf_ioc.py
@@ -5,6 +5,7 @@
 from collections import deque
 from enum import Enum
 from importlib.util import find_spec
+from pathlib import Path
 from typing import Callable, Deque, Optional, Union

 from pandablocks.asyncio import AsyncioClient
@@ -311,6 +312,8 @@ class HDF5RecordController:
     _client: AsyncioClient

     _directory_record: RecordWrapper
+    _create_directory_record: RecordWrapper
+    _directory_exists_record: RecordWrapper
     _file_name_record: RecordWrapper
     _file_number_record: RecordWrapper
     _file_format_record: RecordWrapper
@@ -340,7 +343,8 @@ def __init__(self, client: AsyncioClient, record_prefix: str):
             length=path_length,
             DESC="File path for HDF5 files",
             validate=self._parameter_validate,
-            on_update=self._update_full_file_path,
+            on_update=self._update_directory_path,
+            always_update=True,
         )
         add_automatic_pvi_info(
             PviGroup.HDF,
@@ -352,6 +356,40 @@
             record_prefix + ":" + self._DATA_PREFIX + ":HDFDirectory"
         )

+        create_directory_record_name = EpicsName(self._DATA_PREFIX + ":CreateDirectory")
+        self._create_directory_record = builder.longOut(
+            create_directory_record_name,
+            initial_value=0,
+            DESC="Directory creation depth",
+        )
+        add_automatic_pvi_info(
+            PviGroup.HDF,
+            self._create_directory_record,
+            create_directory_record_name,
+            builder.longOut,
+        )
+        self._create_directory_record.add_alias(
+            record_prefix + ":" + create_directory_record_name.upper()
+        )
+
+        directory_exists_name = EpicsName(self._DATA_PREFIX + ":DirectoryExists")
+        self._directory_exists_record = builder.boolIn(
+            directory_exists_name,
+            ZNAM="No",
+            ONAM="Yes",
+            initial_value=0,
+            DESC="Directory exists",
+        )
+        add_automatic_pvi_info(
+            PviGroup.HDF,
+            self._directory_exists_record,
+            directory_exists_name,
+            builder.boolIn,
+        )
+        self._directory_exists_record.add_alias(
+            record_prefix + ":" + directory_exists_name.upper()
+        )
+
         file_name_record_name = EpicsName(self._DATA_PREFIX + ":HDF_FILE_NAME")
         self._file_name_record = builder.longStringOut(
             file_name_record_name,
@@ -523,6 +561,70 @@ def _parameter_validate(self, record: RecordWrapper, new_val) -> bool:
             return False
         return True

+    async def _update_directory_path(self, new_val) -> None:
+        """Handles writes to the directory path PV, creating
+        directories based on the setting of the CreateDirectory record"""
+        new_path = Path(new_val).absolute()
+        create_dir_depth = self._create_directory_record.get()
+        max_dirs_to_create = 0
+        if create_dir_depth < 0:
+            max_dirs_to_create = abs(create_dir_depth)
+        elif create_dir_depth > len(new_path.parents):
+            max_dirs_to_create = 0
+        elif create_dir_depth > 0:
+            max_dirs_to_create = len(new_path.parents) - create_dir_depth
+
+        logging.debug(f"Permitted to create up to {max_dirs_to_create} dirs.")
+        dirs_to_create = 0
+        for p in reversed(new_path.parents):
+            if not p.exists():
+                if dirs_to_create == 0:
+                    # First directory level that does not exist, log it.
+                    logging.error(f"All dirs from {str(p)} and below do not exist!")
+                dirs_to_create += 1
+            else:
+                logging.info(f"{str(p)} exists")
+
+        # Account for the target path itself not existing
+        if not os.path.exists(new_path):
+            dirs_to_create += 1
+
+        logging.debug(f"Need to create {dirs_to_create} directories.")
+
+        # Case where all dirs exist
+        if dirs_to_create == 0:
+            if os.access(new_path, os.W_OK):
+                status_msg = "Dir exists and is writable"
+                self._directory_exists_record.set(1)
+            else:
+                status_msg = "Dirs exist but aren't writable."
+                self._directory_exists_record.set(0)
+        # Case where we will create directories
+        elif dirs_to_create <= max_dirs_to_create:
+            logging.debug(f"Attempting to create {dirs_to_create} dir(s)...")
+            try:
+                os.makedirs(new_path, exist_ok=True)
+                status_msg = f"Created {dirs_to_create} dirs."
+                self._directory_exists_record.set(1)
+            except PermissionError:
+                status_msg = "Permission error creating dirs!"
+                self._directory_exists_record.set(0)
+        # Case where too many directories need to be created
+        else:
+            status_msg = f"Need to create {dirs_to_create} > {max_dirs_to_create} dirs."
+            self._directory_exists_record.set(0)
+
+        if self._directory_exists_record.get() == 0:
+            sevr, alrm = alarm.MAJOR_ALARM, alarm.STATE_ALARM
+            logging.error(status_msg)
+        else:
+            sevr, alrm = alarm.NO_ALARM, alarm.NO_ALARM
+            logging.debug(status_msg)
+
+        self._status_message_record.set(status_msg, severity=sevr, alarm=alrm)
+
+        await self._update_full_file_path(new_val)
+
     async def _update_full_file_path(self, new_val) -> None:
         self._full_file_path_record.set(self._get_filepath())

@@ -532,6 +634,12 @@ async def _handle_hdf5_data(self) -> None:
         This method expects to be run as an asyncio Task."""
         try:
             # Set up the hdf buffer
+
+            if self._directory_exists_record.get() != 1:
+                raise RuntimeError(
+                    "Configured HDF directory does not exist or is not writable!"
+                )
+
             num_capture: int = self._num_capture_record.get()
             capture_mode: CaptureMode = CaptureMode(self._capture_mode_record.get())
             filepath = self._get_filepath()
diff --git a/tests/conftest.py b/tests/conftest.py
index 44dd9e76..94dfed72 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,4 +1,25 @@
 # flake8: noqa
+import os
+
+import pytest
+
 from fixtures.mocked_panda import *
 from fixtures.panda_data import *
+
+
+# Autouse fixture that points all EPICS networking env vars at the loopback
+# interface, to avoid false failures from e.g. firewalls blocking EPICS traffic.
+@pytest.fixture(scope="session", autouse=True)
+def configure_epics_environment():
+    os.environ["EPICS_CAS_INTF_ADDR_LIST"] = "127.0.0.1"
+    os.environ["EPICS_CAS_BEACON_ADDR_LIST"] = "127.0.0.1"
+    os.environ["EPICS_CA_ADDR_LIST"] = "127.0.0.1"
+    os.environ["EPICS_CAS_AUTO_ADDR_LIST"] = "NO"
+    os.environ["EPICS_CA_AUTO_BEACON_ADDR_LIST"] = "NO"
+
+    os.environ["EPICS_PVAS_INTF_ADDR_LIST"] = "127.0.0.1"
+    os.environ["EPICS_PVAS_BEACON_ADDR_LIST"] = "127.0.0.1"
+    os.environ["EPICS_PVA_ADDR_LIST"] = "127.0.0.1"
+    os.environ["EPICS_PVAS_AUTO_BEACON_ADDR_LIST"] = "NO"
+    os.environ["EPICS_PVA_AUTO_ADDR_LIST"] = "NO"
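Reviewer note: together, the records above let a client verify the directory before arming capture. A minimal client-side sketch, assuming a `PANDA` prefix and the aioca client the tests below use (the `arm_capture` helper is hypothetical, not part of this PR):

```python
import asyncio

from aioca import DBR_CHAR_STR, caget, caput

async def arm_capture(prefix: str, directory: str, filename: str) -> None:
    # Write the directory; the IOC updates DirectoryExists on this put.
    await caput(
        prefix + ":DATA:HDFDirectory", directory, datatype=DBR_CHAR_STR, wait=True
    )
    # Fail early, mirroring the RuntimeError guard added to _handle_hdf5_data.
    if await caget(prefix + ":DATA:DirectoryExists") != 1:
        raise RuntimeError(f"{directory} does not exist or is not writable")
    await caput(
        prefix + ":DATA:HDFFileName", filename, datatype=DBR_CHAR_STR, wait=True
    )
    await caput(prefix + ":DATA:Capture", 1, wait=True)

# asyncio.run(arm_capture("PANDA", "/data/2024/run1", "run1.h5"))
```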
diff --git a/tests/test-bobfiles/DATA.bob b/tests/test-bobfiles/DATA.bob
index 4a8a8fd4..e4ed651b 100644
--- a/tests/test-bobfiles/DATA.bob
+++ b/tests/test-bobfiles/DATA.bob
@@ -3,7 +3,7 @@
     0
     0
     506
-    413
+    463
     4
     4
@@ -30,7 +30,7 @@
     5
     30
     496
-    106
+    156
     true
     Label
@@ -52,7 +52,7 @@
     Label
-    Hdf File Name
+    Createdirectory
     0
     25
     250
     20
@@ -60,19 +60,57 @@
     TextEntry
-    TEST_PREFIX:DATA:HDF_FILE_NAME
+    TEST_PREFIX:DATA:CreateDirectory
     255
     25
     205
     20
     1
+
+
+    Label
+    Directoryexists
+    0
+    50
+    250
+    20
+
+    TextUpdate
+    TEST_PREFIX:DATA:DirectoryExists
+    255
+    50
+    205
+    20
+
+
+
+    1
+
+    Label
+    Hdf File Name
+    0
+    75
+    250
+    20
+
+    TextEntry
+    TEST_PREFIX:DATA:HDF_FILE_NAME
+    255
+    75
+    205
+    20
+    1
     6
     Label
     Hdf Full File Path
     0
-    50
+    100
     250
     20

     TextUpdate
     TEST_PREFIX:DATA:HDF_FULL_FILE_PATH
     255
-    50
+    100
     205
     20
@@ -94,7 +132,7 @@
     CAPTURE
     5
-    141
+    191
     496
     206
     true
@@ -268,7 +306,7 @@
     OUTPUTS
     5
-    352
+    402
     496
     56
     true
diff --git a/tests/test_hdf_ioc.py b/tests/test_hdf_ioc.py
index 649e7fb3..d9d16427 100644
--- a/tests/test_hdf_ioc.py
+++ b/tests/test_hdf_ioc.py
@@ -2,6 +2,7 @@

 import asyncio
 import logging
+import os
 from asyncio import CancelledError
 from collections import deque
 from multiprocessing.connection import Connection
@@ -228,6 +229,10 @@ async def hdf5_controller(
     test_prefix, hdf5_test_prefix = new_random_hdf5_prefix

     hdf5_controller = HDF5RecordController(AsyncioClient("localhost"), test_prefix)
+
+    # These tests run without CA, so _directory_exists_record must be set manually
+    hdf5_controller._directory_exists_record.set(1)
+
     yield hdf5_controller
     # Give time for asyncio to fully close its connections
     await asyncio.sleep(0)
@@ -345,6 +350,12 @@ async def test_hdf5_ioc(hdf5_subprocess_ioc):
     val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR)
     assert val == "OK"

+    val = await caget(hdf5_test_prefix + ":CreateDirectory")
+    assert val == 0
+
+    val = await caget(hdf5_test_prefix + ":DirectoryExists")
+    assert val == 0
+

 async def test_hdf5_ioc_parameter_validate_works(
     hdf5_subprocess_ioc_no_logging_check, tmp_path
 ):
@@ -384,6 +395,99 @@ async def test_hdf5_ioc_parameter_validate_works(
     assert val == str(tmp_path) + "/name.h5"  # put should have been stopped


+@pytest.mark.parametrize(
+    "create_depth, path, expect_exists, restrict_permissions",
+    [
+        (0, ".", True, False),
+        (0, "panda_test1", False, False),
+        (-2, "panda_test2", True, False),
+        (-1, "panda_test3/depth_2", False, False),
+        (1, "panda_test4/depth_2", True, False),
+        (0, ".", False, True),
+        (1, "panda_test5", False, True),
+        (-1, "panda_test6", False, True),
+        (10, "panda_test7", False, False),
+    ],
+)
+async def test_hdf5_dir_creation(
+    hdf5_subprocess_ioc,
+    tmp_path: Path,
+    create_depth: int,
+    path: str,
+    expect_exists: bool,
+    restrict_permissions: bool,
+):
+    """Test that directory creation and the DirectoryExists record work as expected"""
+
+    if restrict_permissions:
+        # Artificially restrict permissions on the temp folder to simulate access issues.
+        tmp_path.chmod(0o444)
+
+    _, hdf5_test_prefix = hdf5_subprocess_ioc
+
+    target_path = str(tmp_path / path)
+
+    await caput(
+        hdf5_test_prefix + ":CreateDirectory",
+        create_depth,
+        wait=True,
+    )
+    await caput(
+        hdf5_test_prefix + ":HDFDirectory",
+        target_path,
+        datatype=DBR_CHAR_STR,
+        wait=True,
+    )
+    exists = await caget(hdf5_test_prefix + ":DirectoryExists")
+
+    assert (exists > 0) == expect_exists
+    if expect_exists:
+        assert os.path.exists(target_path)
+        assert os.access(target_path, os.W_OK)
+
+    if restrict_permissions:
+        # Put back default permissions
+        tmp_path.chmod(0o700)
+
+
+async def test_hdf5_file_writing_no_dir(hdf5_subprocess_ioc, tmp_path: Path, caplog):
+    """Test that if the dir doesn't exist, HDF file writing fails with a runtime error"""
+    test_prefix, hdf5_test_prefix = hdf5_subprocess_ioc
+
+    test_dir = tmp_path
+    test_filename = "test.h5"
+    await caput(
+        hdf5_test_prefix + ":HDFDirectory",
+        str(test_dir / "panda_test1"),
+        wait=True,
+        datatype=DBR_CHAR_STR,
+    )
+
+    exists = await caget(hdf5_test_prefix + ":DirectoryExists")
+    assert exists == 0
+
+    await caput(
+        hdf5_test_prefix + ":HDFFileName", "name.h5", wait=True, datatype=DBR_CHAR_STR
+    )
+    await caput(
+        hdf5_test_prefix + ":HDFFileName",
+        test_filename,
+        wait=True,
+        timeout=TIMEOUT,
+        datatype=DBR_CHAR_STR,
+    )
+
+    val = await caget(hdf5_test_prefix + ":HDFFullFilePath", datatype=DBR_CHAR_STR)
+    assert val == "/".join([str(tmp_path), "panda_test1", test_filename])
+
+    await caput(hdf5_test_prefix + ":NumCapture", 1, wait=True, timeout=TIMEOUT)
+
+    await caput(hdf5_test_prefix + ":Capture", 1, wait=True, timeout=TIMEOUT)
+
+    val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR)
+    assert val == "Capture disabled, unexpected exception"
+
+
 @pytest.mark.parametrize("num_capture", [1, 1000, 10000])
 async def test_hdf5_file_writing_first_n(
     hdf5_subprocess_ioc, tmp_path: Path, caplog, num_capture
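Reviewer note on the permission cases above: if an assertion fails before the final `chmod(0o700)`, the temp directory is left read-only and pytest's own cleanup of `tmp_path` may then fail. A fixture along these lines (a sketch, not part of this PR) would make the restore unconditional:

```python
from pathlib import Path

import pytest

@pytest.fixture
def read_only_tmp_path(tmp_path: Path):
    # Restrict permissions for the duration of the test, then restore them
    # even if the test body raises, so tmp_path cleanup still works.
    tmp_path.chmod(0o444)
    try:
        yield tmp_path
    finally:
        tmp_path.chmod(0o700)
```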
@@ -513,7 +617,7 @@ async def test_hdf5_file_writing_last_n_endreason_not_ok(
     )
     assert await caget(hdf5_test_prefix + ":NumCapture") == num_capture

-    # Initially Status should be "OK"
+    # Initially Status should be "Dir exists and is writable"
     val = await caget(hdf5_test_prefix + ":Status", datatype=DBR_CHAR_STR)
-    assert val == "OK"
+    assert val == "Dir exists and is writable"
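Reviewer note: for the file-writing tests that do succeed, the written file can be checked directly with h5py. A sketch (the dataset name is illustrative; actual dataset names depend on the PandA's configured capture fields):

```python
import h5py

def frames_in_file(path: str, dataset: str) -> int:
    # Open the file the IOC wrote and count the captured frames in one dataset.
    with h5py.File(path, "r") as f:
        return len(f[dataset])

# e.g. frames_in_file("/tmp/test.h5", "COUNTER1.OUT.Value") == NumCapture
```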