Units signal metadata (#367)
* Use information from signal backend to populate DataKey

* Add limits for all signals

* Revert adding limits to test records, refactor tests

* Refactor tests for DBR_DOUBLE backing

* Simplify metadata handling

* enum_choices instantiate once

* Descriptor -> DataKey

* Reduce metadata to just precision and units

* Restore lost commits

* Add explicit bool type

* Isolate boolean test behaviour

* Replace Descriptor return types with DataKey

* Express dict returns with literals

* Undo descriptor_rename

* Replace descriptor method with get_datakey

* foo

* Restore source to get_datakey

* Trim changed lines

* lint

* rename descriptor

* Add handling for propagating limits from underlying record (#370)

* unprivate variable

* Add limits handling for PVA and CA

* Tests for limits

* Limits are only ever type float

* Only add limits if set, used TypedDict

* Revert nan, add more tests

* Add limits and warnings to ioc

* Remove missed descriptor

* Re-remove string enum test

* fix mistaken logic change in ca converter

* Ruff format

* Remove unused CaArrayConverter

* Update tests/epics/test_signals.py

Co-authored-by: DiamondJoseph <[email protected]>

* Change PvaBoolConverter back to PvaEmumBoolConverter

* Put the CaArrayConverter back in

* Only override value in CaArrayConverter

* Update docstrings

* Simplify PVA limits

---------

Co-authored-by: Joseph Ware <[email protected]>
Co-authored-by: DiamondJoseph <[email protected]>
3 people authored Jun 13, 2024
1 parent b59d1e2 commit 20a9b85
Showing 5 changed files with 408 additions and 103 deletions.
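
The upshot of these changes is that get_datakey now folds backend metadata — engineering units, display precision, enum choices, and alarm/control/display/warning limits — into the DataKey it returns, for both the CA and PVA backends. As an illustrative sketch only (not part of the diff; the PV name, units and limit values below are invented), a numeric CA signal might now be described roughly like this:

# Illustrative only: PV name, units and limit values are invented.
data_key = {
    "source": "ca://DEVICE:TEMP",
    "dtype": "number",
    "shape": [],
    # Metadata now populated from the backend (FORMAT_CTRL / display info)
    "units": "K",
    "precision": 3,
    # Limits are only added when the underlying record defines them
    "limits": {
        "alarm": {"low": 0.0, "high": 400.0},
        "control": {"low": 10.0, "high": 350.0},
        "display": {"low": 0.0, "high": 500.0},
        "warning": {"low": 20.0, "high": 300.0},
    },
}
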
98 changes: 82 additions & 16 deletions src/ophyd_async/epics/_backend/_aioca.py
@@ -2,7 +2,8 @@
import sys
from dataclasses import dataclass
from enum import Enum
from typing import Any, Dict, Optional, Type, Union
from math import isnan, nan
from typing import Any, Dict, List, Optional, Type, Union

import numpy as np
from aioca import (
@@ -29,7 +30,7 @@
)
from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected

from .common import get_supported_values
from .common import LimitPair, Limits, common_meta, get_supported_values

dbr_to_dtype: Dict[Dbr, Dtype] = {
dbr.DBR_STRING: "string",
@@ -41,6 +42,64 @@
}


def _data_key_from_augmented_value(
value: AugmentedValue,
*,
choices: Optional[List[str]] = None,
dtype: Optional[str] = None,
) -> DataKey:
"""Use the return value of get with FORMAT_CTRL to construct a DataKey
describing the signal. See docstring of AugmentedValue for expected
value fields by DBR type.
Args:
value (AugmentedValue): Description of the return type of a DB record
choices: Optional list of enum choices to pass as metadata in the datakey
dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
Returns:
DataKey: A rich DataKey describing the DB record
"""
source = f"ca://{value.name}"
assert value.ok, f"Error reading {source}: {value}"

scalar = value.element_count == 1
dtype = dtype or dbr_to_dtype[value.datatype]

d = DataKey(
source=source,
dtype=dtype if scalar else "array",
# strictly value.element_count >= len(value)
shape=[] if scalar else [len(value)],
)
for key in common_meta:
attr = getattr(value, key, nan)
if isinstance(attr, str) or not isnan(attr):
d[key] = attr

if choices is not None:
d["choices"] = choices

if limits := _limits_from_augmented_value(value):
d["limits"] = limits

return d


def _limits_from_augmented_value(value: AugmentedValue) -> Limits:
def get_limits(limit: str) -> LimitPair:
low = getattr(value, f"lower_{limit}_limit", None)
high = getattr(value, f"upper_{limit}_limit", None)
return LimitPair(low=low, high=high)

return Limits(
alarm=get_limits("alarm"),
control=get_limits("ctrl"),
display=get_limits("disp"),
warning=get_limits("warning"),
)


@dataclass
class CaConverter:
read_dbr: Optional[Dbr]
@@ -62,8 +121,8 @@ def reading(self, value: AugmentedValue):
"alarm_severity": -1 if value.severity > 2 else value.severity,
}

def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
return {"source": source, "dtype": dbr_to_dtype[value.datatype], "shape": []}
def get_datakey(self, value: AugmentedValue) -> DataKey:
return _data_key_from_augmented_value(value)


class CaLongStrConverter(CaConverter):
@@ -77,15 +136,17 @@ def write_value(self, value: str):


class CaArrayConverter(CaConverter):
def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
return {"source": source, "dtype": "array", "shape": [len(value)]}

def value(self, value: AugmentedValue):
return np.array(value, copy=False)


@dataclass
class CaEnumConverter(CaConverter):
"""To prevent issues when a signal is restarted and returns with different enum
values or orders, we treat an Enum signal as a string, and cache the
choices on this class.
"""

choices: dict[str, str]

def write_value(self, value: Union[Enum, str]):
@@ -97,13 +158,18 @@ def write_value(self, value: Union[Enum, str]):
def value(self, value: AugmentedValue):
return self.choices[value]

def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
return {
"source": source,
"dtype": "string",
"shape": [],
"choices": list(self.choices),
}
def get_datakey(self, value: AugmentedValue) -> DataKey:
# Sometimes DBR_TYPE returns as String, must pass choices still
return _data_key_from_augmented_value(value, choices=list(self.choices.keys()))


@dataclass
class CaBoolConverter(CaConverter):
def value(self, value: AugmentedValue) -> bool:
return bool(value)

def get_datakey(self, value: AugmentedValue) -> DataKey:
return _data_key_from_augmented_value(value, dtype="bool")


class DisconnectedCaConverter(CaConverter):
@@ -145,7 +211,7 @@ def make_converter(
)
if pv_choices_len != 2:
raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
return CaConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
return CaBoolConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
elif pv_dbr == dbr.DBR_ENUM:
# This is an Enum
pv_choices = get_unique(
@@ -233,7 +299,7 @@ async def _caget(self, format: Format) -> AugmentedValue:

async def get_datakey(self, source: str) -> DataKey:
value = await self._caget(FORMAT_CTRL)
return self.converter.get_datakey(source, value)
return self.converter.get_datakey(value)

async def get_reading(self) -> Reading:
value = await self._caget(FORMAT_TIME)
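
In the CA backend, _data_key_from_augmented_value copies optional metadata off the AugmentedValue using NaN as a sentinel: getattr(value, key, nan) returns NaN for fields the DBR type does not carry, and only string or non-NaN values make it into the DataKey. A self-contained sketch of that pattern (not part of the diff), using a stand-in object rather than a real AugmentedValue:

from math import isnan, nan
from types import SimpleNamespace

common_meta = {"units", "precision"}

# Stand-in for an AugmentedValue fetched with FORMAT_CTRL; a DBR type
# without units would simply lack that attribute.
ctrl_value = SimpleNamespace(units="mm", precision=2)

d = {}
for key in common_meta:
    attr = getattr(ctrl_value, key, nan)
    # Strings cannot be NaN-checked, so they are always kept; numeric
    # fields are skipped when the NaN sentinel comes back.
    if isinstance(attr, str) or not isnan(attr):
        d[key] = attr

print(d)  # {'units': 'mm', 'precision': 2} (key order may vary)
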
97 changes: 81 additions & 16 deletions src/ophyd_async/epics/_backend/_p4p.py
@@ -4,6 +4,7 @@
import time
from dataclasses import dataclass
from enum import Enum
from math import isnan, nan
from typing import Any, Dict, List, Optional, Sequence, Type, Union

from bluesky.protocols import DataKey, Dtype, Reading
@@ -20,7 +21,7 @@
)
from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected

from .common import get_supported_values
from .common import LimitPair, Limits, common_meta, get_supported_values

# https://mdavidsaver.github.io/p4p/values.html
specifier_to_dtype: Dict[str, Dtype] = {
@@ -39,6 +40,67 @@
}


def _data_key_from_value(
source: str,
value: Value,
*,
shape: Optional[list[int]] = None,
choices: Optional[list[str]] = None,
dtype: Optional[str] = None,
) -> DataKey:
"""
Args:
value (Value): Description of the return type of a DB record
shape: Optional override shape when len(shape) > 1
choices: Optional list of enum choices to pass as metadata in the datakey
dtype: Optional override dtype when the Value type is ambiguous, e.g. booleans
Returns:
DataKey: A rich DataKey describing the DB record
"""
shape = shape or []
dtype = dtype or specifier_to_dtype[value.type().aspy("value")]
display_data = getattr(value, "display", None)

d = DataKey(
source=source,
dtype=dtype,
shape=shape,
)
if display_data is not None:
for key in common_meta:
attr = getattr(display_data, key, nan)
if isinstance(attr, str) or not isnan(attr):
d[key] = attr

if choices is not None:
d["choices"] = choices

if limits := _limits_from_value(value):
d["limits"] = limits

return d


def _limits_from_value(value: Value) -> Limits:
def get_limits(
substucture_name: str, low_name: str = "limitLow", high_name: str = "limitHigh"
) -> LimitPair:
substructure = getattr(value, substucture_name, None)
low = getattr(substructure, low_name, nan)
high = getattr(substructure, high_name, nan)
return LimitPair(
low=None if isnan(low) else low, high=None if isnan(high) else high
)

return Limits(
alarm=get_limits("valueAlarm", "lowAlarmLimit", "highAlarmLimit"),
control=get_limits("control"),
display=get_limits("display"),
warning=get_limits("valueAlarm", "lowWarningLimit", "highWarningLimit"),
)


class PvaConverter:
def write_value(self, value):
return value
@@ -56,8 +118,7 @@ def reading(self, value):
}

def get_datakey(self, source: str, value) -> DataKey:
dtype = specifier_to_dtype[value.type().aspy("value")]
return {"source": source, "dtype": dtype, "shape": []}
return _data_key_from_value(source, value)

def metadata_fields(self) -> List[str]:
"""
@@ -74,7 +135,9 @@ def value_fields(self) -> List[str]:

class PvaArrayConverter(PvaConverter):
def get_datakey(self, source: str, value) -> DataKey:
return {"source": source, "dtype": "array", "shape": [len(value["value"])]}
return _data_key_from_value(
source, value, dtype="array", shape=[len(value["value"])]
)


class PvaNDArrayConverter(PvaConverter):
@@ -98,7 +161,7 @@ def value(self, value):

def get_datakey(self, source: str, value) -> DataKey:
dims = self._get_dimensions(value)
return {"source": source, "dtype": "array", "shape": dims}
return _data_key_from_value(source, value, dtype="array", shape=dims)

def write_value(self, value):
# No clear use-case for writing directly to an NDArray, and some
@@ -109,6 +172,11 @@ def write_value(self, value):

@dataclass
class PvaEnumConverter(PvaConverter):
"""To prevent issues when a signal is restarted and returns with different enum
values or orders, we treat an Enum signal as a string, and cache the
choices on this class.
"""

def __init__(self, choices: dict[str, str]):
self.choices = tuple(choices.values())

@@ -122,20 +190,17 @@ def value(self, value):
return self.choices[value["value"]["index"]]

def get_datakey(self, source: str, value) -> DataKey:
return {
"source": source,
"dtype": "string",
"shape": [],
"choices": list(self.choices),
}
return _data_key_from_value(
source, value, choices=list(self.choices), dtype="string"
)


class PvaEnumBoolConverter(PvaConverter):
class PvaEmumBoolConverter(PvaConverter):
def value(self, value):
return value["value"]["index"]
return bool(value["value"]["index"])

def get_datakey(self, source: str, value) -> DataKey:
return {"source": source, "dtype": "integer", "shape": []}
return _data_key_from_value(source, value, dtype="bool")


class PvaTableConverter(PvaConverter):
@@ -144,7 +209,7 @@ def value(self, value):

def get_datakey(self, source: str, value) -> DataKey:
# This is wrong, but defer until we know how to actually describe a table
return {"source": source, "dtype": "object", "shape": []} # type: ignore
return _data_key_from_value(source, value, dtype="object")


class PvaDictConverter(PvaConverter):
@@ -213,7 +278,7 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConve
)
if pv_choices_len != 2:
raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
return PvaEnumBoolConverter()
return PvaEmumBoolConverter()
elif "NTEnum" in typeid:
# This is an Enum
pv_choices = get_unique(
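
On the PVA side, the limits live in the normative type's display, control and valueAlarm sub-structures, with the alarm and warning pairs both read from valueAlarm under different field names. A rough sketch of that mapping (not part of the diff), using plain objects in place of a p4p Value; the field names follow the diff above and the numbers are invented:

from math import isnan, nan
from types import SimpleNamespace

# Stand-ins for the sub-structures of an NTScalar; a real p4p Value would
# expose these as value.display, value.control and value.valueAlarm.
pv = SimpleNamespace(
    display=SimpleNamespace(limitLow=0.0, limitHigh=100.0),
    control=SimpleNamespace(limitLow=5.0, limitHigh=95.0),
    valueAlarm=SimpleNamespace(
        lowAlarmLimit=1.0, highAlarmLimit=99.0,
        lowWarningLimit=10.0, highWarningLimit=90.0,
    ),
)

def get_limits(substructure_name, low_name="limitLow", high_name="limitHigh"):
    # Missing sub-structures or fields fall back to NaN, which becomes None.
    substructure = getattr(pv, substructure_name, None)
    low = getattr(substructure, low_name, nan)
    high = getattr(substructure, high_name, nan)
    return {
        "low": None if isnan(low) else low,
        "high": None if isnan(high) else high,
    }

limits = {
    "alarm": get_limits("valueAlarm", "lowAlarmLimit", "highAlarmLimit"),
    "control": get_limits("control"),
    "display": get_limits("display"),
    "warning": get_limits("valueAlarm", "lowWarningLimit", "highWarningLimit"),
}
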
25 changes: 24 additions & 1 deletion src/ophyd_async/epics/_backend/common.py
@@ -1,5 +1,28 @@
from enum import Enum
from typing import Dict, Optional, Tuple, Type
from typing import Dict, Optional, Tuple, Type, TypedDict

common_meta = {
"units",
"precision",
}


class LimitPair(TypedDict):
high: float | None
low: float | None

def __bool__(self) -> bool:
return self.low is None and self.high is None


class Limits(TypedDict):
alarm: LimitPair
control: LimitPair
display: LimitPair
warning: LimitPair

def __bool__(self) -> bool:
return any(self.alarm, self.control, self.display, self.warning)


def get_supported_values(
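
LimitPair and Limits are TypedDicts, so at runtime they are ordinary dicts; the class definitions only describe the expected keys to a type checker. A brief usage sketch (not part of the diff; values invented, import path taken from the file path above):

from ophyd_async.epics._backend.common import LimitPair, Limits

limits = Limits(
    alarm=LimitPair(low=0.0, high=400.0),
    control=LimitPair(low=None, high=None),  # pair left unset by the record
    display=LimitPair(low=0.0, high=500.0),
    warning=LimitPair(low=20.0, high=300.0),
)

# TypedDict construction builds plain dicts, so the result can be placed
# straight into a DataKey under the "limits" key.
assert isinstance(limits, dict) and isinstance(limits["alarm"], dict)
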