From 534bc6191c9a4b8b97434ada58f1149a4cf34e97 Mon Sep 17 00:00:00 2001 From: David Taylor Date: Tue, 4 Jun 2024 15:32:34 +1000 Subject: [PATCH 1/7] DB init script now creates version table so DB v1 backups restore cleanly. --- db/init.d/init_db.sql | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/db/init.d/init_db.sql b/db/init.d/init_db.sql index a37652c9..304aa48c 100755 --- a/db/init.d/init_db.sql +++ b/db/init.d/init_db.sql @@ -90,7 +90,7 @@ create table if not exists physical_logical_map ( primary key(physical_uid, logical_uid, start_time) ); -create table if not exists users( +create table if not exists users ( uid integer generated always as identity primary key, username text not null unique, salt text not null, @@ -100,11 +100,11 @@ create table if not exists users( read_only boolean default True not null ); +create table if not exists version ( + version integer not null +); + create index if not exists pd_src_id_idx on physical_devices using GIN (source_ids); insert into sources values ('ttn'), ('greenbrain'), ('wombat'), ('ydoc'), ('ict_eagleio'); - --- Enable the PostGIS extensions --- CREATE EXTENSION postgis; --- CREATE EXTENSION postgis_raster; --- CREATE EXTENSION postgis_sfcgal; +insert into version values (1); From 9450549f6f7a40eede8779ba4a310f3ee3add2dc Mon Sep 17 00:00:00 2001 From: David Taylor Date: Tue, 28 May 2024 15:57:47 +1000 Subject: [PATCH 2/7] Added the ability to include logical device properties in the ld ls command via the --properties flag. DAO & RestAPI updated to handle PostGIS geometry location column. Removed backoff annotations from DAO, they have not been useful. Changed errors raised by the DAO to be ValueErrors when parameter validation fails. 
--- compose/docker-compose.yml | 6 +- src/python/api/client/DAO.py | 254 ++++++++++++++-------------------- src/python/broker-cli.py | 6 +- src/python/pdmodels/Models.py | 23 +-- src/python/restapi/RestAPI.py | 2 + test/python/test_dao.py | 49 ++++--- test/python/test_restapi.py | 42 +++--- 7 files changed, 172 insertions(+), 210 deletions(-) diff --git a/compose/docker-compose.yml b/compose/docker-compose.yml index 1ebaccd5..0bdc276a 100644 --- a/compose/docker-compose.yml +++ b/compose/docker-compose.yml @@ -2,8 +2,8 @@ version: '3.1' services: db: - image: postgres:14.2 - restart: "no" + image: postgis/postgis:14-3.4 + restart: always env_file: - .env volumes: @@ -18,7 +18,7 @@ services: mq: hostname: "mq" image: rabbitmq:3.9-management - restart: "no" + restart: always env_file: - .env volumes: diff --git a/src/python/api/client/DAO.py b/src/python/api/client/DAO.py index f0bb19b3..5649725d 100644 --- a/src/python/api/client/DAO.py +++ b/src/python/api/client/DAO.py @@ -1,38 +1,37 @@ from contextlib import contextmanager from datetime import datetime, timezone -from email.utils import parseaddr -import logging, re, warnings -from turtle import back -from unittest import result -import backoff +import logging, warnings import dateutil.parser import psycopg2 from psycopg2 import pool import psycopg2.errors -from psycopg2.extensions import register_adapter, AsIs +from psycopg2.extensions import AsIs from psycopg2.extras import Json, register_uuid from typing import Any, Dict, List, Optional, Union import hashlib -import base64 import os import BrokerConstants -from pdmodels.Models import DeviceNote, Location, LogicalDevice, PhysicalDevice, PhysicalToLogicalMapping, User +from pdmodels.Models import BaseDevice, DeviceNote, LogicalDevice, PhysicalDevice, PhysicalToLogicalMapping, User logging.captureWarnings(True) + class DAOException(Exception): def __init__(self, msg: str = None, wrapped: Exception = None): self.msg: str = msg self.wrapped: Exception = wrapped + # 
This is raised in update methods when the entity to be updated does not exist. class DAODeviceNotFound(DAOException): pass + class DAOUserNotFound(DAOException): pass + # This is raised if Postgres raises a unique constraint exception. It is useful # for the REST API to know this was the problem rather than some general database # problem, and allows calling code to not have to examine the Postgres exception @@ -41,29 +40,15 @@ class DAOUniqeConstraintException(DAOException): pass -def adapt_location(location: Location): - """ - Transform a Location instance into a value usable in an SQL statement. - """ - return AsIs(f"('{location.lat},{location.long}')") - - -def cast_point(value, cur): - """ - Transform a value from an SQL result into a Location instance. - """ - if value is None: - return None - - if isinstance(value, str): - m = re.fullmatch(r'\(([+-]?\d+\.?\d*),([+-]?\d+\.?\d*)\)', value) - if m is not None: - return Location(lat=float(m.group(1)),long=m.group(2)) - - raise psycopg2.InterfaceError(f'Bad point representation: {value}') - - conn_pool = None +_geometry_type_oid = None +_physical_device_select_all_cols = """ +select uid, source_name, name, (select row_to_json(_) from (select ST_Y(location) as lat, ST_X(location) as long) as _) as location, last_seen, source_ids, properties from physical_devices +""" + +_logical_device_select_all_cols = """ +select uid, name, (select row_to_json(_) from (select ST_Y(location) as lat, ST_X(location) as long) as _) as location, last_seen, properties from logical_devices +""" def stop() -> None: @@ -94,7 +79,8 @@ def _get_connection(): if conn_pool is None: logging.info('Creating connection pool, registering type converters.') conn_pool = pool.ThreadedConnectionPool(1, 5) - _register_type_adapters() + # register_type_adapters() + register_uuid() conn = conn_pool.getconn() logging.debug(f'Taking conn {conn}') @@ -119,25 +105,6 @@ def free_conn(conn) -> None: conn_pool.putconn(conn) -def _register_type_adapters(): - 
""" - Register adapter functions to handle Location <-> SQL Point data types. - """ - # Apapter to convert from a Location instance to a quoted SQL string. - register_adapter(Location, adapt_location) - - # Adapter to convert from an SQL result to a Location instance. - with _get_connection() as conn, conn.cursor() as cursor: - conn.autocommit = True - cursor.execute("SELECT NULL::point") - point_oid = cursor.description[0][1] - POINT = psycopg2.extensions.new_type((point_oid,), "POINT", cast_point) - psycopg2.extensions.register_type(POINT) - - free_conn(conn) - register_uuid() - - def _dict_from_row(result_metadata, row) -> Dict[str, Any]: obj = {} for i, col_def in enumerate(result_metadata): @@ -146,14 +113,28 @@ def _dict_from_row(result_metadata, row) -> Dict[str, Any]: return obj +def _device_to_query_params(device: BaseDevice) -> dict: + dev_fields = {} + for k, v in vars(device).items(): + match k: + case 'properties' | 'source_ids': + dev_fields[k] = Json(v) + case 'location': + if device.location is None or device.location.lat is None or device.location.long is None: + dev_fields[k] = None + else: + dev_fields[k] = AsIs(f"ST_GeomFromText('POINT({device.location.long} {device.location.lat})')") + case _: + dev_fields[k] = v + + return dev_fields + + """ Physical device source CRUD methods - -create table if not exists sources ( - source_name text primary key not null -); """ -@backoff.on_exception(backoff.expo, DAOException, max_time=30) + + def get_all_physical_sources() -> List[PhysicalDevice]: conn = None try: @@ -175,21 +156,16 @@ def get_all_physical_sources() -> List[PhysicalDevice]: """ Physical device CRUD methods """ -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def create_physical_device(device: PhysicalDevice) -> PhysicalDevice: conn = None try: - dev_fields = {} - for k, v in vars(device).items(): - dev_fields[k] = v if k not in ('source_ids', 'properties') else Json(v) - + dev_fields = _device_to_query_params(device) with 
_get_connection() as conn, conn.cursor() as cursor: #logging.info(cursor.mogrify("insert into physical_devices (source_name, name, location, last_seen, source_ids, properties) values (%(source_name)s, %(name)s, %(location)s, %(last_seen)s, %(source_ids)s, %(properties)s) returning uid", dev_fields)) cursor.execute("insert into physical_devices (source_name, name, location, last_seen, source_ids, properties) values (%(source_name)s, %(name)s, %(location)s, %(last_seen)s, %(source_ids)s, %(properties)s) returning uid", dev_fields) uid = cursor.fetchone()[0] dev = _get_physical_device(conn, uid) - - return dev + return dev except Exception as err: raise err if isinstance(err, DAOException) else DAOException('create_physical_device failed.', err) finally: @@ -211,7 +187,7 @@ def _get_physical_device(conn, uid: int) -> PhysicalDevice: """ dev = None with conn.cursor() as cursor: - sql = 'select uid, source_name, name, location, last_seen, source_ids, properties from physical_devices where uid = %s' + sql = f'{_physical_device_select_all_cols} where uid = %s' cursor.execute(sql, (uid, )) row = cursor.fetchone() if row is not None: @@ -221,15 +197,12 @@ def _get_physical_device(conn, uid: int) -> PhysicalDevice: return dev -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_physical_device(uid: int) -> PhysicalDevice: conn = None try: - dev = None with _get_connection() as conn: dev = _get_physical_device(conn, uid) - - return dev + return dev except Exception as err: raise err if isinstance(err, DAOException) else DAOException('get_physical_device failed.', err) finally: @@ -237,13 +210,12 @@ def get_physical_device(uid: int) -> PhysicalDevice: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_pyhsical_devices_using_source_ids(source_name: str, source_ids: Dict[str, str]) -> List[PhysicalDevice]: conn = None try: devs = [] with _get_connection() as conn, conn.cursor() as cursor: - sql = 'select uid, source_name, 
name, location, last_seen, source_ids, properties from physical_devices where source_name = %s and source_ids @> %s order by uid asc' + sql = f'{_physical_device_select_all_cols} where source_name = %s and source_ids @> %s order by uid asc' args = (source_name, Json(source_ids)) #logging.info(cursor.mogrify(sql, args)) cursor.execute(sql, args) @@ -259,13 +231,12 @@ def get_pyhsical_devices_using_source_ids(source_name: str, source_ids: Dict[str free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_all_physical_devices() -> List[PhysicalDevice]: conn = None try: devs = [] with _get_connection() as conn, conn.cursor() as cursor: - sql = 'select uid, source_name, name, location, last_seen, source_ids, properties from physical_devices order by uid asc' + sql = f'{_physical_device_select_all_cols} order by uid asc' cursor.execute(sql) cursor.arraysize = 200 rows = cursor.fetchmany() @@ -284,13 +255,12 @@ def get_all_physical_devices() -> List[PhysicalDevice]: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_physical_devices_from_source(source_name: str) -> List[PhysicalDevice]: conn = None try: devs = [] with _get_connection() as conn, conn.cursor() as cursor: - sql = 'select uid, source_name, name, location, last_seen, source_ids, properties from physical_devices where source_name = %s order by uid asc' + sql = f'{_physical_device_select_all_cols} where source_name = %s order by uid asc' cursor.execute(sql, (source_name, )) cursor.arraysize = 200 rows = cursor.fetchmany() @@ -309,13 +279,15 @@ def get_physical_devices_from_source(source_name: str) -> List[PhysicalDevice]: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) -def get_physical_devices(query_args = {}) -> List[PhysicalDevice]: +def get_physical_devices(query_args=None) -> List[PhysicalDevice]: + if query_args is None: + query_args = {} + conn = None try: devs = [] with _get_connection() as conn, 
conn.cursor() as cursor: - sql = 'select uid, source_name, name, location, last_seen, source_ids, properties from physical_devices' + sql = _physical_device_select_all_cols args = {} add_where = True @@ -368,7 +340,6 @@ def get_physical_devices(query_args = {}) -> List[PhysicalDevice]: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def update_physical_device(device: PhysicalDevice) -> PhysicalDevice: conn = None try: @@ -415,7 +386,6 @@ def update_physical_device(device: PhysicalDevice) -> PhysicalDevice: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def delete_physical_device(uid: int) -> PhysicalDevice: conn = None try: @@ -433,7 +403,6 @@ def delete_physical_device(uid: int) -> PhysicalDevice: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def create_physical_device_note(uid: int, note: str) -> None: conn = None try: @@ -448,7 +417,6 @@ def create_physical_device_note(uid: int, note: str) -> None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_physical_device_notes(uid: int) -> List[DeviceNote]: conn = None try: @@ -466,7 +434,6 @@ def get_physical_device_notes(uid: int) -> List[DeviceNote]: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def update_physical_device_note(note: DeviceNote) -> None: conn = None try: @@ -480,7 +447,6 @@ def update_physical_device_note(note: DeviceNote) -> None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def delete_physical_device_note(uid: int) -> None: conn = None try: @@ -499,21 +465,15 @@ def delete_physical_device_note(uid: int) -> None: Logical Device CRUD methods """ -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def create_logical_device(device: LogicalDevice) -> LogicalDevice: conn = None try: - dev = None - dev_fields = {} - for k, v in vars(device).items(): - dev_fields[k] = v if k != 'properties' 
else Json(v) - + dev_fields = _device_to_query_params(device) with _get_connection() as conn, conn.cursor() as cursor: cursor.execute("insert into logical_devices (name, location, last_seen, properties) values (%(name)s, %(location)s, %(last_seen)s, %(properties)s) returning uid", dev_fields) uid = cursor.fetchone()[0] dev = _get_logical_device(conn, uid) - - return dev + return dev except Exception as err: raise err if isinstance(err, DAOException) else DAOException('create_logical_device failed.', err) finally: @@ -535,7 +495,15 @@ def _get_logical_device(conn, uid: int) -> LogicalDevice: """ dev = None with conn.cursor() as cursor: - sql = 'select uid, name, location, last_seen, properties from logical_devices where uid = %s' + """ + See: https://dba.stackexchange.com/questions/27732/set-names-to-attributes-when-creating-json-with-row-to-json + + for an explanation of how to name the fields in a row_to_json call. + + Example query tested in psql: + select uid, name, (select row_to_json(_) from (select ST_Y(location) as lat, ST_X(location) as long) as _) as location from logical_devices; + """ + sql = f'{_logical_device_select_all_cols} where uid = %s' cursor.execute(sql, (uid, )) row = cursor.fetchone() if row is not None: @@ -545,7 +513,6 @@ def _get_logical_device(conn, uid: int) -> LogicalDevice: return dev -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_logical_device(uid: int) -> LogicalDevice: conn = None try: @@ -559,15 +526,12 @@ def get_logical_device(uid: int) -> LogicalDevice: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_logical_devices(query_args = {}) -> List[LogicalDevice]: conn = None try: devs = [] with _get_connection() as conn, conn.cursor() as cursor: - #conn.autocommit = True - - sql = 'select uid, name, location, last_seen, properties from logical_devices' + sql = _logical_device_select_all_cols args = {} add_where = True @@ -592,12 +556,11 @@ def 
get_logical_devices(query_args = {}) -> List[LogicalDevice]: sql = sql + ' order by uid asc' #logging.info(cursor.mogrify(sql, args)) - + #logging.info(cursor.mogrify(sql, args)) cursor.execute(sql, args) cursor.arraysize = 200 rows = cursor.fetchmany() while len(rows) > 0: - #logging.info(f'processing {len(rows)} rows.') for r in rows: d = LogicalDevice.parse_obj(_dict_from_row(cursor.description, r)) devs.append(d) @@ -612,7 +575,6 @@ def get_logical_devices(query_args = {}) -> List[LogicalDevice]: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def update_logical_device(device: LogicalDevice) -> LogicalDevice: conn = None try: @@ -652,7 +614,6 @@ def update_logical_device(device: LogicalDevice) -> LogicalDevice: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def delete_logical_device(uid: int) -> LogicalDevice: conn = None try: @@ -681,7 +642,6 @@ def delete_logical_device(uid: int) -> LogicalDevice: ); """ -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def insert_mapping(mapping: PhysicalToLogicalMapping) -> None: """ Insert a device mapping. 
@@ -691,7 +651,7 @@ def insert_mapping(mapping: PhysicalToLogicalMapping) -> None: with _get_connection() as conn, conn.cursor() as cursor: current_mapping = _get_current_device_mapping(conn, pd=mapping.pd.uid) if current_mapping is not None: - raise DAOUniqeConstraintException(f'insert_mapping failed: physical device {current_mapping.pd.uid} / "{current_mapping.pd.name}" is already mapped to logical device {current_mapping.ld.uid} / "{current_mapping.ld.name}"') + raise ValueError(f'insert_mapping failed: physical device {current_mapping.pd.uid} / "{current_mapping.pd.name}" is already mapped to logical device {current_mapping.ld.uid} / "{current_mapping.ld.name}"') current_mapping = _get_current_device_mapping(conn, ld=mapping.ld.uid) if current_mapping is not None: @@ -703,26 +663,33 @@ def insert_mapping(mapping: PhysicalToLogicalMapping) -> None: except psycopg2.errors.UniqueViolation as err: raise DAOUniqeConstraintException(f'Mapping already exists: {mapping.pd.uid} {mapping.pd.name} -> {mapping.ld.uid} {mapping.ld.name}, starting at {mapping.start_time}.', err) except Exception as err: - raise err if isinstance(err, DAOException) else DAOException('insert_mapping failed.', err) + match err: + case DAOException() | ValueError(): + raise err + case _: + raise DAOException('insert_mapping failed.', err) finally: if conn is not None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def end_mapping(pd: Optional[Union[PhysicalDevice, int]] = None, ld: Optional[Union[LogicalDevice, int]] = None) -> None: conn = None try: with _get_connection() as conn: _end_mapping(conn, pd, ld) except Exception as err: - raise err if isinstance(err, DAOException) else DAOException('end_mapping failed.', err) + match err: + case DAOException() | ValueError(): + raise err + case _: + raise DAOException('end_mapping failed.', err) + finally: if conn is not None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def 
_end_mapping(conn, pd: Optional[Union[PhysicalDevice, int]] = None, ld: Optional[Union[LogicalDevice, int]] = None) -> None: with conn.cursor() as cursor: mapping: PhysicalToLogicalMapping = _get_current_device_mapping(conn, pd, ld) @@ -730,10 +697,10 @@ def _end_mapping(conn, pd: Optional[Union[PhysicalDevice, int]] = None, ld: Opti return if pd is None and ld is None: - raise DAOException('A PhysicalDevice or a LogicalDevice (or an uid for one of them) must be supplied to end a mapping.') + raise ValueError('A PhysicalDevice or a LogicalDevice (or an uid for one of them) must be supplied to end a mapping.') if pd is not None and ld is not None: - raise DAOException('Both pd and ld were provided, only give one when ending a mapping.') + raise ValueError('Both pd and ld were provided, only give one when ending a mapping.') p_uid = None if pd is not None: @@ -752,7 +719,6 @@ def _end_mapping(conn, pd: Optional[Union[PhysicalDevice, int]] = None, ld: Opti logging.warning(f'No mapping was updated during end_mapping for {pd.uid} {pd.name} -> {ld.uid} {ld.name}') -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def delete_mapping(mapping: PhysicalToLogicalMapping) -> None: """ Remove an entry from the physical_logical_map table. Advised to use end_mapping instead. This method should only be used when permanently deleting a physical or logical device from the database. 
@@ -769,7 +735,6 @@ def delete_mapping(mapping: PhysicalToLogicalMapping) -> None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def toggle_device_mapping(is_active: bool, pd: Optional[Union[PhysicalDevice, int]] = None, ld: Optional[Union[LogicalDevice, int]] = None) -> None: """ Change the is_active column in the database @@ -790,10 +755,10 @@ def toggle_device_mapping(is_active: bool, pd: Optional[Union[PhysicalDevice, in def _toggle_device_mapping(conn, is_active, pd: Optional[Union[PhysicalDevice, int]] = None, ld: Optional[Union[LogicalDevice, int]] = None): if pd is None and ld is None: - raise DAOException('A PhysicalDevice or a LogicalDevice (or an uid for one of them) must be supplied to find a mapping.') + raise ValueError('A PhysicalDevice or a LogicalDevice (or an uid for one of them) must be supplied to find a mapping.') if pd is not None and ld is not None: - raise DAOException('Both pd and ld were provided, only give one.') + raise ValueError('Both pd and ld were provided, only give one.') p_uid = None if pd is not None: @@ -811,17 +776,19 @@ def _toggle_device_mapping(conn, is_active, pd: Optional[Union[PhysicalDevice, i return None -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_current_device_mapping(pd: Optional[Union[PhysicalDevice, int]] = None, ld: Optional[Union[LogicalDevice, int]] = None, only_current_mapping: bool = True) -> Optional[PhysicalToLogicalMapping]: conn = None try: - mapping = None with _get_connection() as conn: mapping = _get_current_device_mapping(conn, pd, ld, only_current_mapping) - - return mapping + return mapping except Exception as err: - raise err if isinstance(err, DAOException) else DAOException('get_current_device_mapping failed.', err) + match err: + case DAOException() | ValueError(): + raise err + case _: + raise DAOException('get_current_device_mapping failed.', err) + finally: if conn is not None: free_conn(conn) @@ -831,10 +798,10 @@ def 
_get_current_device_mapping(conn, pd: Optional[Union[PhysicalDevice, int]] = mappings = None if pd is None and ld is None: - raise DAOException('A PhysicalDevice or a LogicalDevice (or an uid for one of them) must be supplied to find a mapping.') + raise ValueError('A PhysicalDevice or a LogicalDevice (or an uid for one of them) must be supplied to find a mapping.') if pd is not None and ld is not None: - raise DAOException('Both pd and ld were provided, only give one.') + raise ValueError('Both pd and ld were provided, only give one.') p_uid = None if pd is not None: @@ -868,13 +835,12 @@ def _get_current_device_mapping(conn, pd: Optional[Union[PhysicalDevice, int]] = return None -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_unmapped_physical_devices() -> List[PhysicalDevice]: conn = None try: devs = [] with _get_connection() as conn, conn.cursor() as cursor: - cursor.execute('select * from physical_devices where uid not in (select physical_uid from physical_logical_map where end_time is null) order by uid asc') + cursor.execute(f'{_physical_device_select_all_cols} where uid not in (select physical_uid from physical_logical_map where end_time is null) order by uid asc') for r in cursor: dfr = _dict_from_row(cursor.description, r) devs.append(PhysicalDevice.parse_obj(dfr)) @@ -887,7 +853,6 @@ def get_unmapped_physical_devices() -> List[PhysicalDevice]: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_logical_device_mappings(ld: Union[LogicalDevice, int]) -> List[PhysicalToLogicalMapping]: conn = None try: @@ -909,7 +874,6 @@ def get_logical_device_mappings(ld: Union[LogicalDevice, int]) -> List[PhysicalT free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_physical_device_mappings(pd: Union[PhysicalDevice, int]) -> List[PhysicalToLogicalMapping]: conn = None try: @@ -931,7 +895,6 @@ def get_physical_device_mappings(pd: Union[PhysicalDevice, int]) -> List[Physica if 
conn is not None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_all_current_mappings(return_uids: bool = True) -> List[PhysicalToLogicalMapping]: conn = None try: @@ -956,7 +919,6 @@ def get_all_current_mappings(return_uids: bool = True) -> List[PhysicalToLogical free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def add_raw_json_message(source_name: str, ts: datetime, correlation_uuid: str, msg, uid: int=None): conn = None try: @@ -1000,10 +962,10 @@ def get_physical_timeseries_message(start: datetime | None = None, end: datetime count = 1 if p_uid is None and l_uid is None: - raise DAOException('p_uid or l_uid must be supplied.') + raise ValueError('p_uid or l_uid must be supplied.') if p_uid is not None and l_uid is not None: - raise DAOException('Both p_uid and l_uid were provided, only give one.') + raise ValueError('Both p_uid and l_uid were provided, only give one.') if p_uid is not None: uid_col_name = 'physical_uid' @@ -1046,7 +1008,6 @@ def get_physical_timeseries_message(start: datetime | None = None, end: datetime free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def add_raw_text_message(source_name: str, ts: datetime, correlation_uuid: str, msg, uid: int=None): conn = None try: @@ -1065,7 +1026,6 @@ def add_raw_text_message(source_name: str, ts: datetime, correlation_uuid: str, """ User and authentication CRUD methods """ -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def user_add(uname: str, passwd: str, disabled: bool) -> None: #Generate salted password @@ -1088,7 +1048,6 @@ def user_add(uname: str, passwd: str, disabled: bool) -> None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def user_rm(uname: str) -> None: conn = None try: @@ -1102,7 +1061,6 @@ def user_rm(uname: str) -> None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def user_set_read_only(uname: str, 
read_only: bool) -> None: try: with _get_connection() as conn, conn.cursor() as cursor: @@ -1114,11 +1072,10 @@ def user_set_read_only(uname: str, read_only: bool) -> None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def get_user(uid = None, username = None, auth_token = None) -> User: conn = None if uid is None and username is None and auth_token is None: - raise DAOException('get_user requires at least one parameter') + raise ValueError('get_user requires at least one parameter') else: try: user = None @@ -1141,12 +1098,11 @@ def get_user(uid = None, username = None, auth_token = None) -> User: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def user_ls() -> List: try: with _get_connection() as conn, conn.cursor() as cursor: cursor.execute("select username from users order by uid") - results=cursor.fetchall() + results = cursor.fetchall() return [i[0] for i in results] except Exception as err: @@ -1156,15 +1112,13 @@ def user_ls() -> List: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def user_get_token(username, password) -> str | None: conn = None try: with _get_connection() as conn, conn.cursor() as cursor: cursor.execute("select salt, password, auth_token from users where username=%s",(username,)) - result=cursor.fetchone() - + result = cursor.fetchone() if result is None: return None @@ -1184,7 +1138,6 @@ def user_get_token(username, password) -> str | None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def token_is_valid(user_token) -> bool: ''' Check if token is in database and is valid @@ -1192,7 +1145,7 @@ def token_is_valid(user_token) -> bool: try: with _get_connection() as conn, conn.cursor() as cursor: cursor.execute("select uid from users where auth_token=%s and valid='True'", (user_token,)) - result=cursor.fetchone() + result = cursor.fetchone() if result is None: return False @@ -1204,11 +1157,10 @@ def 
token_is_valid(user_token) -> bool: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) -def token_refresh(uname)-> None: +def token_refresh(uname) -> None: # Auth token to be used on other endpoints. - auth_token=os.urandom(64).hex() + auth_token = os.urandom(64).hex() try: with _get_connection() as conn, conn.cursor() as cursor: @@ -1222,10 +1174,9 @@ def token_refresh(uname)-> None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def user_change_password(username: str, new_passwd: str) -> None: - salt=os.urandom(64).hex() - pass_hash=hashlib.scrypt(password=new_passwd.encode(), salt=salt.encode(), n=2**14, r=8, p=1, maxmem=0, dklen=64).hex() + salt = os.urandom(64).hex() + pass_hash = hashlib.scrypt(password=new_passwd.encode(), salt=salt.encode(), n=2**14, r=8, p=1, maxmem=0, dklen=64).hex() try: with _get_connection() as conn, conn.cursor() as cursor: @@ -1238,17 +1189,16 @@ def user_change_password(username: str, new_passwd: str) -> None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def user_change_password_and_token(new_passwd: str, prev_token: str) -> str: """ Changes user's password and auth token, returns users new auth token upon success """ #Generate salted password - salt=os.urandom(64).hex() - pass_hash=hashlib.scrypt(password=new_passwd.encode(), salt=salt.encode(), n=2**14, r=8, p=1, maxmem=0, dklen=64).hex() + salt = os.urandom(64).hex() + pass_hash = hashlib.scrypt(password=new_passwd.encode(), salt=salt.encode(), n=2**14, r=8, p=1, maxmem=0, dklen=64).hex() #Auth token to be used on other endpoints - auth_token=os.urandom(64).hex() + auth_token = os.urandom(64).hex() try: with _get_connection() as conn, conn.cursor() as cursor: @@ -1266,8 +1216,7 @@ def user_change_password_and_token(new_passwd: str, prev_token: str) -> str: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) -def token_disable(uname)->None: +def 
token_disable(uname) -> None: try: with _get_connection() as conn, conn.cursor() as cursor: @@ -1280,7 +1229,6 @@ def token_disable(uname)->None: free_conn(conn) -@backoff.on_exception(backoff.expo, DAOException, max_time=30) def token_enable(uname)-> None: try: with _get_connection() as conn, conn.cursor() as cursor: diff --git a/src/python/broker-cli.py b/src/python/broker-cli.py index 9ba85829..5d4c9529 100755 --- a/src/python/broker-cli.py +++ b/src/python/broker-cli.py @@ -70,6 +70,7 @@ def str_to_dict(val) -> Dict: ## List logical devices ld_ls_parser = ld_sub_parsers.add_parser('ls', help='list logical devices') +ld_ls_parser.add_argument('--properties', action='store_true', help='Include the properties field in the output', dest='include_props', required=False) ## Create logical devices ld_mk_parser = ld_sub_parsers.add_parser('create', help='create logical device') @@ -294,7 +295,10 @@ def main() -> None: elif args.cmd1 == 'ld': if args.cmd2 == 'ls': devs = dao.get_logical_devices() - tmp_list = list(map(lambda dev: dev.dict(exclude={'properties'}), devs)) + if args.include_props: + tmp_list = list(map(lambda dev: dev.dict(), devs)) + else: + tmp_list = list(map(lambda dev: dev.dict(exclude={'properties'}), devs)) print(pretty_print_json(tmp_list)) elif args.cmd2 == 'create': dev = LogicalDevice.parse_obj(dict_from_file_or_string()) diff --git a/src/python/pdmodels/Models.py b/src/python/pdmodels/Models.py index 576d1eed..497ca8d2 100644 --- a/src/python/pdmodels/Models.py +++ b/src/python/pdmodels/Models.py @@ -4,8 +4,8 @@ class Location(BaseModel): - lat: float - long: float + lat: Optional[float] + long: Optional[float] @staticmethod def from_ttn_device(ttn_dev: Dict): @@ -22,24 +22,25 @@ def from_ttn_device(ttn_dev: Dict): # Allowing extra attributes in this class to make life easier for the webapp - it can pass extra info # to the templates in the device object rather than passing in lists of mappings etc. 
-class PhysicalDevice(BaseModel, extra=Extra.allow): +class BaseDevice(BaseModel, extra=Extra.allow): uid: Optional[int] - source_name: str name: str location: Optional[Location] last_seen: Optional[datetime] - source_ids: Dict = {} properties: Dict = {} # Allowing extra attributes in this class to make life easier for the webapp - it can pass extra info # to the templates in the device object rather than passing in lists of mappings etc. -class LogicalDevice(BaseModel, extra=Extra.allow): - uid: Optional[int] - name: str - location: Optional[Location] - last_seen: Optional[datetime] - properties: Dict = {} +class PhysicalDevice(BaseDevice, extra=Extra.allow): + source_name: str + source_ids: Dict = {} + + +# Allowing extra attributes in this class to make life easier for the webapp - it can pass extra info +# to the templates in the device object rather than passing in lists of mappings etc. +class LogicalDevice(BaseDevice, extra=Extra.allow): + pass class PhysicalToLogicalMapping(BaseModel): diff --git a/src/python/restapi/RestAPI.py b/src/python/restapi/RestAPI.py index 23103f35..c056e554 100644 --- a/src/python/restapi/RestAPI.py +++ b/src/python/restapi/RestAPI.py @@ -288,6 +288,8 @@ async def insert_mapping(mapping: PhysicalToLogicalMapping) -> None: raise HTTPException(status_code=400, detail=err.msg) except dao.DAOException as err: raise HTTPException(status_code=500, detail=err.msg) + except ValueError as err: + raise HTTPException(status_code=400, detail=str(err)) @router.get("/mappings/current/", tags=['device mapping'], response_model=List[PhysicalToLogicalMapping], dependencies=[Depends(token_auth_scheme)]) diff --git a/test/python/test_dao.py b/test/python/test_dao.py index 2bae9aab..9dbbbe03 100644 --- a/test/python/test_dao.py +++ b/test/python/test_dao.py @@ -267,12 +267,12 @@ def test_insert_mapping(self): dao.insert_mapping(mapping) # This should fail due to duplicate start time. 
- self.assertRaises(dao.DAOException, dao.insert_mapping, mapping) + self.assertRaises(ValueError, dao.insert_mapping, mapping) # This should fail due to the physical device is still mapped to something. time.sleep(0.001) mapping.start_time= _now() - self.assertRaises(dao.DAOException, dao.insert_mapping, mapping) + self.assertRaises(ValueError, dao.insert_mapping, mapping) # Unmap the physical device so the next test doesn't fail due to the device being mapped. dao.end_mapping(pd=new_pdev) @@ -306,11 +306,11 @@ def test_get_current_device_mapping(self): self.assertIsNone(dao.get_current_device_mapping(ld=new_ldev)) # Confirm pd or ld must be given. - self.assertRaises(dao.DAOException, dao.get_current_device_mapping) + self.assertRaises(ValueError, dao.get_current_device_mapping) # Confirm only pd or ld can be given. - self.assertRaises(dao.DAOException, dao.get_current_device_mapping, -1, -1) - self.assertRaises(dao.DAOException, dao.get_current_device_mapping, new_pdev, new_ldev) + self.assertRaises(ValueError, dao.get_current_device_mapping, -1, -1) + self.assertRaises(ValueError, dao.get_current_device_mapping, new_pdev, new_ldev) # confirm a physical device can be mapped to a logical device. mapping1 = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=_now()) @@ -385,6 +385,13 @@ def test_end_mapping(self): pdev, new_pdev = self._create_physical_device() ldev, new_ldev = self._create_default_logical_device() + # Confirm pd or ld must be given. + self.assertRaises(ValueError, dao.end_mapping) + + # Confirm only pd or ld can be given. 
+ self.assertRaises(ValueError, dao.end_mapping, -1, -1) + self.assertRaises(ValueError, dao.end_mapping, new_pdev, new_ldev) + mapping1 = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=_now()) dao.insert_mapping(mapping1) @@ -543,11 +550,11 @@ def test_get_unmapped_devices(self): def test_add_raw_json_message(self): uuid1 = uuid.uuid4() - obj1 = {'a':1, 'b':'2', 'c':True, 'd':False} + obj1 = {'a': 1, 'b': '2', 'c': True, 'd': False} dao.add_raw_json_message('ttn', _now(), uuid1, obj1) uuid2 = uuid.uuid4() - obj2 = {'a':1, 'b':'2', 'c':False, 'd':True} + obj2 = {'a': 1, 'b': '2', 'c': False, 'd': True} dao.add_raw_json_message('ttn', _now(), uuid2, obj2, 1) with dao._get_connection() as conn, conn.cursor() as cursor: @@ -667,7 +674,7 @@ def test_get_physical_timeseries_messages(self): self.assertEqual(len(msgs), 1) self.assertEqual(msgs[0], msg_ts[0]) - self.assertRaises(dao.DAOException, dao.get_physical_timeseries_message) + self.assertRaises(ValueError, dao.get_physical_timeseries_message) self.assertRaises(TypeError, dao.get_physical_timeseries_message, p_uid='x') self.assertRaises(TypeError, dao.get_physical_timeseries_message, l_uid='x') self.assertRaises(TypeError, dao.get_physical_timeseries_message, start='x', l_uid=1) @@ -703,13 +710,13 @@ def test_add_raw_text_message(self): dao.add_raw_text_message('ttn', _now(), uuid1, msg1) def test_user_add(self): - uname= _create_test_user() - users=dao.user_ls() + uname = _create_test_user() + users = dao.user_ls() self.assertEqual(uname, users[-1]) def test_user_rm(self): - uname= _create_test_user() + uname = _create_test_user() dao.user_rm(uname) self.assertFalse(uname in dao.user_ls()) @@ -722,39 +729,39 @@ def test_user_set_read_only(self): def test_add_non_unique_user(self): #Check that two users with the same username cannot be created - uname= _create_test_user() + uname = _create_test_user() self.assertRaises(dao.DAOUniqeConstraintException, dao.user_add, uname, 'password', False) def 
test_get_user_token(self): - uname= _create_test_user() + uname = _create_test_user() self.assertIsNotNone(dao.user_get_token(username=uname, password='password')) self.assertIsNone(dao.user_get_token(username=uname, password='x')) def test_user_token_refresh(self): - uname= _create_test_user() - token1=dao.user_get_token(username=uname, password='password') + uname = _create_test_user() + token1 = dao.user_get_token(username=uname, password='password') dao.token_refresh(uname=uname) - token2=dao.user_get_token(username=uname, password='password') + token2 = dao.user_get_token(username=uname, password='password') self.assertNotEqual(token1, token2) def test_user_token_disable(self): - uname= _create_test_user() - user_token=dao.user_get_token(username=uname, password='password') + uname = _create_test_user() + user_token = dao.user_get_token(username=uname, password='password') dao.token_disable(uname) self.assertFalse(dao.token_is_valid(user_token)) def test_user_token_enable(self): - uname= _create_test_user() - user_token=dao.user_get_token(username=uname, password='password') + uname = _create_test_user() + user_token = dao.user_get_token(username=uname, password='password') dao.token_disable(uname) dao.token_enable(uname) self.assertTrue(dao.token_is_valid(user_token)) def test_user_change_password(self): - uname= _create_test_user() + uname = _create_test_user() dao.user_change_password(uname, 'nuiscyeriygsreiuliu') self.assertIsNotNone(dao.user_get_token(username=uname, password='nuiscyeriygsreiuliu')) diff --git a/test/python/test_restapi.py b/test/python/test_restapi.py index 2ad80647..1b3fba10 100644 --- a/test/python/test_restapi.py +++ b/test/python/test_restapi.py @@ -11,6 +11,7 @@ from typing import Tuple import os +from test_utils import now logging.basicConfig(level=logging.INFO, format='%(asctime)s %(name)s: %(message)s', datefmt='%Y-%m-%dT%H:%M:%S%z') logger = logging.getLogger(__name__) @@ -62,13 +63,10 @@ def setUp(self): self._admin_token = 
dao.user_get_token(self._admin_username, 'password') self._ADMIN_HEADERS['Authorization'] = f'Bearer {self._admin_token}' - def now(self): - return datetime.datetime.now(tz=datetime.timezone.utc) - def _create_physical_device(self, expected_code=201, req_header=_HEADERS, dev=None) -> Tuple[ PhysicalDevice, PhysicalDevice]: if dev is None: - last_seen = self.now() + last_seen = now() dev = PhysicalDevice(source_name='ttn', name='Test Device', location=Location(lat=3, long=-31), last_seen=last_seen, source_ids={'appId': 'x', 'devId': 'y'}, @@ -292,10 +290,12 @@ def test_get_device_notes(self): LOGICAL DEVICES --------------------------------------------------------------------------""" - def _create_default_logical_device(self, expected_code=201, req_header=_HEADERS, dev=None) -> Tuple[ - LogicalDevice, LogicalDevice]: + def _create_default_logical_device(self, expected_code=201, req_header=None, dev=None) -> Tuple[LogicalDevice, LogicalDevice]: + if req_header is None: + req_header = self._HEADERS + if dev is None: - last_seen = self.now() + last_seen = now() dev = LogicalDevice(name='Test Device', location=Location(lat=3, long=-31), last_seen=last_seen, properties={'appId': 'x', 'devId': 'y', 'other': 'z'}) @@ -416,7 +416,7 @@ def test_delete_logical_device(self): def test_insert_mapping(self): pdev, new_pdev = self._create_physical_device(req_header=self._ADMIN_HEADERS) ldev, new_ldev = self._create_default_logical_device(req_header=self._ADMIN_HEADERS) - mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=self.now()) + mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=now()) # This should work. url = f'{_BASE}/mappings/' @@ -430,7 +430,7 @@ def test_insert_mapping(self): # This should fail because the physical device has a current mapping. 
time.sleep(0.001) - mapping.start_time = self.now() + mapping.start_time = now() payload = mapping.json() r = requests.post(url, headers=self._ADMIN_HEADERS, data=payload) self.assertEqual(r.status_code, 400) @@ -438,14 +438,14 @@ def test_insert_mapping(self): # End the current mapping and create a new one. This should work and # simulates 'pausing' a physical device while working on it. requests.patch(f'{url}physical/end/{mapping.pd.uid}', headers=self._ADMIN_HEADERS) - mapping.start_time = self.now() + mapping.start_time = now() payload = mapping.json() r = requests.post(url, headers=self._ADMIN_HEADERS, data=payload) self.assertEqual(r.status_code, 201) pdx = copy.deepcopy(new_pdev) pdx.uid = -1 - mapping = PhysicalToLogicalMapping(pd=pdx, ld=new_ldev, start_time=self.now()) + mapping = PhysicalToLogicalMapping(pd=pdx, ld=new_ldev, start_time=now()) # This should fail due to invalid physical uid. payload = mapping.json() r = requests.post(url, headers=self._ADMIN_HEADERS, data=payload) @@ -453,7 +453,7 @@ def test_insert_mapping(self): ldx = copy.deepcopy(new_ldev) ldx.uid = -1 - mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=ldx, start_time=self.now()) + mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=ldx, start_time=now()) # End the current mapping for the phyiscal device so the RESTAPI doesn't # return status 400. @@ -467,7 +467,7 @@ def test_insert_mapping(self): def test_get_mapping_from_physical(self): pdev, new_pdev = self._create_physical_device(req_header=self._ADMIN_HEADERS) ldev, new_ldev = self._create_default_logical_device(req_header=self._ADMIN_HEADERS) - mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=self.now()) + mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=now()) url = f'{_BASE}/mappings/' payload = mapping.json() @@ -491,7 +491,7 @@ def test_get_mapping_from_physical(self): # Confirm the latest mapping is returned. 
time.sleep(0.001) mapping2 = copy.deepcopy(mapping) - mapping2.start_time = self.now() + mapping2.start_time = now() self.assertNotEqual(mapping, mapping2) payload = mapping2.json() r = requests.post(url, headers=self._ADMIN_HEADERS, data=payload) @@ -505,7 +505,7 @@ def test_get_mapping_from_physical(self): def test_get_mapping_from_logical(self): pdev, new_pdev = self._create_physical_device(req_header=self._ADMIN_HEADERS) ldev, new_ldev = self._create_default_logical_device(req_header=self._ADMIN_HEADERS) - mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=self.now()) + mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=now()) url = f'{_BASE}/mappings/' payload = mapping.json() @@ -529,7 +529,7 @@ def test_get_mapping_from_logical(self): # Confirm the latest mapping is returned. time.sleep(0.001) mapping2 = copy.deepcopy(mapping) - mapping2.start_time = self.now() + mapping2.start_time = now() self.assertNotEqual(mapping, mapping2) payload = mapping2.json() r = requests.post(url, headers=self._ADMIN_HEADERS, data=payload) @@ -546,7 +546,7 @@ def compare_mappings_ignore_end_time(self, m1: PhysicalToLogicalMapping, m2: Phy def test_get_latest_mapping_from_physical(self): pdev, new_pdev = self._create_physical_device(req_header=self._ADMIN_HEADERS) ldev, new_ldev = self._create_default_logical_device(req_header=self._ADMIN_HEADERS) - mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=self.now()) + mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=now()) url = f'{_BASE}/mappings/' @@ -590,7 +590,7 @@ def test_get_latest_mapping_from_physical(self): def test_get_latest_mapping_from_logical(self): pdev, new_pdev = self._create_physical_device(req_header=self._ADMIN_HEADERS) ldev, new_ldev = self._create_default_logical_device(req_header=self._ADMIN_HEADERS) - mapping = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=self.now()) + mapping = 
PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=now()) url = f'{_BASE}/mappings/' @@ -639,7 +639,7 @@ def test_get_all_logical_device_mappings(self): # Using the DAO to create the test data, the REST API methods to do this are # tested elsewhere. - mapping1 = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=self.now()) + mapping1 = PhysicalToLogicalMapping(pd=new_pdev, ld=new_ldev, start_time=now()) dao.insert_mapping(mapping1) time.sleep(0.1) dao.end_mapping(ld=new_ldev.uid) @@ -649,7 +649,7 @@ def test_get_all_logical_device_mappings(self): pdev2.name = 'D2' pdev2, new_pdev2 = self._create_physical_device(dev=pdev2, req_header=self._ADMIN_HEADERS) time.sleep(0.1) - mapping2 = PhysicalToLogicalMapping(pd=new_pdev2, ld=new_ldev, start_time=self.now()) + mapping2 = PhysicalToLogicalMapping(pd=new_pdev2, ld=new_ldev, start_time=now()) dao.insert_mapping(mapping2) time.sleep(0.1) dao.end_mapping(ld=new_ldev.uid) @@ -659,7 +659,7 @@ def test_get_all_logical_device_mappings(self): pdev3.name = 'D3' pdev3, new_pdev3 = self._create_physical_device(dev=pdev3, req_header=self._ADMIN_HEADERS) time.sleep(0.1) - mapping3 = PhysicalToLogicalMapping(pd=new_pdev3, ld=new_ldev, start_time=self.now()) + mapping3 = PhysicalToLogicalMapping(pd=new_pdev3, ld=new_ldev, start_time=now()) dao.insert_mapping(mapping3) url = f'{_BASE}/mappings/logical/all/{new_ldev.uid}' From fae8cf28bc5a392f9c1bec5fd12043842eecaddc Mon Sep 17 00:00:00 2001 From: dajtxx Date: Thu, 6 Jun 2024 10:00:16 +1000 Subject: [PATCH 3/7] Restoring lost changes to files. 
--- db/init.d/init_db.sql | 17 ++++++++++------- src/python/api/client/DAO.py | 1 - 2 files changed, 10 insertions(+), 8 deletions(-) diff --git a/db/init.d/init_db.sql b/db/init.d/init_db.sql index a37652c9..01425bad 100755 --- a/db/init.d/init_db.sql +++ b/db/init.d/init_db.sql @@ -1,3 +1,6 @@ +CREATE EXTENSION postgis; +CREATE EXTENSION pgcrypto; + create table if not exists sources ( source_name text primary key not null ); @@ -6,7 +9,7 @@ create table if not exists physical_devices ( uid integer generated always as identity primary key, source_name text not null references sources, name text not null, - location point, + location geometry('POINT', 4283), last_seen timestamptz, -- Store only top level key value pairs here; it is used -- for quickly finding a device using information carried @@ -72,7 +75,7 @@ create table if not exists device_blobs ( create table if not exists logical_devices ( uid integer generated always as identity primary key, name text not null, - location point, + location geometry('POINT', 4283), last_seen timestamptz, properties jsonb not null default '{}' ); @@ -100,11 +103,11 @@ create table if not exists users( read_only boolean default True not null ); +create table if not exists version ( + version integer not null +); + create index if not exists pd_src_id_idx on physical_devices using GIN (source_ids); insert into sources values ('ttn'), ('greenbrain'), ('wombat'), ('ydoc'), ('ict_eagleio'); - --- Enable the PostGIS extensions --- CREATE EXTENSION postgis; --- CREATE EXTENSION postgis_raster; --- CREATE EXTENSION postgis_sfcgal; +insert into version values (2); diff --git a/src/python/api/client/DAO.py b/src/python/api/client/DAO.py index 5649725d..08351c04 100644 --- a/src/python/api/client/DAO.py +++ b/src/python/api/client/DAO.py @@ -79,7 +79,6 @@ def _get_connection(): if conn_pool is None: logging.info('Creating connection pool, registering type converters.') conn_pool = pool.ThreadedConnectionPool(1, 5) - # 
register_type_adapters() register_uuid() conn = conn_pool.getconn() From abe96da0fc328dc5c09770d92d5dd9c235a7f49e Mon Sep 17 00:00:00 2001 From: dajtxx Date: Thu, 6 Jun 2024 11:40:06 +1000 Subject: [PATCH 4/7] Restoring lost upgrade script. --- db/upgrade/002.sql | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 db/upgrade/002.sql diff --git a/db/upgrade/002.sql b/db/upgrade/002.sql new file mode 100644 index 00000000..a6920962 --- /dev/null +++ b/db/upgrade/002.sql @@ -0,0 +1,17 @@ +CREATE EXTENSION postgis; +CREATE EXTENSION pgcrypto; + +SELECT AddGeometryColumn('logical_devices','geom',4283,'POINT',2); +SELECT AddGeometryColumn('physical_devices','geom',4283,'POINT',2); + +UPDATE logical_devices SET geom = ST_MakePoint(location[1], location[0]) WHERE location IS NOT NULL; +UPDATE physical_devices SET geom = ST_MakePoint(location[1], location[0]) WHERE location IS NOT NULL; + +ALTER TABLE logical_devices DROP COLUMN location; +ALTER TABLE physical_devices DROP COLUMN location; + +ALTER TABLE logical_devices RENAME COLUMN geom TO location; +ALTER TABLE physical_devices RENAME COLUMN geom TO location; + +TRUNCATE version; +insert into version values (2); From b93558265e818df16a7bf8ee68f669d989c46620 Mon Sep 17 00:00:00 2001 From: David Taylor Date: Thu, 6 Jun 2024 14:39:06 +1000 Subject: [PATCH 5/7] Webapp changes to handle the geometry location columns. 
--- src/python/api/client/DAO.py | 26 ++++++++++++++++++++++++-- src/python/restapi/RestAPI.py | 1 + src/www/app/main.py | 4 +++- 3 files changed, 28 insertions(+), 3 deletions(-) diff --git a/src/python/api/client/DAO.py b/src/python/api/client/DAO.py index 08351c04..5d342fd9 100644 --- a/src/python/api/client/DAO.py +++ b/src/python/api/client/DAO.py @@ -354,7 +354,18 @@ def update_physical_device(device: PhysicalDevice) -> PhysicalDevice: for name, val in vars(device).items(): if val != current_values[name]: update_col_names.append(f'{name} = %s') - update_col_values.append(val if name not in ('source_ids', 'properties') else Json(val)) + match name: + case 'location': + if val is not None: + val = AsIs(f"ST_PointFromText('POINT({val.long} {val.lat})')") + + update_col_values.append(val) + + case 'properties' | 'source_ids': + update_col_values.append(Json(val)) + + case _: + update_col_values.append(val) logging.debug(update_col_names) logging.debug(update_col_values) @@ -589,7 +600,18 @@ def update_logical_device(device: LogicalDevice) -> LogicalDevice: for name, val in vars(device).items(): if val != current_values[name]: update_col_names.append(f'{name} = %s') - update_col_values.append(val if name != 'properties' else Json(val)) + match name: + case 'location': + if val is not None: + val = AsIs(f"ST_PointFromText('POINT({val.long} {val.lat})')") + + update_col_values.append(val) + + case 'properties': + update_col_values.append(Json(val)) + + case _: + update_col_values.append(val) if len(update_col_names) < 1: return device diff --git a/src/python/restapi/RestAPI.py b/src/python/restapi/RestAPI.py index c056e554..387363e3 100644 --- a/src/python/restapi/RestAPI.py +++ b/src/python/restapi/RestAPI.py @@ -250,6 +250,7 @@ async def update_logical_device(device: LogicalDevice) -> LogicalDevice: except dao.DAODeviceNotFound as daonf: raise HTTPException(status_code=404, detail=daonf.msg) except dao.DAOException as err: + logging.exception(err) raise 
HTTPException(status_code=500, detail=err.msg) diff --git a/src/www/app/main.py b/src/www/app/main.py index dd89aecb..19291ca9 100644 --- a/src/www/app/main.py +++ b/src/www/app/main.py @@ -1,4 +1,5 @@ import atexit +import logging import time from typing import Tuple @@ -646,6 +647,7 @@ def UpdateLogicalDevice(): return 'Success', 200 except requests.exceptions.HTTPError as e: + logging.exception(e) if e.response.status_code == 403: return f"You do not have sufficient permissions to make this change", e.response.status_code @@ -669,7 +671,7 @@ def format_time_stamp(unformatted_time: datetime) -> str: def format_location_string(location: Location) -> str: formatted_location = '' - if location is not None: + if location is not None and location.lat is not None and location.long is not None: formatted_location = f'{location.lat:.5f}, {location.long:.5f}' return formatted_location From 0f7e9e1032f3c2e00c56ae20f4bc5ae72d06ef8c Mon Sep 17 00:00:00 2001 From: David Taylor Date: Fri, 7 Jun 2024 09:20:53 +1000 Subject: [PATCH 6/7] Removed unused variable. --- src/python/api/client/DAO.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/python/api/client/DAO.py b/src/python/api/client/DAO.py index 5d342fd9..896cedf8 100644 --- a/src/python/api/client/DAO.py +++ b/src/python/api/client/DAO.py @@ -41,7 +41,6 @@ class DAOUniqeConstraintException(DAOException): conn_pool = None -_geometry_type_oid = None _physical_device_select_all_cols = """ select uid, source_name, name, (select row_to_json(_) from (select ST_Y(location) as lat, ST_X(location) as long) as _) as location, last_seen, source_ids, properties from physical_devices """ From c58a997908bc429cc09f78b1bb02b6a3242d1da0 Mon Sep 17 00:00:00 2001 From: David Taylor Date: Fri, 7 Jun 2024 11:15:12 +1000 Subject: [PATCH 7/7] Fixed webapp map code to handle location lat or long being None, changed docker compose service restart and logging policies. 
--- compose/docker-compose.yml | 51 ++++++++++++++++++++++++++++++++------ src/www/app/main.py | 2 +- 2 files changed, 45 insertions(+), 8 deletions(-) diff --git a/compose/docker-compose.yml b/compose/docker-compose.yml index 0bdc276a..fddc48ef 100644 --- a/compose/docker-compose.yml +++ b/compose/docker-compose.yml @@ -30,7 +30,11 @@ services: restapi: image: broker/python-base - restart: "no" + logging: + driver: local + options: + max-file: "3" + restart: unless-stopped env_file: - .env depends_on: @@ -46,6 +50,11 @@ services: website: image: broker/mgmt-app + logging: + driver: local + options: + max-file: "3" + restart: unless-stopped env_file: - .env environment: @@ -59,7 +68,11 @@ services: ttn_webhook: image: broker/python-base - restart: "no" + logging: + driver: local + options: + max-file: "3" + restart: unless-stopped env_file: - .env profiles: @@ -74,7 +87,11 @@ services: ttn_processor: image: broker/python-base - restart: "no" + logging: + driver: local + options: + max-file: "3" + restart: unless-stopped env_file: - .env profiles: @@ -93,7 +110,11 @@ services: ttn_decoder: image: broker/ttn_decoder - restart: "no" + logging: + driver: local + options: + max-file: "3" + restart: unless-stopped env_file: - .env profiles: @@ -106,7 +127,11 @@ services: ydoc: image: broker/python-base - restart: "no" + logging: + driver: local + options: + max-file: "3" + restart: unless-stopped env_file: - .env profiles: @@ -123,7 +148,11 @@ services: wombat: image: broker/python-base - restart: "no" + logging: + driver: local + options: + max-file: "3" + restart: unless-stopped env_file: - .env profiles: @@ -140,7 +169,11 @@ services: lm: image: broker/python-base - restart: "no" + logging: + driver: local + options: + max-file: "3" + restart: unless-stopped env_file: - .env depends_on: @@ -155,6 +188,10 @@ services: delivery: image: broker/python-base + logging: + driver: local + options: + max-file: "3" restart: unless-stopped env_file: - .env diff --git 
a/src/www/app/main.py b/src/www/app/main.py index 19291ca9..83a47580 100644 --- a/src/www/app/main.py +++ b/src/www/app/main.py @@ -474,7 +474,7 @@ def show_map(): # folium.Marker([-31.956194913619864, 115.85911692112582], popup="Mt. Hood Meadows", tooltip='click me').add_to(center_map) data: List[LogicalDevice] = get_logical_devices(session.get('token'), include_properties=True) for dev in data: - if dev.location is not None: + if dev.location is not None and dev.location.lat is not None and dev.location.long is not None: color = 'blue' if dev.last_seen is None: