Merge branch 'master' into scmn
dajtxx committed Sep 11, 2024
2 parents a22e7d2 + 69ad24e commit 4edc52b
Showing 11 changed files with 671 additions and 560 deletions.
121 changes: 115 additions & 6 deletions src/python/api/client/DAO.py
@@ -7,15 +7,17 @@
 import psycopg2.errors
 from psycopg2.extensions import AsIs
 from psycopg2.extras import Json, register_uuid
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Tuple, Union
 import hashlib
 import os
 
 import BrokerConstants
 from pdmodels.Models import BaseDevice, DeviceNote, LogicalDevice, PhysicalDevice, PhysicalToLogicalMapping, User
+from threading import Lock
 
 logging.captureWarnings(True)
 
+_lock = Lock()
 
 class DAOException(Exception):
     def __init__(self, msg: str = None, wrapped: Exception = None):
@@ -50,10 +52,19 @@ class DAOUniqeConstraintException(DAOException):
     """
 
 
+_stopped = False
+
 def stop() -> None:
+    global _stopped
     logging.info('Closing connection pool.')
-    if conn_pool is not None:
-        conn_pool.closeall()
+    _lock.acquire()
+    try:
+        if not _stopped:
+            _stopped = True
+            if conn_pool is not None:
+                conn_pool.closeall()
+    finally:
+        _lock.release()
 
 
 @contextmanager
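
The reworked stop() above guards pool shutdown with a module-level Lock plus a _stopped flag so that repeated or concurrent shutdown calls close the pool at most once. A minimal standalone sketch of the same idempotent-shutdown pattern (stop_once and close_fn are illustrative names, not part of this codebase):

import threading

_lock = threading.Lock()
_stopped = False

def stop_once(close_fn) -> None:
    # Run close_fn at most once, even when called from many threads.
    global _stopped
    with _lock:
        if not _stopped:
            _stopped = True
            close_fn()

# Ten threads race to shut down; close_fn runs exactly once.
calls = []
threads = [threading.Thread(target=stop_once, args=(lambda: calls.append(1),)) for _ in range(10)]
for t in threads: t.start()
for t in threads: t.join()
assert len(calls) == 1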
@@ -73,8 +84,8 @@ def _get_connection():
     # This throws an exception if the db hostname cannot be resolved, or
     # the database is not accepting connections.
     try:
-        # Try lazy initialisation the connection pool and Location/point
-        # converter to give the db as much time as possible to start.
+        # Try lazy initialisation the connection pool to give the db as
+        # much time as possible to start.
         if conn_pool is None:
             logging.info('Creating connection pool, registering type converters.')
             conn_pool = pool.ThreadedConnectionPool(1, 5)
@@ -792,7 +803,7 @@ def toggle_device_mapping(is_active: bool, pd: Optional[Union[PhysicalDevice, in
             _toggle_device_mapping(conn, is_active, pd, ld)
             return
     except Exception as err:
-        raise err if isinstance(err, DAOException) else DAOException('pause_current_device_mapping failed.', err)
+        raise err if isinstance(err, DAOException) else DAOException('toggle_device_mapping failed.', err)
     finally:
         if conn is not None:
             free_conn(conn)
@@ -1319,3 +1330,101 @@ def token_enable(uname)-> None:
         if conn is not None:
             free_conn(conn)
 
+
+# ===========================================================================
+# Delivery thread related functions
+# ===========================================================================
+
+def _get_delivery_table_id(name: str) -> str:
+    #return sql.Identifier(f'{name}_delivery_q')
+    return f'{name}_delivery_q'
+
+def create_delivery_table(name: str) -> None:
+    logging.info(f'Creating message delivery table for service {name}')
+
+    try:
+        qry = f"""create table if not exists {_get_delivery_table_id(name)} (
+                    uid integer generated always as identity primary key,
+                    json_msg jsonb not null,
+                    retry_count integer not null default 0)"""
+
+        with _get_connection() as conn, conn.cursor() as cursor:
+            cursor.execute(qry)
+
+    except Exception as err:
+        raise err if isinstance(err, DAOException) else DAOException('create_delivery_table failed.', err)
+    finally:
+        if conn is not None:
+            conn.commit()
+            free_conn(conn)
+
+def get_delivery_msg_count(name: str) -> int:
+    try:
+        with _get_connection() as conn, conn.cursor() as cursor:
+            cursor.execute(f'select count(uid) from {_get_delivery_table_id(name)}')
+            return cursor.fetchone()[0]
+
+    except Exception as err:
+        raise err if isinstance(err, DAOException) else DAOException('get_delivery_msg_count failed.', err)
+    finally:
+        if conn is not None:
+            conn.commit()
+            free_conn(conn)
+
+def get_delivery_msg_batch(name: str, from_uid: int = 0, batch_size: int = 10) -> List[Tuple[int, list[dict[Any]]]]:
+    try:
+        with _get_connection() as conn, conn.cursor() as cursor:
+            # Using order by asc in case time series databases need values inserted in timestamp order.
+            cursor.execute(f'select uid, json_msg, retry_count from {_get_delivery_table_id(name)} where uid > %s order by uid asc limit %s', (from_uid, batch_size))
+            if cursor.rowcount < 1:
+                return 0, []
+
+            return cursor.fetchall()
+
+    except Exception as err:
+        raise err if isinstance(err, DAOException) else DAOException('get_delivery_msg_batch failed.', err)
+    finally:
+        if conn is not None:
+            conn.commit()
+            free_conn(conn)
+
+def add_delivery_msg(name: str, msg: dict[Any]) -> None:
+    try:
+        with _get_connection() as conn, conn.cursor() as cursor:
+            cursor.execute(f'insert into {_get_delivery_table_id(name)} (json_msg) values (%s)', (Json(msg), ))
+
+    except Exception as err:
+        raise err if isinstance(err, DAOException) else DAOException('add_delivery_msg failed.', err)
+    finally:
+        if conn is not None:
+            conn.commit()
+            free_conn(conn)
+
+def remove_delivery_msg(name: str, uid: int) -> None:
+    try:
+        with _get_connection() as conn, conn.cursor() as cursor:
+            cursor.execute(f'delete from {_get_delivery_table_id(name)} where uid = %s', (uid, ))
+
+    except Exception as err:
+        raise err if isinstance(err, DAOException) else DAOException('remove_delivery_msg failed.', err)
+    finally:
+        if conn is not None:
+            conn.commit()
+            free_conn(conn)
+
+def retry_delivery_msg(name: str, uid: int) -> None:
+    try:
+        with _get_connection() as conn, conn.cursor() as cursor:
+            cursor.execute(f'select retry_count from {_get_delivery_table_id(name)} where uid = %s', (uid, ))
+            if cursor.rowcount < 1:
+                return
+
+            retry_count = cursor.fetchone()[0] + 1
+            cursor.execute(f'update {_get_delivery_table_id(name)} set retry_count = %s where uid = %s', (retry_count, uid))
+
+    except Exception as err:
+        raise err if isinstance(err, DAOException) else DAOException('retry_delivery_msg failed.', err)
+    finally:
+        if conn is not None:
+            conn.commit()
+            free_conn(conn)
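
The functions above implement a simple per-service message queue in Postgres: add_delivery_msg enqueues a JSON message, get_delivery_msg_batch reads pending messages in uid order, and remove_delivery_msg / retry_delivery_msg acknowledge or re-mark them. A hedged sketch of how a delivery thread might drain the queue (the service name 'ubidots', the import path, the retry limit, and the send callback are assumptions for illustration, not part of this commit):

import time
import api.client.DAO as dao

def delivery_loop(send) -> None:
    # send(msg) is assumed to return True on successful delivery.
    dao.create_delivery_table('ubidots')
    while True:
        batch = dao.get_delivery_msg_batch('ubidots')
        if batch == (0, []) or not batch:   # empty-queue sentinel from the DAO
            time.sleep(5)
            continue

        for uid, msg, retry_count in batch:
            if send(msg):
                dao.remove_delivery_msg('ubidots', uid)
            elif retry_count < 5:
                dao.retry_delivery_msg('ubidots', uid)
            else:
                dao.remove_delivery_msg('ubidots', uid)   # drop after 5 attempts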
21 changes: 13 additions & 8 deletions src/python/api/client/Ubidots.py
@@ -4,6 +4,8 @@
 
 from pdmodels.Models import Location, LogicalDevice
 
+import util.LoggingUtil as lu
+
 BASE_1_6 = "https://industrial.api.ubidots.com.au/api/v1.6"
 BASE_2_0 = "https://industrial.api.ubidots.com.au/api/v2.0"
 
@@ -100,25 +102,25 @@ def get_all_devices() -> List[LogicalDevice]:
 
         if response_obj['next'] is None:
            break
 
         page += 1
 
     return devices
 
 
-def get_device(label: str) -> LogicalDevice:
+def get_device(label: str, logging_ctx: dict) -> LogicalDevice:
     url = f'{BASE_2_0}/devices/~{label}'
     time.sleep(0.3)
     r = requests.get(url, headers=headers)
     if r.status_code != 200:
-        logging.warn(f'devices/~{label} received response: {r.status_code}: {r.reason}')
+        lu.cid_logger.error(f'devices/~{label} received response: {r.status_code}: {r.reason}', extra=logging_ctx)
         return None
 
     response_obj = json.loads(r.content)
     return _dict_to_logical_device(response_obj)
 
 
-def post_device_data(label: str, body) -> None:
+def post_device_data(label: str, body: dict, logging_ctx: dict) -> bool:
     """
     Post timeseries data to an Ubidots device.
@@ -130,22 +132,25 @@ def post_device_data(label: str, body) -> None:
         'temperature': {'value': 37.17, 'timestamp': 1643934748392}
     }
     """
+    time.sleep(0.3)
     url = f'{BASE_1_6}/devices/{label}'
     hdrs = headers
     hdrs['Content-Type'] = 'application/json'
     body_str = json.dumps(body)
     r = requests.post(url, headers=hdrs, data=body_str)
     if r.status_code != 200:
-        logging.info(f'POST {url}: {r.status_code}: {r.reason}')
-        logging.info(body_str)
+        lu.cid_logger.info(f'POST {url}: {r.status_code}: {r.reason}', extra=logging_ctx)
+        return False
 
+    return True
 
 
-def update_device(label: str, patch_obj) -> None:
+def update_device(label: str, patch_obj: dict, logging_ctx: dict) -> None:
     url = f'{BASE_2_0}/devices/~{label}'
+    time.sleep(0.3)
     response = requests.patch(url, headers=headers, json=patch_obj)
     if response.status_code != 200:
-        logging.warning(f'PATCH response: {response.status_code}: {response.reason}')
+        lu.cid_logger.error(f'PATCH response: {response.status_code}: {response.reason}', extra=logging_ctx)
 
 
 def main():
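
In Ubidots.py the module-level logging calls are replaced with lu.cid_logger calls that carry a per-message logging context, and post_device_data now reports success to its caller instead of only logging failures. A hedged usage sketch (the import path, device label, and context key are assumptions; the body shape comes from the function's own docstring):

import api.client.Ubidots as ubidots

ctx = {'correlation_id': 'abc123'}   # whatever dict lu.cid_logger expects
body = {'temperature': {'value': 37.17, 'timestamp': 1643934748392}}

if not ubidots.post_device_data('my-device-label', body, ctx):
    # False means the POST failed; the caller can park the message in the
    # delivery queue added to DAO.py and retry it later.
    print('send failed, queueing for retry')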
13 changes: 7 additions & 6 deletions src/python/broker-cli.py
@@ -19,6 +19,7 @@ def str_to_logical_device(val) -> LogicalDevice:
 
 
 def str_to_dict(val) -> Dict:
+    print(val, type(val))
     return json.loads(val)
 
 
@@ -207,7 +208,7 @@ def plain_pd_list(devs: List[PhysicalDevice]):
 
 
 def dict_from_file_or_string() -> dict:
-    if (hasattr(args, 'pd') or hasattr(args, 'ld')) and (hasattr(args, 'in_filename') and args.in_filename is not None):
+    if ((hasattr(args, 'pd') and args.pd is not None) or (hasattr(args, 'ld') and args.ld is not None)) and (hasattr(args, 'in_filename') and args.in_filename is not None):
         raise RuntimeError('error: --json and --file are mutually exclusive.')
 
     json_obj = None
@@ -283,7 +284,7 @@ def main() -> None:
 
             dev = PhysicalDevice.parse_obj(dev)
             print(pretty_print_json(dao.update_physical_device(dev)))
-
+
         elif args.cmd2 == 'rm':
             # Delete all physical_logical mappings to avoid foreign key violation
             mappings = dao.get_physical_device_mappings(pd=args.p_uid)
@@ -373,9 +374,9 @@ def main() -> None:
             current_mapping = dao.get_current_device_mapping(pd=args.p_uid, ld=args.l_uid)
             if current_mapping is None:
                 raise RuntimeError("No current mapping for the uid given")
-
+
             dao.toggle_device_mapping(args.enable, args.p_uid, args.l_uid)
-
+
     elif args.cmd1 == 'users':
         if args.cmd2 == 'add':
             dao.user_add(uname=args.uname, passwd=args.passwd, disabled=args.disabled)
@@ -391,13 +392,13 @@ def main() -> None:
 
         elif args.enable == True:
             dao.token_enable(uname=args.uname)
-
+
         if args.refresh == True:
             dao.token_refresh(uname=args.uname)
-
+
         elif args.cmd2 == 'chng':
             dao.user_change_password(args.uname, args.passwd)
 
         elif args.cmd2 == 'ls':
             print(dao.user_ls())
 
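
The dict_from_file_or_string() fix matters because argparse defines an attribute for every declared option, so hasattr(args, 'pd') is True even when --json was never supplied; the guard must also check that the value is not None. A minimal sketch demonstrating the difference (option and dest names here are illustrative and may not match broker-cli.py exactly):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--json', dest='pd', default=None)
parser.add_argument('--file', dest='in_filename', default=None)
args = parser.parse_args([])        # neither option supplied

print(hasattr(args, 'pd'))          # True: attribute exists, value is None
print(args.pd is not None)          # False: option was not actually given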