diff --git a/.gitignore b/.gitignore index 55887ccc..92ed2c6f 100644 --- a/.gitignore +++ b/.gitignore @@ -231,4 +231,7 @@ swag/* !swag/nginx/proxy-confs/dpi-device-manager.subfolder.conf *.pem -settings.json \ No newline at end of file +settings.json + +#303 back up database +backup-db/ diff --git a/compose/.env b/compose/.env index d2cf7f4f..617fde97 100755 --- a/compose/.env +++ b/compose/.env @@ -15,6 +15,15 @@ PGDATABASE=broker POSTGRES_PASSWORD=CHANGEME POSTGRES_DB=broker +#TSDB -- KEEP SYNCED WITH ./.tsdb_env +TSDB_USER=postgres +TSDB_PASSWORD=admin +TSDB_PORT=5432 +TSDB_HOST=tsdb +TSDB_DB=postgres +TSDB_TABLE=timeseries #set in init.sql +NAMING_UPDATE_INTERVAL=600 ##how often on new message to map will it check to update word_list and type_maps + # Set this to the root of the git repo as it is seen by the containers. PYTHONPATH=/home/broker/python diff --git a/compose/.tsdb_env b/compose/.tsdb_env new file mode 100644 index 00000000..add83b6a --- /dev/null +++ b/compose/.tsdb_env @@ -0,0 +1,3 @@ +POSTGRES_USER=postgres +POSTGRES_PASSWORD=admin +POSTGRES_PORT=5432 diff --git a/compose/docker-compose.yml b/compose/docker-compose.yml index 41cdaadd..cfa62d52 100644 --- a/compose/docker-compose.yml +++ b/compose/docker-compose.yml @@ -253,6 +253,7 @@ services: working_dir: "/home/broker/python" entrypoint: [ "python", "-m", "delivery.FRRED" ] + axistech: image: broker/python-base logging: @@ -266,10 +267,39 @@ services: - frred depends_on: db: + entrypoint: [ "python", "-m", "pollers.axistech" ] + + timescaledb: + build: ../timescale # Point to the directory containing the custom Dockerfile + hostname: "tsdb" + image: custom-timescaledb:latest + restart: "no" + + env_file: + - .tsdb_env + volumes: + - ../timescale/init.sql:/docker-entrypoint-initdb.d/init.sql + - ../timescale/pgbackrest/pgbackrest.conf:/home/postgres/pgdata/backup/pgbackrest.conf + - ../timescale/logs:/var/log/timescale + ports: + - "5433:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${TSDB_USER}"] + interval: 10s + timeout: 5s + retries: 3 + + iota_tsdb_decoder: + image: broker/python-base + restart: "no" + env_file: + - .env + depends_on: + timescaledb: condition: "service_healthy" mq: condition: "service_healthy" volumes: - ../src/python:/home/broker/python working_dir: "/home/broker/python" - entrypoint: [ "python", "-m", "pollers.axistech" ] + entrypoint: [ "python", "-m", "timescale.TS_LTSReader" ] diff --git a/compose/production/prod.yml b/compose/production/prod.yml index 15461ee9..c31d1eb4 100644 --- a/compose/production/prod.yml +++ b/compose/production/prod.yml @@ -31,9 +31,20 @@ services: ports: - "127.0.0.1:5000:5000" + timescaledb: + volumes: + - tsdb_db:/home/postgres/pgdata/data + - pgbackrest_data:/var/lib/pgbackrest + volumes: broker_db: external: true mq_data: external: true + + tsdb_db: + external: true + + pgbackrest_data: + external: false diff --git a/compose/test/test.yml b/compose/test/test.yml index be235d3d..62d8767c 100644 --- a/compose/test/test.yml +++ b/compose/test/test.yml @@ -15,3 +15,10 @@ services: - ..:/home/broker/broker working_dir: "/home/broker/broker" entrypoint: [ "./forever.sh" ] + mq: + ports: + - 15672:15672 + restapi: + ports: + - 5687:5687 + diff --git a/db/init.d/init_db.sql b/db/init.d/init_db.sql index 9ac12cab..0df6d030 100755 --- a/db/init.d/init_db.sql +++ b/db/init.d/init_db.sql @@ -111,3 +111,352 @@ create index if not exists pd_src_id_idx on physical_devices using GIN (source_i insert into sources values ('ttn'), ('greenbrain'), ('wombat'), ('ydoc'), 
('ict_eagleio'); insert into version values (2); + +create table if not exists data_name_map( + input_name text not null primary key, + std_name text not null +); + +create table if not exists type_name_map( + full_name text not null primary key, + short_name text not null +); + +create table if not exists word_list( + full_word text +); + +create table if not exists hash_table( + table_name text primary key, + data_hash text +); + +create or replace function update_hash_table() +returns trigger as $$ +begin + if TG_OP = 'INSERT' or TG_OP = 'UPDATE' OR TG_OP = 'DELETE' then + insert into hash_table (table_name, data_hash) + values (TG_TABLE_NAME, MD5(NEW.*::text)) + on conflict (table_name) + do update set data_hash = MD5(NEW.*::text); + return new; + end if; +end; +$$ language plpgsql; + +create trigger type_name_map_trigger +after insert or update or delete on type_name_map +for each row + execute function update_hash_table(); + +create trigger word_list_trigger +after insert or update or delete on word_list +for each row + execute function update_hash_table(); + +create index if not exists pd_src_id_idx on physical_devices using GIN (source_ids); + +insert into sources values ('ttn'), ('greenbrain'), ('wombat'), ('ydoc'), ('ict_eagleio'); + +insert into data_name_map (input_name, std_name) values + ('1_Temperature', '1_TEMPERATURE'), + ('1_VWC', '1_VWC'), + ('2_Temperature', '2_TEMPERATURE'), + ('2_VWC', '2_VWC'), + ('3_Temperature', '3_TEMPERATURE'), + ('3_VWC', '3_VWC'), + ('4_Temperature', '4_TEMPERATURE'), + ('4_VWC', '4_VWC'), + ('5_Temperature', '5_TEMPERATURE'), + ('5_VWC', '5_VWC'), + ('6_Temperature', '6_TEMPERATURE'), + ('6_VWC', '6_VWC'), + ('8_AirPressure', '8_AIR_PRESSURE'), + ('8_AirTemperature', '8_AIR_TEMPERATURE'), + ('8_HumiditySensorTemperature', '8_HUMIDITY_SENSOR_TEMPERATURE'), + ('8_Precipitation', '8_PRECIPITATION'), + ('8_RH', '8_RH'), + ('8_Solar', '8_SOLAR'), + ('8_Strikes', '8_STRIKES'), + ('8_VaporPressure', '8_VAPOR_PRESSURE'), + ('8_WindDirection', '8_WIND_DIRECTION'), + ('8_WindGustSpeed', '8_WIND_GUST_SPEED'), + ('8_WindSpeed', '8_WIND_SPEED'), + ('Access_technology', 'ACCESS_TECHNOLOGY'), + ('accMotion', 'ACC_MOTION'), + ('Actuator', 'ACTUATOR'), + ('adc_ch1', 'ADC_CH_1'), + ('adc_ch2', 'ADC_CH_2'), + ('adc_ch3', 'ADC_CH_3'), + ('adc_ch4', 'ADC_CH_4'), + ('airTemp', 'AIR_TEMPERATURE'), + ('airtemperature', 'AIR_TEMPERATURE'), + ('airTemperature', 'AIR_TEMPERATURE'), + ('altitude', 'ALTITUDE'), + ('Ana', 'ANA'), + ('atmosphericpressure', 'ATMOSPHERIC_PRESSURE'), + ('atmosphericPressure', 'ATMOSPHERIC_PRESSURE'), + ('Average_current', 'AVERAGE_CURRENT'), + ('average-flow-velocity0_0_m/s', 'AVERAGE_FLOW_VELOCITY_0_0_MS'), + ('Average_voltage', 'AVERAGE_V'), + ('Average_Voltage', 'AVERAGE_V'), + ('Average_Wind_Speed_', 'AVERAGE_WIND_SPEED'), + ('avgWindDegrees', 'AVERAGE_WIND_DEGREES'), + ('barometricPressure', 'BAROMETRIC_PRESSURE'), + ('batmv', 'BATMV'), + ('battery', 'BATTERY'), + ('Battery (A)', 'BATTERY_A'), + ('battery (v)', 'BATTERY_V'), + ('Battery (V)', 'BATTERY_V'), + ('batteryVoltage', 'BATTERY_V'), + ('battery-voltage_V', 'BATTERY_V'), + ('Battery (W)', 'BATTERY_W'), + ('Cable', 'CABLE'), + ('charging-state', 'CHARGING_STATE'), + ('Class', 'CLASS'), + ('command', 'COMMAND'), + ('conductivity', 'CONDUCTIVITY'), + ('counterValue', 'COUNTER_VALUE'), + ('current-flow-velocity0_0_m/s', 'CURRENT_FLOW_VELOCITY_0_0_MS'), + ('depth', 'DEPTH'), + ('Device', 'DEVICE'), + ('DI0', 'DI_0'), + ('DI1', 'DI_1'), + ('direction', 'DIRECTION'), + ('distance', 
'DISTANCE'), + ('down630', 'DOWN_630'), + ('down800', 'DOWN_800'), + ('EC', 'EC'), + ('externalTemperature', 'EXTERNAL_TEMPERATURE'), + ('fault', 'FAULT'), + ('Fraud', 'FRAUD'), + ('gnss', 'GNSS'), + ('gustspeed', 'GUST_SPEED'), + ('gustSpeed', 'GUST_SPEED'), + ('header', 'HEADER'), + ('Humi', 'HUMI'), + ('humidity', 'HUMIDITY'), + ('Hygro', 'HYGRO'), + ('Leak', 'LEAK'), + ('linpar', 'LINPAR'), + ('Max_current', 'MAX_CURRENT'), + ('Maximum_Wind_Speed_', 'MAX_WIND_SPEED'), + ('Max_voltage', 'MAX_V'), + ('Min_current', 'MIN_CURRENT'), + ('Minimum_Wind_Speed_', 'MIN_WIND_SPEED'), + ('Min_voltage', 'MIN_V'), + ('moisture1', 'MOISTURE_1'), + ('moisture2', 'MOISTURE_2'), + ('moisture3', 'MOISTURE_3'), + ('moisture4', 'MOISTURE_4'), + ('ndvi', 'NDVI'), + ('O06 / DPI-144', 'O_06_DPI_144'), + ('Operating_cycle', 'OPERATING_CYCLE'), + ('packet-type', 'PACKET_TYPE'), + ('period', 'PERIOD'), + ('Power', 'POWER'), + ('precipitation', 'PRECIPITATION'), + ('pressure', 'PRESSURE'), + ('Processor_temperature', 'PROCESSOR_TEMPERATURE'), + ('pulse_count', 'PULSE_COUNT'), + ('Radio_channel_code', 'RADIO_CHANNEL_CODE'), + ('Rainfall', 'RAINFALL'), + ('rain_per_interval', 'RAIN_PER_INTERVAL'), + ('Rain_per_interval', 'RAIN_PER_INTERVAL'), + ('raw_depth', 'RAW_DEPTH'), + ('rawSpeedCount', 'RAW_SPEED_COUNT'), + ('relativehumidity', 'RELATIVE_HUMIDITY'), + ('relativeHumidity', 'RELATIVE_HUMIDITY'), + ('Rest_capacity', 'REST_CAPACITY'), + ('Rest_power', 'REST_POWER'), + ('rssi', 'RSSI'), + ('rtc', 'RTC'), + ('RTC', 'RTC'), + ('S1_EC', 'S_1_EC'), + ('S1_Temp', 'S_1_TEMPERATURE'), + ('S1_Temp_10cm', 'S_1_TEMPERATURE_10_CM'), + ('S1_Temp_20cm', 'S_1_TEMPERATURE_20_CM'), + ('S1_Temp_30cm', 'S_1_TEMPERATURE_30_CM'), + ('S1_Temp_40cm', 'S_1_TEMPERATURE_40_CM'), + ('S1_Temp_50cm', 'S_1_TEMPERATURE_50_CM'), + ('S1_Temp_60cm', 'S_1_TEMPERATURE_60_CM'), + ('S1_Temp_70cm', 'S_1_TEMPERATURE_70_CM'), + ('S1_Temp_80cm', 'S_1_TEMPERATURE_80_CM'), + ('S1_Temp_90cm', 'S_1_TEMPERATURE_90_CM'), + ('S1_VWC', 'S_1_VWC'), + ('s4solarRadiation', 'S_4_SOLAR_RADIATION'), + ('salinity', 'SALINITY'), + ('salinity1', 'SALINITY_1'), + ('salinity2', 'SALINITY_2'), + ('salinity3', 'SALINITY_3'), + ('salinity4', 'SALINITY_4'), + ('sensorReading', 'SENSOR_READING'), + ('shortest_pulse', 'SHORTEST_PULSE'), + ('Signal', 'SIGNAL'), + ('Signal_indication', 'SIGNAL_INDICATION'), + ('Signal_strength', 'SIGNAL_STRENGTH'), + ('snr', 'SNR'), + ('soilmoist', 'SOIL_MOISTURE'), + ('soiltemp', 'SOIL_TEMPERATURE'), + ('solar', 'SOLAR'), + ('Solar (A)', 'SOLAR_A'), + ('solarpanel', 'SOLAR_PANEL'), + ('solarPanel', 'SOLAR_PANEL'), + ('solar (v)', 'SOLAR_V'), + ('Solar (V)', 'SOLAR_V'), + ('solar-voltage_V', 'SOLAR_V'), + ('Solar (W)', 'SOLAR_W'), + ('solmv', 'SOLMV'), + ('sq110_umol', 'SQ_110_UMOL'), + ('strikes', 'STRIKES'), + ('Tamper', 'TAMPER'), + ('tdskcl', 'TDSKCL'), + ('Temp', 'TEMPERATURE'), + ('temperature', 'TEMPERATURE'), + ('Temperature', 'TEMPERATURE'), + ('temperature1', 'TEMPERATURE_1'), + ('temperature2', 'TEMPERATURE_2'), + ('temperature3', 'TEMPERATURE_3'), + ('temperature4', 'TEMPERATURE_4'), + ('temperature5', 'TEMPERATURE_5'), + ('temperature6', 'TEMPERATURE_6'), + ('temperature7', 'TEMPERATURE_7'), + ('temperature8', 'TEMPERATURE_8'), + ('temperatureReading', 'TEMPERATURE_READING'), + ('tilt-anlge0_0_Degrees', 'TILT_ANLGE_0_0_DEGREES'), + ('UNIX_time', 'UNIX_TIME'), + ('up630', 'UP_630'), + ('up800', 'UP_800'), + ('uptime_s', 'UPTIME_S'), + ('vapourpressure', 'VAPOUR_PRESSURE'), + ('vapourPressure', 'VAPOUR_PRESSURE'), + ('vdd', 'VDD'), + 
('Volt', 'V'), + ('vt', 'VT'), + ('VWC', 'VWC'), + ('VWC1', 'VWC_1'), + ('winddirection', 'WIND_DIRECTION'), + ('windDirection', 'WIND_DIRECTION'), + ('windKph', 'WIND_KPH'), + ('windspeed', 'WIND_SPEED'), + ('windSpeed', 'WIND_SPEED'), + ('windStdDevDegrees', 'WIND_STD_DEV_DEGREES'); + + +insert into type_name_map (full_name, short_name) values + ('AMP', 'A'), + ('AMPERAGE', 'A'), + ('AMPS', 'A'), + ('VOLT', 'V'), + ('VOLTAGE', 'V'), + ('VOLTS', 'V'), + ('MAXIMUM', 'MAX'), + ('MINIMUM', 'MIN'), + ('CENTIMETER', 'CM'), + ('CENTIMETRE', 'CM'), + ('CENTIMETERS', 'CM'), + ('CENTIMETRES', 'CM'), + ('TEMP', 'TEMPERATURE'), + ('AVG', 'AVERAGE'), + ('MOIST', 'MOISTURE'); + +insert into word_list values + ('ACCESS'), + ('ACTUATOR'), + ('AIR'), + ('ALTITUDE'), + ('AMP'), + ('AMPERAGE'), + ('AMPS'), + ('ATMOSPHERIC'), + ('AVERAGE'), + ('AVG'), + ('BAROMETRIC'), + ('BATTERY'), + ('CABLE'), + ('CAPACITY'), + ('CHANNEL'), + ('CHARGING'), + ('CLASS'), + ('CODE'), + ('COMMAND'), + ('CONDUCTIVITY'), + ('COUNT'), + ('COUNTER'), + ('CURRENT'), + ('CYCLE'), + ('DEGREES'), + ('DEPTH'), + ('DEV'), + ('DEVICE'), + ('DISTANCE'), + ('DIRECTION'), + ('DOWN'), + ('EXTERNAL'), + ('FLOW'), + ('FRAUD'), + ('GUST'), + ('HEADER'), + ('HUMIDITY'), + ('HYGRO'), + ('INDICATION'), + ('INTERVAL'), + ('KPH'), + ('LEAK'), + ('MAX'), + ('MAXIMUM'), + ('MIN'), + ('MINIMUM'), + ('MOIST'), + ('MOISTURE'), + ('MOTION'), + ('OPERATING'), + ('PACKET'), + ('PANEL'), + ('PER'), + ('PERIOD'), + ('POWER'), + ('PRECIPITATION'), + ('PRESSURE'), + ('PROCESSOR'), + ('PULSE'), + ('RADIO'), + ('RAINFALL'), + ('RAIN'), + ('READING'), + ('RELATIVE'), + ('REST'), + ('SALINITY'), + ('SIGNAL'), + ('SOLAR'), + ('SOIL'), + ('SPEED'), + ('STRENGTH'), + ('STRIKE'), + ('STRIKES'), + ('STD'), + ('TECHNOLOGY'), + ('TILT'), + ('TIME'), + ('UNIX'), + ('UP'), + ('UPTIME'), + ('VALUE'), + ('VAPOR'), + ('VELOCITY'), + ('VOLT'), + ('VOLTS'), + ('VOLTAGE'), + ('READING'), + ('SHORTEST'), + ('SNR'), + ('SOIL'), + ('TAMPER'), + ('TILT'), + ('TIME'), + ('TEMPERATURE'), + ('TEMP'), + ('UNIX'), + ('UP'), + ('VAPOUR'), + ('WIND'); diff --git a/doc/nginx.md b/doc/nginx.md index 483b7d60..449f403f 100644 --- a/doc/nginx.md +++ b/doc/nginx.md @@ -30,6 +30,11 @@ To connect to the RabbitMQ monitor web page, use `https://hostname/rabbitmq` # Use the hostname 'restapi' if nginx is running in a container. proxy_pass http://localhost:5687/broker/; } + + location /query/ { + #use the hostname 'restapi' if nginx is running in a container. + proxy_pass http://localhost:5687/query/; + } location /rabbitmq/ { # Use the hostname 'mq' if nginx is running in a container. @@ -93,4 +98,4 @@ stream { proxy_pass 127.0.0.1:1884; } } -``` \ No newline at end of file +``` diff --git a/doc/tsdb/Programmer Documentation.md b/doc/tsdb/Programmer Documentation.md new file mode 100644 index 00000000..ba5c7ef1 --- /dev/null +++ b/doc/tsdb/Programmer Documentation.md @@ -0,0 +1,369 @@ +# Programmer Documentation +### Purpose: +` `The aim of this document is to act as a guide on how the system actually works, why it is implemented the way it is, and how to modify or maintain the system. 
## ***Business Aims***
The TSDB implementation aims to address the business requirements of:

- Storing incoming sensor data in an efficient and optimal way that can be easily retrieved or backed up
- Compatibility with the existing implementation, including allowing access to and use of the IoTa databases
- Local hosting of the database, not cloud hosting
- Graphical representation of data with a web app

## ***Change List - Initial Merge into IoTa***
Here is a complete list of the files we have changed or added, the reason for each change, and some notes on what might happen if it is changed.

|File|Change List|Reasons/Notes|
| :- | :- | :- |
|[compose/.env](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/compose/.env)|
Several new environment variables:
- TSDB\_USER
- TSDB username
- TSDB\_PASSWORD
- TSDB password
- TSDB\_PORT
- TSDB port
- TSDB\_HOST
- TSDB host
- TSDB\_DB
- TSDB database name
- TSDB\_TABLE
- TSDB table name
- NAMING\_UPDATE\_INTERVAL
- Interval (seconds) between the naming system's checks for changes
|- Changed the existing file to keep the project's configuration in one place.
- Extra environment variables were required for the TSDB implementation.
- The TSDB\_XXX values need to match their equivalents in [compose/.tsdb_env](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/compose/.tsdb_env); a connection sketch using these variables is given after this table.
| +|[compose/.tsdb_env](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/compose/.tsdb_env)|Several new environment variables:
- POSTGRES\_USER
- TSDB username
- POSTGRES\_PASSWORD
- TSDB password
- POSTGRES\_PORT
- TSDB port
|- Since TimescaleDB is built on Postgres, there was a conflict when both databases used the same .env file to set credentials, so the TSDB credentials were split into a second file.|
|[compose/docker-compose.yml](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/compose/docker-compose.yml)|Several new services:
- iota\_tsdb\_decoder
- Message handling and inserting into TSDB
- timescaledb
- TSDB
|- The decoder handles incoming MQ messages and inserts them into the TSDB.
- timescaledb is the actual time series database.
| +|[db/init.d/init_db.sql](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/db/init.d/init_db.sql)|New Tables:
- data\_name\_map
- Stores the mappings used to standardise names in incoming messages
- type\_name\_map
- Stores types to dynamically process unmapped messages
- word\_list
- Stores words to dynamically process unmapped messages
- hash\_table
- Stores a hash for each table to enable quicker syncing
Functions:
- update\_hash\_table
- Create or update hash for table\_name
Triggers:
- type\_name\_map\_trigger
- word\_list\_trigger
- Both of the above fire on any change to their respective table and call update\_hash\_table
Insertions:
- Inserts default values into data\_name\_map
|- Efficient implementation of standardising the names used for time series data.
- Name maps are accessible to anything that has access to the DAO or the database.
- word\_list and type\_name\_map can be updated while the containers are running and will auto-sync within a set period, without much overhead (see the hash-sync sketch after this table).
|
|[timescale/init.sql](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/timescale/init.sql)|Creates the time series database schema|Requirement to set up the time series database.|
|[timescale/Dockerfile](https://github.com/ZakhaevK/itc303-team3-broker/blob/inc_backup/timescale/Dockerfile)|Used for the custom TimescaleDB image.|Main purpose is to install pgBackRest for physical backup into the timescale image.|
|[timescale/pgbr_init.sh](https://github.com/ZakhaevK/itc303-team3-broker/blob/inc_backup/timescale/pgbr_init.sh)|Added file.|Main purpose is to establish the stanza for pgBackRest so that physical backup can be performed.|
|[timescale/postgres/postgresql.conf](https://github.com/ZakhaevK/itc303-team3-broker/blob/inc_backup/timescale/postgres/custom_postgresql.conf)|Added file.|Required for configuration of Postgres when used with pgBackRest.|
|[timescale/pgbackrest/pgbackrest.conf](https://github.com/ZakhaevK/itc303-team3-broker/blob/inc_backup/timescale/pgbackrest/pgbackrest.conf)|Added file.|Required for configuration of pgBackRest.|
|[src/python/broker-cli.py](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/python/broker-cli.py)|Added logical device pretty output
Added several CRUD functions for:
- word\_list
- data\_name\_map
- type\_name\_map
|Physical devices had pretty output but logical devices did not| +|[src/python/api/client/DAO.py](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/python/api/client/DAO.py)|New Functions:
- CRUD:
- add\_name\_map
- update\_name\_map
- get\_std\_name
- \_get\_std\_name
|Ability to access the new table correctly.| +|[src/python/pdmodels/Models.py](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/python/pdmodels/Models.py)|Added DataNameMap class|Following current structure for IoTa.| +|[src/python/restapi/TSDBAPI.py](https://github.com/ZakhaevK/itc303-team3-broker/tree/merge_dpi/src/python/restapi)|New file for implementing the time series API|Uses same endpoint as existing REST API
Implements API requests for getting time series data from Timescale (see the example request after this table)
| +|[src/python/restapi/requirements.txt](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/python/restapi/requirements.txt)|Added extra modules|Some newer features needed extra modules to work.
Made setting up a local test environment slightly easier by using the file to install requirements
|
|[src/python/timescale/TS_LTSReader.py](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/python/timescale/TS_LTSReader.py)|Added file|This is the RabbitMQ message listener that receives and handles the incoming messages|
|[src/python/timescale/Timescale.py](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/python/timescale/Timescale.py)|Added file|This parses incoming messages into the Timescale instance|
|[src/python/util/NamingConstants.py](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/python/util/NamingConstants.py)|Added file|If messages come through that do not currently have a mapped name, this module will generate the mapped name (see the naming sketch after this table).
Uses word\_list, type\_name\_map and hash\_table to stay synced and dynamically create the mapped names.
| +|[src/www/app/utils/api.py](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/www/app/utils/api.py)|New functions:
- get\_between\_dates\_ts
- get\_luid\_ts
- get\_puid\_ts
|New functions pull from REST API to fill the web graph and web table data.| +|[src/www/app/main.py](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/www/app/main.py)|New functions:
- parse\_ts\_table\_data
- parses data into the format used by the web table
- parse\_ts\_data
- parses data into the format used by the graph
- get\_data
- the Flask endpoint that returns data for the web table
Modified functions:
- physical\_device\_form
- added a ts data variable that is passed to the form
- logical\_device\_form
- added a ts data variable that is passed to the form
|Required functions for providing time series data to the web app| +|[src/www/app/static/ts_graph.js](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/www/app/static/ts_graph.js)|Added file|This is a bit of a template file, both p\_uid and l\_uid use it.
Generates graph with time series data
| +|[src/www/app/templates/ts_graph.html](https://github.com/ZakhaevK/itc303-team3-broker/blob/master/src/www/app/templates/ts_graph.html)|Added file|This is a bit of a template file, both p\_uid and l\_uid use it.
This largely just passes the time series data on from Flask to the ts\_graph.js file and handles the HTML side of things.
This page also references ts\_table.js and handles displaying the graph.
The name should be refactored, as the file was created before the table was planned.
|
|[src/www/app/static/ts_table.css](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/www/app/static/ts_table.css)|Added file|Purely CSS for the time series template.
Almost all of it is for ts\_table.js; a small part styles the show graph/table button that is always shown.
| +|[src/www/app/static/ts_table.js](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/www/app/static/ts_table.js)|Added file|This handles the drawing and updating of the time series table.
It also uses JavaScript to insert the date pickers and buttons that the table uses.
| +|[src/www/app/templates/physical_device_form.html](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/www/app/templates/physical_device_form.html)|Added reference to ts\_graph.html template file|Added reference to the ts\_graph.js pages so time series can be used| +|[src/www/app/templates/logical_device_form.html](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/src/www/app/templates/logical_device_form.html)|Added reference to ts\_graph.html template file
|Added reference to the ts\_graph.js pages so time series can be used| +|[/load-data.sh](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/load-data.sh)|Added file|Useful script for adding and mapping some devices to test.
Creates a user login.
By default it will create 10 p\_uids and 10 l\_uids and map them 1:1.
|
|[/ts_backup.sh](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/ts_backup.sh)|Added file|Used for logical backup of the time series database|
|[/ts_restore.sh](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/ts_restore.sh)|Added file|Used to restore from a logical backup file|
|[/pgbr_backup.sh](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/pgbr_backup.sh)|Added file|Used for physical backup of the time series database|
|[/pgbr_restore.sh](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/pgbr_restore.sh)|Added file|Used to restore from the physical backup files|
|[/pgbr_cleanup.sh](https://github.com/ZakhaevK/itc303-team3-broker/blob/merge_dpi/pgbr_cleanup.sh)|Added file|Used for wiping and recreating pgBackRest (PGBR) data.
Best used when following a logical restore.
|
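The rows above reference a few mechanisms that are easiest to see in code. First, the TSDB\_* variables in compose/.env are what the Python services use to reach TimescaleDB. Below is a minimal connection sketch assuming psycopg2 (already used elsewhere in the broker code); the helper name is hypothetical and is not necessarily how timescale/Timescale.py does it.

```python
import os

import psycopg2  # assumed available, as the existing broker code already talks to Postgres


def connect_tsdb():
    """Open a connection to TimescaleDB using the TSDB_* variables from compose/.env."""
    return psycopg2.connect(
        host=os.environ.get("TSDB_HOST", "tsdb"),
        port=int(os.environ.get("TSDB_PORT", "5432")),
        user=os.environ["TSDB_USER"],
        password=os.environ["TSDB_PASSWORD"],
        dbname=os.environ.get("TSDB_DB", "postgres"),
    )


if __name__ == "__main__":
    # Quick connectivity check.
    with connect_tsdb() as conn, conn.cursor() as cur:
        cur.execute("select now()")
        print(cur.fetchone())
```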
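Second, the hash_table rows maintained by the update_hash_table() trigger let a service detect changes to the naming tables without re-reading them on every message. The sketch below shows the general idea only; the function names and caching strategy are hypothetical and not the actual NamingConstants.py implementation, with NAMING_UPDATE_INTERVAL defaulting to 600 seconds as in compose/.env.

```python
import os
import time

# Cached hashes of the naming tables, keyed by table name.
_cached_hashes: dict = {}


def naming_tables_changed(cursor) -> bool:
    """Compare hash_table against the cached hashes; update the cache and report any change."""
    cursor.execute("select table_name, data_hash from hash_table")
    changed = False
    for table_name, data_hash in cursor.fetchall():
        if _cached_hashes.get(table_name) != data_hash:
            _cached_hashes[table_name] = data_hash
            changed = True
    return changed


def maybe_refresh(cursor, reload_fn, last_check: float) -> float:
    """Reload word_list / type_name_map via reload_fn if the interval has elapsed and a hash changed."""
    interval = int(os.environ.get("NAMING_UPDATE_INTERVAL", "600"))
    now = time.time()
    if now - last_check < interval:
        return last_check
    if naming_tables_changed(cursor):
        reload_fn(cursor)
    return now
```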
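Third, a rough illustration of what the dynamic name mapping does for an unmapped input name: split it into tokens, expand known abbreviations, and join them as an upper-snake-case standard name. This is a simplified stand-in using an in-memory dict, not the real word_list/type_name_map-driven algorithm in NamingConstants.py.

```python
import re

# Simplified, in-memory stand-ins for the database tables.
TYPE_NAME_MAP = {"TEMP": "TEMPERATURE", "AVG": "AVERAGE", "MOIST": "MOISTURE"}


def to_std_name(input_name: str) -> str:
    """Build an upper-snake-case standard name from an arbitrary incoming name."""
    # Insert a boundary at camelCase transitions, then pull out alphabetic and numeric tokens.
    spaced = re.sub(r"(?<=[a-z])(?=[A-Z])", " ", input_name)
    tokens = re.findall(r"[A-Za-z]+|\d+", spaced)
    # Expand known abbreviations (a stand-in for type_name_map) and upper-case everything.
    return "_".join(TYPE_NAME_MAP.get(tok.upper(), tok.upper()) for tok in tokens)


# Examples matching rows in data_name_map:
# to_std_name("airTemp")       -> "AIR_TEMPERATURE"
# to_std_name("8_AirPressure") -> "8_AIR_PRESSURE"
```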
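Finally, an example request against the new /query/ route that nginx proxies to the REST API. The route itself comes from doc/nginx.md, but the parameter names below are hypothetical placeholders; the real ones are defined in src/python/restapi/TSDBAPI.py.

```python
import requests

# Hypothetical example only: the real route and parameter names are defined in
# src/python/restapi/TSDBAPI.py and may differ from what is shown here.
resp = requests.get(
    "https://hostname/query/",  # proxied by nginx to the REST API container (see doc/nginx.md)
    params={"l_uid": 1, "from": "2023-01-01T00:00:00Z", "to": "2023-01-02T00:00:00Z"},
    timeout=10,
)
resp.raise_for_status()
print(resp.json())
```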
Version | Date | Remarks | Author |
---|---|---|---|
0.1 | 31/03/23 | Added to the Introduction, and documents section of the Master Test Plan. These are near completion. | Zak K |
0.2 | 02/04/23 | First draft of the Test Strategy written. Executive summary began to be written. | Zak K |
0.3 | 03/04/23 | First draft of Test Plan written. Lacking acceptance testing section at this time. | Zak K |
0.5 | 04/04/23 | Full draft document completed. Risks may be added/changed, and tests expanded as the project matures. | Zak K |
0.6 | 07/04/23 | Removed blue template text. | Zak K |
0.9 | 09/04/23 | Made small adjustments and additions to Test objectives/levels | Zak K |

### Project objective
The goal of this project can be summarised with the following points:

### Test approach
Unit testing will primarily be necessary to make sure the service can extract the desired data points from the messages provided by RabbitMQ. This could involve just an example message in the format to be used, to allow the program to parse it; it could also include the other database formats as well (a rough unit-test sketch is given at the end of this document).

Integration testing should focus on the interaction of specific functionality between Docker containers; this will include API to service, service to TSDB, and web interface to API.

System testing will monitor the full use cases and the data that flows between the different elements of the project. This includes message parsing, extraction of data points, and finally storage within the TSDB. It also includes the retrieval of data from the TSDB via an API request to the service from the web interface.

Acceptance testing will be similar to system testing, but will focus on getting feedback from the stakeholders to confirm that the functionality is as they expected, or whether there are changes/additions that could be made to better fit their needs.

### Test objectives
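As a rough illustration of the unit-testing style described under Test approach above (the message structure and the parse_message function below are hypothetical placeholders, not the project's actual code):

```python
import json


def parse_message(raw: str) -> list:
    """Hypothetical parser: extract (name, value) pairs from a broker message."""
    msg = json.loads(raw)
    return [(entry["name"], float(entry["value"])) for entry in msg.get("timeseries", [])]


def test_parse_message_extracts_datapoints():
    # A minimal example message in the assumed format; real messages carry more fields.
    raw = json.dumps({"timeseries": [{"name": "airTemp", "value": 21.5}]})
    assert parse_message(raw) == [("airTemp", 21.5)]
```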