
Commit

updating py_data_acq and flake
RCMast3r committed Dec 30, 2023
1 parent f31af10 commit 379d3a7
Showing 9 changed files with 112 additions and 67 deletions.
14 changes: 7 additions & 7 deletions README.md
@@ -1,11 +1,13 @@
usage:

TODO:

- [ ] write the data storage script for saving the received CAN messages locally in the MCAP file
- [ ] get nix-proto working with dbc input from url for creation of python lib
- [ ] get py_data_acq working in dev shell with nix-proto generated python lib for proto msg packing
- [ ] make the deserialization task for unpacking received data from CAN.
- [ ] write a test script that creates a cantools-constructed hytech CAN msg and sends it over a virtual CAN line (see the sketch after this list)
- [ ] make the deserialization task for unpacking received data from CAN in the data acq service script.
- [ ] add the ability to start / stop / generally control the data recording via gRPC calls to the mcap writer task
- [ ] make a user script / interface for the gRPC calls for easier interaction with the service
- [ ] actually get current data from the car into protobuf-encoded CAN messages in an integration test
- [x] get nix-proto working with dbc input from url for creation of python lib
- [x] get py_data_acq working in dev shell with nix-proto generated python lib for proto msg packing
- [x] make service script that creates an instance of the mcap writer and the foxglove websocket
- [x] come up with a good way of associating the dbc file with the protobuf file
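
A minimal sketch of what that virtual-CAN test script could look like, assuming a `hytech.dbc` with a `DASHBOARD_STATUS` message and a `vcan0` interface already brought up (the DBC path, message name, and interface are assumptions, not things this repo provides yet):

```python
#!/usr/bin/env python
# Hypothetical sketch for the virtual-CAN test item above.
# Assumes a hytech.dbc with a DASHBOARD_STATUS message and a vcan0 interface
# (e.g. `ip link add dev vcan0 type vcan && ip link set up vcan0`).
import time

import can        # python-can
import cantools

db = cantools.database.load_file("hytech.dbc")
dash = db.get_message_by_name("DASHBOARD_STATUS")

# cantools expects a value for every signal in the message, so zero them all
payload = dash.encode({sig.name: 0 for sig in dash.signals})

bus = can.interface.Bus(channel="vcan0", bustype="socketcan")
frame = can.Message(arbitration_id=dash.frame_id, data=payload, is_extended_id=False)

while True:
    bus.send(frame)
    time.sleep(0.2)
```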

@@ -16,8 +18,6 @@ TODO:

- I know that I will be using cantools to create the DBC file, so I might as well extend that creation script to create the proto at the same time. Additionally, I know that I will be using Tim's auto-magic nix-proto for creation of the auto-generated Python code (a rough sketch of the DBC-to-proto step follows the list below).

- [ ] actually get current data from the car into protobuf-encoded CAN messages and send them from the current TCU / SAB
- [ ] get the Raspberry Pi listening to CAN messages
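
A rough sketch of the DBC-to-proto idea from the note above, assuming the generated DBC can be loaded with cantools; the file names and the crude type mapping are illustrative, not the actual creation script:

```python
# Hypothetical sketch: emit one proto3 message per CAN message in the DBC.
# Assumes hytech.dbc exists; the real creation script would replace/extend this.
import cantools

db = cantools.database.load_file("hytech.dbc")

lines = ['syntax = "proto3";', ""]
for msg in db.messages:
    lines.append(f"message id_{msg.name.lower()} {{")
    for i, sig in enumerate(msg.signals, start=1):
        # crude type mapping: scaled or float signals become doubles, the rest int64
        ptype = "double" if sig.is_float or sig.scale != 1 else "int64"
        lines.append(f"  {ptype} {sig.name.lower()} = {i};")
    lines.append("}")
    lines.append("")

with open("hytech.proto", "w") as f:
    f.write("\n".join(lines))
```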

## automation goals
- [x] dbc and proto file generation using CI
44 changes: 0 additions & 44 deletions broadcast.py

This file was deleted.

22 changes: 19 additions & 3 deletions flake.nix
@@ -64,14 +64,30 @@
devShells.x86_64-linux.default = pkgs.mkShell rec {
# Update the name to something that suits your project.
name = "nix-devshell";
packages = with pkgs; [ py_data_acq_pkg py_dbc_proto_gen_pkg proto_gen_pkg cmake ];
packages = with pkgs; [ jq py_data_acq_pkg py_dbc_proto_gen_pkg proto_gen_pkg cmake ];
# Setting up the environment variables you need during
# development.
shellHook = let icon = "f121";
in ''
path=${pkgs.proto_gen_pkg}
path+="/bin"
export BIN_PATH=$path
bin_path="$path/bin"
dbc_path="$path/dbc"
export BIN_PATH=$bin_path
export DBC_PATH=$dbc_path
PYTHON_INTERPRETER_PATH=$(which python)
# Path to the settings.json file in your VSCode workspace
SETTINGS_JSON_FILE=".vscode/settings.json"
# Check if the settings.json file exists, if not, create it
if [ ! -f "$SETTINGS_JSON_FILE" ]; then
mkdir -p "$(dirname "$SETTINGS_JSON_FILE")"
echo "{}" > "$SETTINGS_JSON_FILE"
fi
jq --arg pythonPath "$PYTHON_INTERPRETER_PATH" '. + { "python.pythonPath": $pythonPath }' "$SETTINGS_JSON_FILE" > "$SETTINGS_JSON_FILE.tmp" && mv "$SETTINGS_JSON_FILE.tmp" "$SETTINGS_JSON_FILE"
export PS1="$(echo -e '\u${icon}') {\[$(tput sgr0)\]\[\033[38;5;228m\]\w\[$(tput sgr0)\]\[\033[38;5;15m\]} (${name}) \\$ \[$(tput sgr0)\]"
'';
};
65 changes: 65 additions & 0 deletions py_data_acq/broadcast-test.py
@@ -0,0 +1,65 @@
#!/usr/bin/env python
import socket
import time

from hytech_np_proto_py import hytech_pb2

# Define the IP and port for the UDP socket
UDP_IP = "127.0.0.1"
UDP_PORT = 12345


def main():
# Create an instance of your message
my_message = hytech_pb2.id_dashboard_status()
my_message.start_button = True
my_message.buzzer_active = False
my_message.ssok_above_threshold = True
my_message.shutdown_h_above_threshold = True
my_message.mark_button = True
my_message.mode_button = True
my_message.motor_controller_cycle_button = True
my_message.launch_ctrl_button_ = True
my_message.torque_mode_button = True
my_message.led_dimmer_button = True

my_message.dial_state = "yo"
my_message.ams_led = "yo"
my_message.imd_led = "yo"
my_message.mode_led = "yo"
my_message.motor_controller_error_led = "yo"
my_message.start_status_led = "yo"
my_message.inertia_status_led = "yo"
my_message.mechanical_brake_led = "yo"
my_message.gen_purp_led = "yo"
my_message.bots_led = "yo"
my_message.cockpit_brb_led = "yo"
my_message.crit_charge_led = "yo"
my_message.glv_led = "yo"
my_message.launch_control_led = "yo"

# Serialize the message to bytes
serialized_message = my_message.SerializeToString()

# Create a UDP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

while True:
time.sleep(0.2)
try:
# Send the serialized message over the UDP socket
serialized_message = my_message.SerializeToString()
sock.sendto(serialized_message, (UDP_IP, UDP_PORT))
print(f"Message sent to {UDP_IP}:{UDP_PORT}")
except KeyboardInterrupt:
# Handle Ctrl+C to exit the loop gracefully
sock.close()
break
except Exception as e:
print(f"Error sending message: {e}")
# finally:
# sock.close()


if __name__ == "__main__":
main()
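
For reference, a receive-side counterpart to this broadcast test (the `continuous_udp_receiver` used in test.py presumably does something along these lines; this standalone sketch just assumes the same message type and port):

```python
#!/usr/bin/env python
# Hypothetical receive-side counterpart to broadcast-test.py.
import socket

from hytech_np_proto_py import hytech_pb2

UDP_IP = "127.0.0.1"
UDP_PORT = 12345

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind((UDP_IP, UDP_PORT))

while True:
    raw, addr = sock.recvfrom(4096)
    msg = hytech_pb2.id_dashboard_status()
    msg.ParseFromString(raw)
    print(f"from {addr}: start_button={msg.start_button} dial_state={msg.dial_state}")
```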
Empty file.
4 changes: 4 additions & 0 deletions py_data_acq/py_data_acq/common_types/common.py
@@ -0,0 +1,4 @@
class QueueData():
def __init__(self, schema_name: str, data: bytes):
self.name = schema_name
self.data = data
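
A small usage sketch for `QueueData`, matching how `foxglove_ws.py` keys channels by schema name; the producer coroutine and the schema name here are just for illustration:

```python
import asyncio

from hytech_np_proto_py import hytech_pb2
from py_data_acq.common_types.common import QueueData

async def produce_one(queue: asyncio.Queue):
    msg = hytech_pb2.id_dashboard_status()
    msg.start_button = True
    # pair the serialized bytes with the schema name the foxglove server keys its channels on
    await queue.put(QueueData("id_dashboard_status", msg.SerializeToString()))
```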
19 changes: 11 additions & 8 deletions py_data_acq/py_data_acq/foxglove_live/foxglove_ws.py
@@ -1,4 +1,6 @@
import asyncio

from py_data_acq.common_types.common import QueueData
from typing import Any

from foxglove_websocket import run_cancellable
@@ -12,33 +14,34 @@
# what I want to do with this class is extend the foxglove server to make it where it creates a protobuf schema
# based foxglove server that serves data from an asyncio queue.
class HTProtobufFoxgloveServer(FoxgloveServer):
def __init__(self, host: str, port: int, name: str, pb_bin_file_path: str):
def __init__(self, host: str, port: int, name: str, pb_bin_file_path: str, schema_names: list[str]):
super().__init__(host, port, name)
self.path = pb_bin_file_path

self.schema_names = schema_names
# map of schema name -> foxglove channel id, filled in when channels are added in __aenter__
self.chan_id_dict = {}
self.schema = standard_b64encode(open(pb_bin_file_path, "rb").read()).decode("ascii")

# this is run when we use this in a with statement for context management
async def __aenter__(self):
await super().__aenter__()
self.chan_id = await super().add_channel(
# TODO add channels for all of the msgs that are in the protobuf schema
for name in self.schema_names:
self.chan_id_dict[name] = await super().add_channel(
{
"topic": "car data",
"topic": name +"_data",
"encoding": "protobuf",
"schemaName": "ht_data",
"schemaName": name,
"schema": self.schema,
}
)

return self

async def __aexit__(self, exc_type: Any, exc_val: Any, traceback: Any):
return await super().__aexit__(exc_type, exc_val, traceback)

async def send_msgs_from_queue(self, queue):
async def send_msgs_from_queue(self, queue: asyncio.Queue[QueueData]):
try:
data = await queue.get()
if data is not None:
await super().send_message(self.chan_id, time.time_ns(), data)
await super().send_message(self.chan_id_dict[data.name], time.time_ns(), data.data)
except asyncio.CancelledError:
pass
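
A sketch of how this server might be wired up end to end; the host, port, descriptor-set path, and schema list are placeholders (test.py presumably passes the real values):

```python
import asyncio

from py_data_acq.common_types.common import QueueData
from py_data_acq.foxglove_live.foxglove_ws import HTProtobufFoxgloveServer

async def main():
    queue: asyncio.Queue[QueueData] = asyncio.Queue()
    # one foxglove channel gets created per schema name
    schema_names = ["id_dashboard_status"]
    fx = HTProtobufFoxgloveServer("0.0.0.0", 8765, "hytech-live", "hytech.bin", schema_names)
    async with fx:
        while True:
            await fx.send_msgs_from_queue(queue)

if __name__ == "__main__":
    asyncio.run(main())
```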
2 changes: 1 addition & 1 deletion py_data_acq/setup.py
@@ -5,5 +5,5 @@
name="py_data_acq",
version="1.0",
packages=find_packages(),
scripts=['test.py']
scripts=['test.py', 'broadcast-test.py']
)
9 changes: 5 additions & 4 deletions py_data_acq/test.py
@@ -30,7 +30,7 @@ async def write_data_to_mcap(queue, mcap_writer):
while True:
await mcw.write_data(queue)

async def consume_data(queue, foxglove_server):
async def fxglv_websocket_consume_data(queue, foxglove_server):
async with foxglove_server as fz:
while True:
await fz.send_msgs_from_queue(queue)
@@ -50,15 +50,16 @@ async def main():

receiver_task = asyncio.create_task(continuous_udp_receiver(queue, queue2))

fx_task = asyncio.create_task(consume_data(queue, fx_s))
fx_task = asyncio.create_task(fxglv_websocket_consume_data(queue, fx_s))

# in the mcap task I actually have to deserialize the protobuf Any msg into the message ID and
# the encoded message for that message ID. I will need to handle the same association of message ID
# and schema in the foxglove websocket server (see the sketch after this file).
mcap_task = asyncio.create_task(write_data_to_mcap(queue, mcap_writer))
# mcap_task = asyncio.create_task(write_data_to_mcap(queue, mcap_writer))

# TODO the data consuming MCAP file task for writing MCAP files to specific directory
await asyncio.gather(receiver_task, fx_task, mcap_task)
await asyncio.gather(receiver_task, fx_task)
# await asyncio.gather(receiver_task, fx_task, mcap_task)
# await asyncio.gather(receiver_task, mcap_task)
if __name__ == "__main__":
asyncio.run(main())
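
A sketch of the message-ID-to-schema association the comment in main() describes, assuming the receiver wraps each payload in a protobuf `Any` (that wire format is an assumption, not necessarily what `continuous_udp_receiver` does today):

```python
from google.protobuf import any_pb2

from py_data_acq.common_types.common import QueueData

def unpack_to_queue_data(raw: bytes) -> QueueData:
    # parse the Any wrapper, then recover the concrete message name from its
    # type URL (e.g. "type.googleapis.com/id_dashboard_status")
    wrapper = any_pb2.Any()
    wrapper.ParseFromString(raw)
    schema_name = wrapper.type_url.split("/")[-1]
    # wrapper.value already holds the serialized concrete message, which is what
    # both the foxglove channel and an mcap channel for that schema would expect
    return QueueData(schema_name, wrapper.value)
```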
