Skip to content

Commit

Permalink
move image_inference output to simple log, since it generates labelled images itself.
Browse files Browse the repository at this point in the history
  • Loading branch information
anarkiwi committed Nov 22, 2023
1 parent e6a0c76 commit 1bbd631
Show file tree
Hide file tree
Showing 10 changed files with 66 additions and 136 deletions.
2 changes: 1 addition & 1 deletion docker/Dockerfile.base
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
libvulkan-dev \
python3-numpy
WORKDIR /root
RUN git clone https://github.com/iqtlabs/gr-iqtlabs -b 1.0.46
RUN git clone https://github.com/iqtlabs/gr-iqtlabs -b 1.0.48
COPY --from=iqtlabs/gamutrf-vkfft:latest /root /root/gr-iqtlabs
WORKDIR /root/gr-iqtlabs/build
COPY --from=sigmf-builder /usr/local /usr/local
Expand Down
2 changes: 1 addition & 1 deletion docs/README-airt.md
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ install gr-iqtlabs
$ git clone https://github.com/google/flatbuffers -b v23.5.26
$ git clone https://github.com/nlohmann/json -b v3.11.2
$ git clone https://github.com/deepsig/libsigmf -b v1.0.2
$ git clone https://github.com/iqtlabs/gr-iqtlabs -b 1.0.46
$ git clone https://github.com/iqtlabs/gr-iqtlabs -b 1.0.48
$ mkdir -p flatbuffers/build && cd flatbuffers/build && cmake -DCMAKE_INSTALL_PREFIX=~/.conda/envs/$CONDA_DEFAULT_ENV .. && make -j $(nproc) && make install && cd ../..
$ mkdir -p json/build && cd json/build && cmake -DCMAKE_INSTALL_PREFIX=~/.conda/envs/$CONDA_DEFAULT_ENV .. && make -j $(nproc) && make install && cd ../..
$ mkdir -p libsigmf/build && cd libsigmf/build && cmake -DUSE_SYSTEM_JSON=ON -DUSE_SYSTEM_FLATBUFFERS=ON -DCMAKE_INSTALL_PREFIX=~/.conda/envs/$CONDA_DEFAULT_ENV -DCMAKE_CXX_FLAGS="-I $HOME/.conda/envs/$CONDA_DEFAULT_ENV/include" .. && make -j $(nproc) && make install && cd ../..
Expand Down
File renamed without changes.
51 changes: 51 additions & 0 deletions gamutrf/grinference2mqtt.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import json
import sys
import numpy as np

try:
from gnuradio import gr # pytype: disable=import-error
except ModuleNotFoundError as err: # pragma: no cover
print(
"Run from outside a supported environment, please run via Docker (https://github.com/IQTLabs/gamutRF#readme): %s"
% err
)
sys.exit(1)


DELIM = "\n\n"


class inference2mqtt(gr.sync_block):
    """GNU Radio sync block that parses delimited JSON inference results.

    Consumes a byte stream (np.ubyte) of UTF-8 text containing JSON
    documents separated by blank lines (DELIM) and hands each decoded
    document to process_item(). Produces no output stream (out_sig=None).
    """

    def __init__(
        self,
    ):
        # Accumulates undecoded text across work() calls, since a single
        # JSON document may arrive split over multiple input buffers.
        # NOTE(review): the name says "yaml" but the payload is JSON —
        # kept for backward compatibility with any external readers.
        self.yaml_buffer = ""

        gr.sync_block.__init__(
            self,
            name="inference2mqtt",
            in_sig=[np.ubyte],
            out_sig=None,
        )

    def work(self, input_items, output_items):
        """Consume incoming bytes, dispatch complete JSON documents.

        Returns the number of input items (bytes) consumed, as required
        by the gr.sync_block work() contract.
        """
        n = 0
        for input_item in input_items:
            raw_input_item = input_item.tobytes().decode("utf8")
            n += len(raw_input_item)
            self.yaml_buffer += raw_input_item
        while True:
            delim_pos = self.yaml_buffer.find(DELIM)
            if delim_pos == -1:
                # No complete document buffered yet; wait for more input.
                break
            raw_item = self.yaml_buffer[:delim_pos]
            # Advance the buffer *before* parsing, so a malformed document
            # cannot wedge the block by being re-parsed (and re-raising
            # json.JSONDecodeError) on every subsequent work() call.
            self.yaml_buffer = self.yaml_buffer[delim_pos + len(DELIM) :]
            item = json.loads(raw_item)
            self.process_item(item)
        return n

    def process_item(self, item):
        """Handle one decoded inference result (currently just logs it)."""
        print(item)
        return

Check warning on line 51 in gamutrf/grinference2mqtt.py

View check run for this annotation

Codecov / codecov/patch

gamutrf/grinference2mqtt.py#L50-L51

Added lines #L50 - L51 were not covered by tests
13 changes: 4 additions & 9 deletions gamutrf/grscan.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
sys.exit(1)

from gamutrf.grsource import get_source
from gamutrf.gryolo import yolo_bbox
from gamutrf.grinference2mqtt import inference2mqtt
from gamutrf.utils import endianstr


Expand Down Expand Up @@ -189,16 +189,14 @@ def __init__(
image_vlen = np.prod(image_shape)
prediction_shape = (1, 8, 8400)
prediction_vlen = np.prod(prediction_shape)
image_dir = Path(inference_output_dir, "images")
Path(inference_output_dir).mkdir(parents=True, exist_ok=True)
image_dir.mkdir(parents=True, exist_ok=True)
self.inference_blocks = [
self.iqtlabs.image_inference(
tag="rx_freq",
vlen=nfft,
x=x,
y=y,
image_dir=str(image_dir),
image_dir=inference_output_dir,
convert_alpha=255,
norm_alpha=0,
norm_beta=1,
Expand All @@ -209,12 +207,9 @@ def __init__(
min_peak_points=inference_min_db,
model_server=inference_model_server,
model_name=inference_model_name,
confidence=inference_min_confidence,
),
yolo_bbox(
str(Path(inference_output_dir, "predictions")),
inference_min_confidence,
inference_nms_threshold,
),
inference2mqtt(),
]

if pretune:
Expand Down
2 changes: 1 addition & 1 deletion gamutrf/grsource.py
Original file line number Diff line number Diff line change
Expand Up @@ -78,5 +78,5 @@ def get_source(
)
if sdr == "SoapyAIRT":
workaround_start_hook = airt_workaround_start_hook

sources[0].set_thread_priority(99)
return sources, cmd_port, workaround_start_hook
122 changes: 0 additions & 122 deletions gamutrf/gryolo.py

This file was deleted.

6 changes: 6 additions & 0 deletions tests/test_grscan.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,9 @@ def set_time_now(self, time_spec, _mb):
def get_time_now(self):
    # Fake SDR time source for tests: returns whatever time_spec was
    # previously stored on this instance (by set_time_now).
    return self.time_spec

def set_thread_priority(self, _priority):
    # No-op test stub: the fake SDR accepts and ignores thread-priority
    # requests made by grsource.get_source.
    return


class FakeUHD:
def __init__(self):
Expand All @@ -63,6 +66,9 @@ def set_sample_rate(self, _channel, _sample_rate):
def set_bandwidth(self, _channel, _bw):
    # No-op test stub: the fake SDR accepts and ignores bandwidth settings.
    return

def set_thread_priority(self, _priority):
    # No-op test stub: the fake UHD source accepts and ignores
    # thread-priority requests made by grsource.get_source.
    return


class FakeSoapy:
def __init__(self):
Expand Down
2 changes: 1 addition & 1 deletion torchserve-cuda.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ services:
ports:
- '8080:8080'
volumes:
- '${VOL_PREFIX}:/model_store'
- '${VOL_PREFIX}/model_store:/model_store'
deploy:
resources:
reservations:
Expand Down
2 changes: 1 addition & 1 deletion torchserve.yml
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,6 @@ services:
ports:
- '8080:8080'
volumes:
- '${VOL_PREFIX}:/model_store'
- '${VOL_PREFIX}/model_store:/model_store'
command:
- --models mini2_snr=mini2_snr.mar

0 comments on commit 1bbd631

Please sign in to comment.