Merge pull request #4 from BREAD5940/photon-result-timestamp
TheTripleV authored Mar 31, 2024
2 parents 0e4e6fa + 6fdcc1f commit 281997b
Showing 41 changed files with 448 additions and 174 deletions.
@@ -19,6 +19,7 @@

import edu.wpi.first.networktables.NetworkTable;
import edu.wpi.first.networktables.NetworkTableEvent;
import edu.wpi.first.util.WPIUtilJNI;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
import java.util.function.Supplier;
@@ -27,6 +28,7 @@
import org.photonvision.common.logging.LogGroup;
import org.photonvision.common.logging.Logger;
import org.photonvision.common.networktables.NTTopicSet;
import org.photonvision.common.util.math.MathUtils;
import org.photonvision.targeting.PhotonPipelineResult;
import org.photonvision.vision.pipeline.result.CVPipelineResult;
import org.photonvision.vision.target.TrackedTarget;
@@ -46,8 +48,6 @@ public class NTDataPublisher implements CVPipelineResultConsumer {
private final BooleanSupplier driverModeSupplier;
private final Consumer<Boolean> driverModeConsumer;

private long heartbeatCounter = 0;

public NTDataPublisher(
String cameraNickname,
Supplier<Integer> pipelineIndexSupplier,
@@ -129,9 +129,13 @@ public void updateCameraNickname(String newCameraNickname) {

@Override
public void accept(CVPipelineResult result) {
var now = WPIUtilJNI.now();
var captureMicros = MathUtils.nanosToMicros(result.getImageCaptureTimestampNanos());
var simplified =
new PhotonPipelineResult(
result.getLatencyMillis(),
result.sequenceID,
captureMicros,
now,
TrackedTarget.simpleFromTrackedTargets(result.targets),
result.multiTagResult);

@@ -190,7 +194,7 @@ public void accept(CVPipelineResult result) {
ts.cameraDistortionPublisher.accept(new double[] {});
}

ts.heartbeatPublisher.set(heartbeatCounter++);
ts.heartbeatPublisher.set(result.sequenceID);

// TODO...nt4... is this needed?
rootTable.getInstance().flush();
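
To summarize the hunks above: each published result now carries the frame's capture time (converted from nanoseconds to microseconds) and the publish time taken from WPIUtilJNI.now(), and the heartbeat now mirrors result.sequenceID instead of a locally maintained counter. A minimal sketch of how the two timestamps relate, using only calls that appear in this diff; the class and method names below are illustrative, not part of the commit:

// --- illustrative sketch, not part of the commit ---
import edu.wpi.first.util.WPIUtilJNI;
import org.photonvision.common.util.math.MathUtils;
import org.photonvision.vision.pipeline.result.CVPipelineResult;

class ResultTimingSketch {
    // Approximate capture-to-publish latency in microseconds, assuming both
    // timestamps come from the same WPILib clock.
    static long latencyMicros(CVPipelineResult result) {
        long captureMicros = MathUtils.nanosToMicros(result.getImageCaptureTimestampNanos());
        long publishMicros = WPIUtilJNI.now(); // time at which the result is published
        return publishMicros - captureMicros;
    }
}
// --- end sketch ---

Since the heartbeat now publishes the sequence ID, a consumer can spot dropped frames by watching for gaps in the heartbeat rather than only checking that the value keeps changing.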
@@ -67,6 +67,10 @@ public static long microsToNanos(long micros) {
return micros * 1000;
}

public static long nanosToMicros(long micros) {
return micros / 1000;
}

public static double map(
double value, double in_min, double in_max, double out_min, double out_max) {
return (value - in_min) * (out_max - out_min) / (in_max - in_min) + out_min;
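
A quick illustration of the unit helpers above; the values are arbitrary and the wrapper class is invented for the example:

// --- illustrative sketch, not part of the commit ---
import org.photonvision.common.util.math.MathUtils;

class UnitConversionSketch {
    public static void main(String[] args) {
        long nanos = 1_234_567_890L;
        long micros = MathUtils.nanosToMicros(nanos);       // 1_234_567 (integer division truncates)
        long backToNanos = MathUtils.microsToNanos(micros); // 1_234_567_000; sub-microsecond detail is lost
        System.out.println(micros + " us, " + backToNanos + " ns");
    }
}
// --- end sketch ---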
@@ -22,6 +22,7 @@
import org.photonvision.vision.opencv.Releasable;

public class Frame implements Releasable {
public final long sequenceID;
public final long timestampNanos;

// Frame should at _least_ contain the thresholded frame, and sometimes the color image
@@ -32,11 +33,13 @@ public class Frame implements Releasable {
public final FrameStaticProperties frameStaticProperties;

public Frame(
long sequenceID,
CVMat color,
CVMat processed,
FrameThresholdType type,
long timestampNanos,
FrameStaticProperties frameStaticProperties) {
this.sequenceID = sequenceID;
this.colorImage = color;
this.processedImage = processed;
this.type = type;
@@ -45,15 +48,17 @@ public Frame(
}

public Frame(
long sequenceID,
CVMat color,
CVMat processed,
FrameThresholdType processType,
FrameStaticProperties frameStaticProperties) {
this(color, processed, processType, MathUtils.wpiNanoTime(), frameStaticProperties);
this(sequenceID, color, processed, processType, MathUtils.wpiNanoTime(), frameStaticProperties);
}

public Frame() {
this(
-1,
new CVMat(),
new CVMat(),
FrameThresholdType.NONE,
@@ -21,18 +21,20 @@
import org.photonvision.vision.opencv.ImageRotationMode;
import org.photonvision.vision.pipe.impl.HSVPipe;

public interface FrameProvider extends Supplier<Frame> {
String getName();
public abstract class FrameProvider implements Supplier<Frame> {
protected int sequenceID = 0;

public abstract String getName();

/** Ask the camera to produce a certain kind of processed image (e.g. HSV or greyscale) */
void requestFrameThresholdType(FrameThresholdType type);
public abstract void requestFrameThresholdType(FrameThresholdType type);

/** Ask the camera to rotate frames it outputs */
void requestFrameRotation(ImageRotationMode rotationMode);
public abstract void requestFrameRotation(ImageRotationMode rotationMode);

/** Ask the camera to provide either the input, output, or both frames. */
void requestFrameCopies(boolean copyInput, boolean copyOutput);
public abstract void requestFrameCopies(boolean copyInput, boolean copyOutput);

/** Ask the camera to rotate frames it outputs */
void requestHsvSettings(HSVPipe.HSVParams params);
public abstract void requestHsvSettings(HSVPipe.HSVParams params);
}
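
With FrameProvider now an abstract class that owns a protected sequenceID counter, camera sources extend it and override the abstract methods instead of implementing an interface. A minimal do-nothing subclass as a sketch; the subclass name is invented and the package paths for Frame, FrameProvider and FrameThresholdType are assumptions:

// --- illustrative sketch, not part of the commit ---
import org.photonvision.vision.frame.Frame;
import org.photonvision.vision.frame.FrameProvider;
import org.photonvision.vision.frame.FrameThresholdType;
import org.photonvision.vision.opencv.ImageRotationMode;
import org.photonvision.vision.pipe.impl.HSVPipe;

class DummyFrameProvider extends FrameProvider {
    @Override
    public String getName() {
        return "DummyFrameProvider";
    }

    @Override
    public Frame get() {
        ++sequenceID; // inherited counter; real providers bump it once per successfully captured frame
        return new Frame(); // placeholder (sequence ID -1); a real provider would pass sequenceID through
    }

    @Override
    public void requestFrameThresholdType(FrameThresholdType type) {}

    @Override
    public void requestFrameRotation(ImageRotationMode rotationMode) {}

    @Override
    public void requestFrameCopies(boolean copyInput, boolean copyOutput) {}

    @Override
    public void requestHsvSettings(HSVPipe.HSVParams params) {}
}
// --- end sketch ---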
@@ -29,7 +29,7 @@
import org.photonvision.vision.pipe.impl.HSVPipe;
import org.photonvision.vision.pipe.impl.RotateImagePipe;

public abstract class CpuImageProcessor implements FrameProvider {
public abstract class CpuImageProcessor extends FrameProvider {
protected static class CapturedFrame {
CVMat colorImage;
FrameStaticProperties staticProps;
@@ -86,13 +86,20 @@ public final Frame get() {
} else {
outputMat = new CVMat();
}

++sequenceID;
} else {
System.out.println("Input was empty!");
outputMat = new CVMat();
}

return new Frame(
input.colorImage, outputMat, m_processType, input.captureTimestamp, input.staticProps);
sequenceID,
input.colorImage,
outputMat,
m_processType,
input.captureTimestamp,
input.staticProps);
}

@Override
@@ -99,7 +99,7 @@ public FileFrameProvider(Path path, double fov) {
@Override
public CapturedFrame getInputMat() {
var out = new CVMat();
out.copyTo(originalFrame);
out.copyFrom(originalFrame);

// block to keep FPS at a defined rate
if (System.currentTimeMillis() - lastGetMillis < millisDelay) {
@@ -30,7 +30,7 @@
import org.photonvision.vision.opencv.ImageRotationMode;
import org.photonvision.vision.pipe.impl.HSVPipe.HSVParams;

public class LibcameraGpuFrameProvider implements FrameProvider {
public class LibcameraGpuFrameProvider extends FrameProvider {
private final LibcameraGpuSettables settables;

static final Logger logger = new Logger(LibcameraGpuFrameProvider.class, LogGroup.Camera);
@@ -92,7 +92,11 @@ public Frame get() {

LibCameraJNI.releasePair(p_ptr);

// Know frame is good -- increment sequence
++sequenceID;

return new Frame(
sequenceID,
colorMat,
processedMat,
type,
@@ -36,11 +36,11 @@ public CVMat() {
this(new Mat());
}

public void copyTo(CVMat srcMat) {
copyTo(srcMat.getMat());
public void copyFrom(CVMat srcMat) {
copyFrom(srcMat.getMat());
}

public void copyTo(Mat srcMat) {
public void copyFrom(Mat srcMat) {
srcMat.copyTo(mat);
}
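
The rename from copyTo to copyFrom matches the direction the data actually moves: the argument is the source and the wrapped mat is the destination (srcMat.copyTo(mat)). A small usage sketch; the helper name is invented and the package paths are assumptions:

// --- illustrative sketch, not part of the commit ---
import org.opencv.core.Mat;
import org.photonvision.vision.opencv.CVMat;

class CopySketch {
    // Take an owned snapshot of a Mat that the capture loop may overwrite later.
    static CVMat snapshot(Mat source) {
        var copy = new CVMat();
        copy.copyFrom(source); // pixels flow from 'source' into 'copy'
        return copy;
    }
}
// --- end sketch ---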

@@ -102,7 +102,7 @@ protected CVPipelineResult process(Frame frame, Calibration3dPipelineSettings se
Mat inputColorMat = frame.colorImage.getMat();

if (this.calibrating || inputColorMat.empty()) {
return new CVPipelineResult(0, 0, null, frame);
return new CVPipelineResult(frame.sequenceID, 0, 0, null, frame);
}

if (getSettings().inputImageRotationMode != ImageRotationMode.DEG_0) {
@@ -111,7 +111,7 @@
logger.error(
"Input image rotation was non-zero! Calibration wasn't designed to deal with this. Attempting to manually change back to zero");
getSettings().inputImageRotationMode = ImageRotationMode.DEG_0;
return new CVPipelineResult(0, 0, List.of(), frame);
return new CVPipelineResult(frame.sequenceID, 0, 0, List.of(), frame);
}

long sumPipeNanosElapsed = 0L;
@@ -145,10 +145,15 @@ protected CVPipelineResult process(Frame frame, Calibration3dPipelineSettings se

// Return the drawn chessboard if corners are found, if not, then return the input image.
return new CalibrationPipelineResult(
frame.sequenceID,
sumPipeNanosElapsed,
fps, // Unused but here in case
new Frame(
new CVMat(), outputColorCVMat, FrameThresholdType.NONE, frame.frameStaticProperties),
frame.sequenceID,
new CVMat(),
outputColorCVMat,
FrameThresholdType.NONE,
frame.frameStaticProperties),
getCornersList());
}

@@ -116,7 +116,7 @@ protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings setting

if (frame.type != FrameThresholdType.GREYSCALE) {
// We asked for a GREYSCALE frame, but didn't get one -- best we can do is give up
return new CVPipelineResult(0, 0, List.of(), frame);
return new CVPipelineResult(frame.sequenceID, 0, 0, List.of(), frame);
}

CVPipeResult<List<AprilTagDetection>> tagDetectionPipeResult;
@@ -219,7 +219,8 @@ protected CVPipelineResult process(Frame frame, AprilTagPipelineSettings setting
var fpsResult = calculateFPSPipe.run(null);
var fps = fpsResult.output;

return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, multiTagResult, frame);
return new CVPipelineResult(
frame.sequenceID, sumPipeNanosElapsed, fps, targetList, multiTagResult, frame);
}

@Override
@@ -138,7 +138,7 @@ protected CVPipelineResult process(Frame frame, ArucoPipelineSettings settings)

if (frame.type != FrameThresholdType.GREYSCALE) {
// We asked for a GREYSCALE frame, but didn't get one -- best we can do is give up
return new CVPipelineResult(0, 0, List.of(), frame);
return new CVPipelineResult(frame.sequenceID, 0, 0, List.of(), frame);
}

CVPipeResult<List<ArucoDetectionResult>> tagDetectionPipeResult;
@@ -236,7 +236,8 @@ protected CVPipelineResult process(Frame frame, ArucoPipelineSettings settings)
var fpsResult = calculateFPSPipe.run(null);
var fps = fpsResult.output;

return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, multiTagResult, frame);
return new CVPipelineResult(
frame.sequenceID, sumPipeNanosElapsed, fps, targetList, multiTagResult, frame);
}

private void drawThresholdFrame(Mat greyMat, Mat outputMat, int windowSize, double constant) {
@@ -228,6 +228,6 @@ protected CVPipelineResult process(Frame frame, ColoredShapePipelineSettings set
var fpsResult = calculateFPSPipe.run(null);
var fps = fpsResult.output;

return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame);
return new CVPipelineResult(frame.sequenceID, sumPipeNanosElapsed, fps, targetList, frame);
}
}
@@ -84,9 +84,15 @@ public DriverModePipelineResult process(Frame frame, DriverModePipelineSettings

// Flip-flop input and output in the Frame
return new DriverModePipelineResult(
frame.sequenceID,
MathUtils.nanosToMillis(totalNanos),
fps,
new Frame(frame.processedImage, frame.colorImage, frame.type, frame.frameStaticProperties));
new Frame(
frame.sequenceID,
frame.processedImage,
frame.colorImage,
frame.type,
frame.frameStaticProperties));
}

@Override
@@ -92,18 +92,18 @@ protected void setPipeParamsImpl() {
}

@Override
protected CVPipelineResult process(Frame input_frame, ObjectDetectionPipelineSettings settings) {
protected CVPipelineResult process(Frame frame, ObjectDetectionPipelineSettings settings) {
long sumPipeNanosElapsed = 0;

// ***************** change based on backend ***********************

CVPipeResult<List<NeuralNetworkPipeResult>> rknnResult = rknnPipe.run(input_frame.colorImage);
CVPipeResult<List<NeuralNetworkPipeResult>> rknnResult = rknnPipe.run(frame.colorImage);
sumPipeNanosElapsed += rknnResult.nanosElapsed;
List<NeuralNetworkPipeResult> targetList;

var names = rknnPipe.getClassNames();

input_frame.colorImage.getMat().copyTo(input_frame.processedImage.getMat());
frame.colorImage.getMat().copyTo(frame.processedImage.getMat());

// ***************** change based on backend ***********************

@@ -125,7 +125,7 @@ protected CVPipelineResult process(Frame input_frame, ObjectDetectionPipelineSet
var fps = fpsResult.output;

return new CVPipelineResult(
sumPipeNanosElapsed, fps, collect2dTargetsResult.output, input_frame, names);
frame.sequenceID, sumPipeNanosElapsed, fps, collect2dTargetsResult.output, frame, names);
}

@Override
@@ -233,6 +233,7 @@ public CVPipelineResult process(
var fps = fpsResult.output;

return new CVPipelineResult(
inputAndOutputFrame.sequenceID,
sumPipeNanosElapsed,
fps, // Unused but here just in case
targetsToDraw,
@@ -169,6 +169,6 @@ public CVPipelineResult process(Frame frame, ReflectivePipelineSettings settings

PipelineProfiler.printReflectiveProfile(pipeProfileNanos);

return new CVPipelineResult(sumPipeNanosElapsed, fps, targetList, frame);
return new CVPipelineResult(frame.sequenceID, sumPipeNanosElapsed, fps, targetList, frame);
}
}