encapsulate exposure
deanlee committed Aug 3, 2024
1 parent cafca4f commit ff7a67a
Showing 6 changed files with 79 additions and 66 deletions.
7 changes: 2 additions & 5 deletions system/camerad/cameras/camera_common.cc
@@ -149,10 +149,7 @@ void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, CameraState *c) {
framed.setMeasuredGreyFraction(frame_data.measured_grey_fraction);
framed.setTargetGreyFraction(frame_data.target_grey_fraction);
framed.setProcessingTime(frame_data.processing_time);

const float ev = c->cur_ev[frame_data.frame_id % 3];
const float perc = util::map_val(ev, c->ci->min_ev, c->ci->max_ev, 0.0f, 100.0f);
framed.setExposureValPercent(perc);
framed.setExposureValPercent(frame_data.exposure_val_percent);
framed.setSensor(c->ci->image_sensor);
}

@@ -256,7 +253,7 @@ void publish_thumbnail(PubMaster *pm, const CameraBuf *b) {
pm->send("thumbnail", msg);
}

float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip) {
float CameraExposure::set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip) {
int lum_med;
uint32_t lum_binning[256] = {0};
const uint8_t *pix_ptr = b->cur_yuv_buf->y;
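The set_exposure_target helper moved here onto CameraExposure drives auto-exposure from a grey-level statistic of the Y (luma) plane; the visible opening lines above already hint at the shape (a 256-bin luminance histogram, a median value, and x/y skip factors to subsample the AE region). Below is a minimal, self-contained sketch of that kind of histogram-median AE target. The names and the exact statistic are illustrative assumptions, not the real implementation in camera_common.cc.

#include <cstdint>
#include <cstdio>
#include <vector>

// Sketch only: histogram-median auto-exposure target over a Y (luma) plane.
// The real set_exposure_target may weight the histogram or pick a different
// percentile; this shows the general technique.
struct SketchRect { int x, y, w, h; };

static float ae_target_sketch(const uint8_t *y_plane, int stride,
                              SketchRect r, int x_skip, int y_skip) {
  uint32_t hist[256] = {0};
  uint32_t total = 0;
  for (int yy = r.y; yy < r.y + r.h; yy += y_skip) {
    for (int xx = r.x; xx < r.x + r.w; xx += x_skip) {
      hist[y_plane[yy * stride + xx]]++;
      total++;
    }
  }
  // median of the sampled luminance values
  uint32_t seen = 0;
  int lum_med = 0;
  for (int i = 0; i < 256; i++) {
    seen += hist[i];
    if (seen >= total / 2) { lum_med = i; break; }
  }
  return lum_med / 256.0f;
}

int main() {
  const int W = 240, H = 160;
  std::vector<uint8_t> y(W * H, 128);      // flat mid-grey test frame
  float grey = ae_target_sketch(y.data(), W, {0, 0, W, H}, 2, 2);
  printf("grey fraction ~ %.2f\n", grey);  // ~0.5 for a mid-grey frame
  return 0;
}

The exposure controller then compares this measured grey fraction against target_grey_fraction to decide whether to raise or lower the next frame's exposure.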
2 changes: 1 addition & 1 deletion system/camerad/cameras/camera_common.h
@@ -35,6 +35,7 @@ typedef struct FrameMetadata {
float gain;
float measured_grey_fraction;
float target_grey_fraction;
float exposure_val_percent;

float processing_time;
} FrameMetadata;
@@ -70,7 +71,6 @@ class CameraBuf {

void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, CameraState *c);
kj::Array<uint8_t> get_raw_frame_image(const CameraBuf *b);
float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip);
void publish_thumbnail(PubMaster *pm, const CameraBuf *b);
void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_id, cl_context ctx);
void cameras_open(MultiCameraState *s);
37 changes: 37 additions & 0 deletions system/camerad/cameras/camera_exposure.h
@@ -0,0 +1,37 @@
#pragma once

#include <mutex>
#include "common/params.h"
#include "common/util.h"
#include "system/camerad/sensors/sensor.h"

class CameraExposure {
public:
CameraExposure(int camera_num, const SensorInfo *sensor_info, int rgb_width, int rgb_height, float focal_len);
void updateFrameMetaData(FrameMetadata &meta_data);
std::vector<i2c_random_wr_payload> getExposureRegisters(const CameraBuf &buf, int x_skip, int y_skip);
static float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip);

private:
void updateScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain);

std::mutex exp_lock;
const SensorInfo *ci = nullptr;

int exposure_time = 5;
bool dc_gain_enabled = false;
int dc_gain_weight = 0;
int gain_idx = 0;
float analog_gain_frac = 0;

float cur_ev[3] = {};
float best_ev_score = 0;
int new_exp_g = 0;
int new_exp_t = 0;

Rect ae_xywh = {};
float measured_grey_fraction = 0;
float target_grey_fraction = 0.3;
// for debugging
Params params;
};
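The updateScore and getExposureRegisters members declared above take over the auto-exposure search that previously lived in CameraState::set_camera_exposure (see the camera_qcom2.cc changes below): candidate exposure-time/gain combinations are scored and the lowest-scoring pair is stored in new_exp_t / new_exp_g. A self-contained sketch of that selection loop follows; the gain table, time range, and the simple EV-distance score are placeholders standing in for SensorInfo::getExposureScore().

#include <cmath>
#include <cstdio>

// Sketch only: pick the (exposure time, analog gain) candidate with the lowest score.
int main() {
  const float desired_ev = 400.0f;                 // placeholder target EV
  const float gains[] = {1.0f, 2.0f, 4.0f, 8.0f};  // placeholder analog gain table
  const int t_min = 10, t_max = 1000;              // placeholder exposure-time range (lines)

  float best_score = 1e9f;
  int new_exp_t = t_min, new_exp_g = 0;
  for (int g = 0; g < 4; g++) {
    for (int t = t_min; t <= t_max; t += 10) {
      float score = std::fabs(t * gains[g] - desired_ev);  // stand-in for getExposureScore()
      if (score < best_score) {  // keep the best candidate, like updateScore()
        best_score = score;
        new_exp_t = t;
        new_exp_g = g;
      }
    }
  }
  printf("best candidate: t=%d gain_idx=%d (score %.1f)\n", new_exp_t, new_exp_g, best_score);
  return 0;
}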
66 changes: 34 additions & 32 deletions system/camerad/cameras/camera_qcom2.cc
@@ -403,9 +403,14 @@ void CameraState::enqueue_req_multi(uint64_t start, int n, bool dp) {
}
}

// ******************* camera *******************
CameraExposure::CameraExposure(int camera_num, const SensorInfo *sensor_info, int rgb_width, int rgb_height, float focal_len)
: ci(sensor_info) {

void CameraState::set_exposure_rect() {
dc_gain_weight = ci->dc_gain_min_weight;
gain_idx = ci->analog_gain_rec_idx;
cur_ev[0] = cur_ev[1] = cur_ev[2] = (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight) * ci->sensor_analog_gains[gain_idx] * exposure_time;

float fl_pix = focal_len / ci->pixel_size_mm;
// set areas for each camera, shouldn't be changed
std::vector<std::pair<Rect, float>> ae_targets = {
// (Rect, F)
@@ -427,19 +432,13 @@ void CameraState::set_exposure_rect() {
float fl_ref = ae_target.second;

ae_xywh = (Rect){
std::max(0, buf.rgb_width / 2 - (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
std::max(0, buf.rgb_height / 2 - (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))),
std::min((int)(fl_pix / fl_ref * xywh_ref.w), buf.rgb_width / 2 + (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
std::min((int)(fl_pix / fl_ref * xywh_ref.h), buf.rgb_height / 2 + (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y)))
std::max(0, rgb_width / 2 - (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
std::max(0, rgb_height / 2 - (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))),
std::min((int)(fl_pix / fl_ref * xywh_ref.w), rgb_width / 2 + (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
std::min((int)(fl_pix / fl_ref * xywh_ref.h), rgb_height / 2 + (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y)))
};
}

void CameraState::sensor_set_parameters() {
dc_gain_weight = ci->dc_gain_min_weight;
gain_idx = ci->analog_gain_rec_idx;
cur_ev[0] = cur_ev[1] = cur_ev[2] = (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight) * ci->sensor_analog_gains[gain_idx] * exposure_time;
}

void CameraState::camera_map_bufs() {
for (int i = 0; i < FRAME_BUF_COUNT; i++) {
// configure ISP to put the image in place
@@ -462,8 +461,7 @@ void CameraState::camera_init(VisionIpcServer * v, cl_device_id device_id, cl_context ctx) {
buf.init(device_id, ctx, this, v, FRAME_BUF_COUNT, stream_type);
camera_map_bufs();

fl_pix = focal_len / ci->pixel_size_mm;
set_exposure_rect();
exposure = std::make_unique<CameraExposure>(camera_num, ci.get(), buf.rgb_width, buf.rgb_height, focal_len);
}

void CameraState::camera_open() {
@@ -491,9 +489,6 @@ bool CameraState::openSensor() {
auto init_sensor_lambda = [this](SensorInfo *sensor) {
ci.reset(sensor);
int ret = sensors_init();
if (ret == 0) {
sensor_set_parameters();
}
return ret == 0;
};

@@ -811,13 +806,8 @@ void CameraState::handle_camera_event(void *evdat) {
meta_data.frame_id = main_id - idx_offset;
meta_data.request_id = real_id;
meta_data.timestamp_sof = timestamp;
exp_lock.lock();
meta_data.gain = analog_gain_frac * (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight);
meta_data.high_conversion_gain = dc_gain_enabled;
meta_data.integ_lines = exposure_time;
meta_data.measured_grey_fraction = measured_grey_fraction;
meta_data.target_grey_fraction = target_grey_fraction;
exp_lock.unlock();

exposure->updateFrameMetaData(meta_data);

// dispatch
enqueue_req_multi(real_id + FRAME_BUF_COUNT, 1, 1);
@@ -832,7 +822,7 @@ void CameraState::handle_camera_event(void *evdat) {
}
}

void CameraState::update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain) {
void CameraExposure::updateScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain) {
float score = ci->getExposureScore(desired_ev, exp_t, exp_g_idx, exp_gain, gain_idx);
if (score < best_ev_score) {
new_exp_t = exp_t;
@@ -841,8 +831,17 @@ void CameraState::update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain) {
}
}

void CameraState::set_camera_exposure(float grey_frac) {
if (!enabled) return;
void CameraExposure::updateFrameMetaData(FrameMetadata &meta_data) {
std::lock_guard lk(exp_lock);
meta_data.gain = analog_gain_frac * (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight);
meta_data.high_conversion_gain = dc_gain_enabled;
meta_data.integ_lines = exposure_time;
meta_data.measured_grey_fraction = measured_grey_fraction;
meta_data.target_grey_fraction = target_grey_fraction;
meta_data.exposure_val_percent = util::map_val(cur_ev[meta_data.frame_id % 3], ci->min_ev, ci->max_ev, 0.0f, 100.0f);
}

std::vector<i2c_random_wr_payload> CameraExposure::getExposureRegisters(const CameraBuf &buf, int x_skip, int y_skip) {
const float dt = 0.05;

const float ts_grey = 10.0;
@@ -856,6 +855,7 @@ void CameraState::set_camera_exposure(float grey_frac) {
// Therefore we use the target EV from 3 frames ago, the grey fraction that was just measured was the result of that control action.
// TODO: Lower latency to 2 frames, by using the histogram outputted by the sensor we can do AE before the debayering is complete

float grey_frac = set_exposure_target(&buf, ae_xywh, x_skip, y_skip);
const float cur_ev_ = cur_ev[buf.cur_frame_data.frame_id % 3];

// Scale target grey between 0.1 and 0.4 depending on lighting conditions
@@ -912,7 +912,7 @@ void CameraState::set_camera_exposure(float grey_frac) {
continue;
}

update_exposure_score(desired_ev, t, g, gain);
updateScore(desired_ev, t, g, gain);
}
}

@@ -939,8 +939,7 @@ void CameraState::set_camera_exposure(float grey_frac) {
}
// LOGE("ae - camera %d, cur_t %.5f, sof %.5f, dt %.5f", camera_num, 1e-9 * nanos_since_boot(), 1e-9 * buf.cur_frame_data.timestamp_sof, 1e-9 * (nanos_since_boot() - buf.cur_frame_data.timestamp_sof));

auto exp_reg_array = ci->getExposureRegisters(exposure_time, new_exp_g, dc_gain_enabled);
sensors_i2c(exp_reg_array.data(), exp_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, ci->data_word);
return ci->getExposureRegisters(exposure_time, new_exp_g, dc_gain_enabled);
}

void CameraState::run() {
Expand All @@ -963,9 +962,12 @@ void CameraState::run() {
LOGT(buf.cur_frame_data.frame_id, "%s: Image set", publish_name);
}

// Process camera registers and set camera exposure
// Process camera registers
ci->processRegisters(this, framed);
set_camera_exposure(set_exposure_target(&buf, ae_xywh, 2, stream_type != VISION_STREAM_DRIVER ? 2 : 4));

// Set camera exposure
auto exp_reg_array = exposure->getExposureRegisters(buf, 2, stream_type != VISION_STREAM_DRIVER ? 2 : 4);
sensors_i2c(exp_reg_array.data(), exp_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, ci->data_word);

// Send the message
multi_cam_state->pm->send(publish_name, msg);
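One detail of the camera_qcom2.cc changes above: exposureValPercent is now produced inside CameraExposure::updateFrameMetaData, where the EV of the frame being reported (cur_ev[frame_id % 3]) is remapped linearly from the sensor's [min_ev, max_ev] range onto [0, 100] with util::map_val, replacing the computation removed from fill_frame_data. A self-contained sketch of that mapping follows; map_val_sketch approximates util::map_val, and the clamping behaviour is an assumption rather than something shown in this diff.

#include <algorithm>
#include <cstdio>

// Sketch only: linear remap of the current EV to a 0-100 percentage.
static float map_val_sketch(float x, float a1, float a2, float b1, float b2) {
  x = std::clamp(x, a1, a2);  // assumed clamping; check common/util.h for the real behaviour
  return b1 + (x - a1) * (b2 - b1) / (a2 - a1);
}

int main() {
  const float min_ev = 0.1f, max_ev = 1000.0f;  // placeholder sensor EV limits
  const float cur_ev = 250.0f;                  // placeholder EV for the reported frame
  printf("exposureValPercent ~ %.1f\n", map_val_sketch(cur_ev, min_ev, max_ev, 0.0f, 100.0f));
  return 0;
}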
29 changes: 3 additions & 26 deletions system/camerad/cameras/camera_qcom2.h
@@ -4,9 +4,9 @@
#include <utility>

#include "system/camerad/cameras/camera_common.h"
#include "system/camerad/cameras/camera_exposure.h"
#include "system/camerad/cameras/camera_util.h"
#include "system/camerad/sensors/sensor.h"
#include "common/params.h"
#include "common/util.h"

#define FRAME_BUF_COUNT 4
@@ -51,45 +51,25 @@ class CameraState {
public:
MultiCameraState *multi_cam_state = nullptr;
std::unique_ptr<const SensorInfo> ci;
std::unique_ptr<CameraExposure> exposure;

bool enabled = true;
VisionStreamType stream_type;
const char *publish_name = nullptr;
cereal::FrameData::Builder (cereal::Event::Builder::*init_camera_state)() = nullptr;
float focal_len = 0;

std::mutex exp_lock;

int exposure_time = 5;
bool dc_gain_enabled = false;
int dc_gain_weight = 0;
int gain_idx = 0;
float analog_gain_frac = 0;

float cur_ev[3] = {};
float best_ev_score = 0;
int new_exp_g = 0;
int new_exp_t = 0;

Rect ae_xywh = {};
float measured_grey_fraction = 0;
float target_grey_fraction = 0.3;

unique_fd sensor_fd;
unique_fd csiphy_fd;

int camera_num = 0;
float fl_pix = 0;

CameraState(MultiCameraState *multi_camera_state, const CameraConfig &config);
void handle_camera_event(void *evdat);
void update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain);
void set_camera_exposure(float grey_frac);

void sensors_start();

void camera_open();
void set_exposure_rect();
void sensor_set_parameters();
void camera_map_bufs();
void camera_init(VisionIpcServer *v, cl_device_id device_id, cl_context ctx);
void camera_close();
Expand Down Expand Up @@ -128,9 +108,6 @@ class CameraState {
void configISP();
void configCSIPHY();
void linkDevices();

// for debugging
Params params;
};

class MultiCameraState {
4 changes: 2 additions & 2 deletions system/camerad/test/test_ae_gray.cc
@@ -7,7 +7,7 @@
#include <cstring>

#include "common/util.h"
#include "system/camerad/cameras/camera_common.h"
#include "system/camerad/cameras/camera_exposure.h"

#define W 240
#define H 160
@@ -61,7 +61,7 @@ TEST_CASE("camera.test_set_exposure_target") {
memset(&fb_y[h_0*W+h_1*W], l[2], h_2*W);
memset(&fb_y[h_0*W+h_1*W+h_2*W], l[3], h_3*W);
memset(&fb_y[h_0*W+h_1*W+h_2*W+h_3*W], l[4], h_4*W);
float ev = set_exposure_target((const CameraBuf*) &cb, rect, 1, 1);
float ev = CameraExposure::set_exposure_target((const CameraBuf*) &cb, rect, 1, 1);
// printf("%d/%d/%d/%d/%d ev is %f\n", h_0, h_1, h_2, h_3, h_4, ev);
// printf("%f\n", ev);

