From ff7a67abc1c0b507538233d6b00a167995bb2c45 Mon Sep 17 00:00:00 2001
From: deanlee
Date: Sat, 3 Aug 2024 19:30:04 +0800
Subject: [PATCH] encapsulate exposure

---
 system/camerad/cameras/camera_common.cc  |  7 +--
 system/camerad/cameras/camera_common.h   |  2 +-
 system/camerad/cameras/camera_exposure.h | 37 +++++++++++++
 system/camerad/cameras/camera_qcom2.cc   | 66 ++++++++++++------------
 system/camerad/cameras/camera_qcom2.h    | 29 ++---------
 system/camerad/test/test_ae_gray.cc      |  4 +-
 6 files changed, 79 insertions(+), 66 deletions(-)
 create mode 100644 system/camerad/cameras/camera_exposure.h

diff --git a/system/camerad/cameras/camera_common.cc b/system/camerad/cameras/camera_common.cc
index 6aec03eae195ac..620fd5d5961256 100644
--- a/system/camerad/cameras/camera_common.cc
+++ b/system/camerad/cameras/camera_common.cc
@@ -149,10 +149,7 @@ void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &fr
   framed.setMeasuredGreyFraction(frame_data.measured_grey_fraction);
   framed.setTargetGreyFraction(frame_data.target_grey_fraction);
   framed.setProcessingTime(frame_data.processing_time);
-
-  const float ev = c->cur_ev[frame_data.frame_id % 3];
-  const float perc = util::map_val(ev, c->ci->min_ev, c->ci->max_ev, 0.0f, 100.0f);
-  framed.setExposureValPercent(perc);
+  framed.setExposureValPercent(frame_data.exposure_val_percent);
 
   framed.setSensor(c->ci->image_sensor);
 }
@@ -256,7 +253,7 @@ void publish_thumbnail(PubMaster *pm, const CameraBuf *b) {
   pm->send("thumbnail", msg);
 }
 
-float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip) {
+float CameraExposure::set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip) {
   int lum_med;
   uint32_t lum_binning[256] = {0};
   const uint8_t *pix_ptr = b->cur_yuv_buf->y;
diff --git a/system/camerad/cameras/camera_common.h b/system/camerad/cameras/camera_common.h
index d58f4d3195fdec..c92cce3318a6d5 100644
--- a/system/camerad/cameras/camera_common.h
+++ b/system/camerad/cameras/camera_common.h
@@ -35,6 +35,7 @@ typedef struct FrameMetadata {
   float gain;
   float measured_grey_fraction;
   float target_grey_fraction;
+  float exposure_val_percent;
   float processing_time;
 } FrameMetadata;
 
@@ -70,7 +71,6 @@ class CameraBuf {
 
 void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, CameraState *c);
 kj::Array<uint8_t> get_raw_frame_image(const CameraBuf *b);
-float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip);
 void publish_thumbnail(PubMaster *pm, const CameraBuf *b);
 void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_id, cl_context ctx);
 void cameras_open(MultiCameraState *s);
diff --git a/system/camerad/cameras/camera_exposure.h b/system/camerad/cameras/camera_exposure.h
new file mode 100644
index 00000000000000..4d620c7f5f4574
--- /dev/null
+++ b/system/camerad/cameras/camera_exposure.h
@@ -0,0 +1,37 @@
+#pragma once
+
+#include <mutex>
+#include "common/params.h"
+#include "common/util.h"
+#include "system/camerad/sensors/sensor.h"
+
+class CameraExposure {
+public:
+  CameraExposure(int camera_num, const SensorInfo *sensor_info, int rgb_width, int rgb_height, float focal_len);
+  void updateFrameMetaData(FrameMetadata &meta_data);
+  std::vector<i2c_random_wr_payload> getExposureRegisters(const CameraBuf &buf, int x_skip, int y_skip);
+  static float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip);
+
+private:
+  void updateScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain);
+
+  std::mutex exp_lock;
+  const SensorInfo *ci = nullptr;
+
+  int exposure_time = 5;
+  bool dc_gain_enabled = false;
+  int dc_gain_weight = 0;
+  int gain_idx = 0;
+  float analog_gain_frac = 0;
+
+  float cur_ev[3] = {};
+  float best_ev_score = 0;
+  int new_exp_g = 0;
+  int new_exp_t = 0;
+
+  Rect ae_xywh = {};
+  float measured_grey_fraction = 0;
+  float target_grey_fraction = 0.3;
+  // for debugging
+  Params params;
+};
diff --git a/system/camerad/cameras/camera_qcom2.cc b/system/camerad/cameras/camera_qcom2.cc
index 0648ddf4dede34..52a5eba5d735c0 100644
--- a/system/camerad/cameras/camera_qcom2.cc
+++ b/system/camerad/cameras/camera_qcom2.cc
@@ -403,9 +403,14 @@ void CameraState::enqueue_req_multi(uint64_t start, int n, bool dp) {
   }
 }
 
-// ******************* camera *******************
+CameraExposure::CameraExposure(int camera_num, const SensorInfo *sensor_info, int rgb_width, int rgb_height, float focal_len)
+  : ci(sensor_info) {
 
-void CameraState::set_exposure_rect() {
+  dc_gain_weight = ci->dc_gain_min_weight;
+  gain_idx = ci->analog_gain_rec_idx;
+  cur_ev[0] = cur_ev[1] = cur_ev[2] = (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight) * ci->sensor_analog_gains[gain_idx] * exposure_time;
+
+  float fl_pix = focal_len / ci->pixel_size_mm;
   // set areas for each camera, shouldn't be changed
   std::vector<std::pair<Rect, float>> ae_targets = {
     // (Rect, F)
@@ -427,19 +432,13 @@ void CameraState::set_exposure_rect() {
   float fl_ref = ae_target.second;
 
   ae_xywh = (Rect){
-    std::max(0, buf.rgb_width / 2 - (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
-    std::max(0, buf.rgb_height / 2 - (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))),
-    std::min((int)(fl_pix / fl_ref * xywh_ref.w), buf.rgb_width / 2 + (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
-    std::min((int)(fl_pix / fl_ref * xywh_ref.h), buf.rgb_height / 2 + (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y)))
+    std::max(0, rgb_width / 2 - (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
+    std::max(0, rgb_height / 2 - (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y))),
+    std::min((int)(fl_pix / fl_ref * xywh_ref.w), rgb_width / 2 + (int)(fl_pix / fl_ref * xywh_ref.w / 2)),
+    std::min((int)(fl_pix / fl_ref * xywh_ref.h), rgb_height / 2 + (int)(fl_pix / fl_ref * (h_ref / 2 - xywh_ref.y)))
   };
 }
 
-void CameraState::sensor_set_parameters() {
-  dc_gain_weight = ci->dc_gain_min_weight;
-  gain_idx = ci->analog_gain_rec_idx;
-  cur_ev[0] = cur_ev[1] = cur_ev[2] = (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight) * ci->sensor_analog_gains[gain_idx] * exposure_time;
-}
-
 void CameraState::camera_map_bufs() {
   for (int i = 0; i < FRAME_BUF_COUNT; i++) {
     // configure ISP to put the image in place
@@ -462,8 +461,7 @@ void CameraState::camera_init(VisionIpcServer * v, cl_device_id device_id, cl_co
   buf.init(device_id, ctx, this, v, FRAME_BUF_COUNT, stream_type);
   camera_map_bufs();
 
-  fl_pix = focal_len / ci->pixel_size_mm;
-  set_exposure_rect();
+  exposure = std::make_unique<CameraExposure>(camera_num, ci.get(), buf.rgb_width, buf.rgb_height, focal_len);
 }
 
 void CameraState::camera_open() {
@@ -491,9 +489,6 @@ bool CameraState::openSensor() {
   auto init_sensor_lambda = [this](SensorInfo *sensor) {
     ci.reset(sensor);
     int ret = sensors_init();
-    if (ret == 0) {
-      sensor_set_parameters();
-    }
     return ret == 0;
   };
 
@@ -811,13 +806,8 @@ void CameraState::handle_camera_event(void *evdat) {
     meta_data.frame_id = main_id - idx_offset;
     meta_data.request_id = real_id;
     meta_data.timestamp_sof = timestamp;
-    exp_lock.lock();
-    meta_data.gain = analog_gain_frac * (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight);
-    meta_data.high_conversion_gain = dc_gain_enabled;
-    meta_data.integ_lines = exposure_time;
-    meta_data.measured_grey_fraction = measured_grey_fraction;
-    meta_data.target_grey_fraction = target_grey_fraction;
-    exp_lock.unlock();
+
+    exposure->updateFrameMetaData(meta_data);
 
     // dispatch
     enqueue_req_multi(real_id + FRAME_BUF_COUNT, 1, 1);
@@ -832,7 +822,7 @@
   }
 }
 
-void CameraState::update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain) {
+void CameraExposure::updateScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain) {
   float score = ci->getExposureScore(desired_ev, exp_t, exp_g_idx, exp_gain, gain_idx);
   if (score < best_ev_score) {
     new_exp_t = exp_t;
@@ -841,8 +831,17 @@
   }
 }
 
-void CameraState::set_camera_exposure(float grey_frac) {
-  if (!enabled) return;
+void CameraExposure::updateFrameMetaData(FrameMetadata &meta_data) {
+  std::lock_guard lk(exp_lock);
+  meta_data.gain = analog_gain_frac * (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight);
+  meta_data.high_conversion_gain = dc_gain_enabled;
+  meta_data.integ_lines = exposure_time;
+  meta_data.measured_grey_fraction = measured_grey_fraction;
+  meta_data.target_grey_fraction = target_grey_fraction;
+  meta_data.exposure_val_percent = util::map_val(cur_ev[meta_data.frame_id % 3], ci->min_ev, ci->max_ev, 0.0f, 100.0f);
+}
+
+std::vector<i2c_random_wr_payload> CameraExposure::getExposureRegisters(const CameraBuf &buf, int x_skip, int y_skip) {
   const float dt = 0.05;
 
   const float ts_grey = 10.0;
@@ -856,6 +855,7 @@
   // Therefore we use the target EV from 3 frames ago, the grey fraction that was just measured was the result of that control action.
   // TODO: Lower latency to 2 frames, by using the histogram outputted by the sensor we can do AE before the debayering is complete
 
+  float grey_frac = set_exposure_target(&buf, ae_xywh, x_skip, y_skip);
   const float cur_ev_ = cur_ev[buf.cur_frame_data.frame_id % 3];
 
   // Scale target grey between 0.1 and 0.4 depending on lighting conditions
@@ -912,7 +912,7 @@
         continue;
       }
 
-      update_exposure_score(desired_ev, t, g, gain);
+      updateScore(desired_ev, t, g, gain);
     }
   }
 
@@ -939,8 +939,7 @@
   }
 
   // LOGE("ae - camera %d, cur_t %.5f, sof %.5f, dt %.5f", camera_num, 1e-9 * nanos_since_boot(), 1e-9 * buf.cur_frame_data.timestamp_sof, 1e-9 * (nanos_since_boot() - buf.cur_frame_data.timestamp_sof));
-  auto exp_reg_array = ci->getExposureRegisters(exposure_time, new_exp_g, dc_gain_enabled);
-  sensors_i2c(exp_reg_array.data(), exp_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, ci->data_word);
+  return ci->getExposureRegisters(exposure_time, new_exp_g, dc_gain_enabled);
 }
 
 void CameraState::run() {
@@ -963,9 +962,12 @@
       LOGT(buf.cur_frame_data.frame_id, "%s: Image set", publish_name);
     }
 
-    // Process camera registers and set camera exposure
+    // Process camera registers
     ci->processRegisters(this, framed);
-    set_camera_exposure(set_exposure_target(&buf, ae_xywh, 2, stream_type != VISION_STREAM_DRIVER ? 2 : 4));
+
+    // Set camera exposure
+    auto exp_reg_array = exposure->getExposureRegisters(buf, 2, stream_type != VISION_STREAM_DRIVER ? 2 : 4);
+    sensors_i2c(exp_reg_array.data(), exp_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, ci->data_word);
 
     // Send the message
     multi_cam_state->pm->send(publish_name, msg);
diff --git a/system/camerad/cameras/camera_qcom2.h b/system/camerad/cameras/camera_qcom2.h
index 02ebeb71f31296..ba8f4bf02e6a0f 100644
--- a/system/camerad/cameras/camera_qcom2.h
+++ b/system/camerad/cameras/camera_qcom2.h
@@ -4,9 +4,9 @@
 #include <memory>
 
 #include "system/camerad/cameras/camera_common.h"
+#include "system/camerad/cameras/camera_exposure.h"
 #include "system/camerad/cameras/camera_util.h"
 #include "system/camerad/sensors/sensor.h"
-#include "common/params.h"
 #include "common/util.h"
 
 #define FRAME_BUF_COUNT 4
@@ -51,45 +51,25 @@ class CameraState {
 public:
   MultiCameraState *multi_cam_state = nullptr;
   std::unique_ptr<const SensorInfo> ci;
+  std::unique_ptr<CameraExposure> exposure;
+
   bool enabled = true;
   VisionStreamType stream_type;
   const char *publish_name = nullptr;
   cereal::FrameData::Builder (cereal::Event::Builder::*init_camera_state)() = nullptr;
   float focal_len = 0;
 
-  std::mutex exp_lock;
-
-  int exposure_time = 5;
-  bool dc_gain_enabled = false;
-  int dc_gain_weight = 0;
-  int gain_idx = 0;
-  float analog_gain_frac = 0;
-
-  float cur_ev[3] = {};
-  float best_ev_score = 0;
-  int new_exp_g = 0;
-  int new_exp_t = 0;
-
-  Rect ae_xywh = {};
-  float measured_grey_fraction = 0;
-  float target_grey_fraction = 0.3;
-
   unique_fd sensor_fd;
   unique_fd csiphy_fd;
 
   int camera_num = 0;
-  float fl_pix = 0;
 
   CameraState(MultiCameraState *multi_camera_state, const CameraConfig &config);
   void handle_camera_event(void *evdat);
-  void update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain);
-  void set_camera_exposure(float grey_frac);
 
   void sensors_start();
 
   void camera_open();
-  void set_exposure_rect();
-  void sensor_set_parameters();
   void camera_map_bufs();
   void camera_init(VisionIpcServer *v, cl_device_id device_id, cl_context ctx);
   void camera_close();
@@ -128,9 +108,6 @@ class CameraState {
   void configISP();
   void configCSIPHY();
   void linkDevices();
-
-  // for debugging
-  Params params;
 };
 
 class MultiCameraState {
diff --git a/system/camerad/test/test_ae_gray.cc b/system/camerad/test/test_ae_gray.cc
index be9a0cc59f20f8..55e8548da5d95d 100644
--- a/system/camerad/test/test_ae_gray.cc
+++ b/system/camerad/test/test_ae_gray.cc
@@ -7,7 +7,7 @@
 #include <cstring>
 
 #include "common/util.h"
-#include "system/camerad/cameras/camera_common.h"
+#include "system/camerad/cameras/camera_exposure.h"
 
 #define W 240
 #define H 160
@@ -61,7 +61,7 @@ TEST_CASE("camera.test_set_exposure_target") {
      memset(&fb_y[h_0*W+h_1*W], l[2], h_2*W);
      memset(&fb_y[h_0*W+h_1*W+h_2*W], l[3], h_3*W);
      memset(&fb_y[h_0*W+h_1*W+h_2*W+h_3*W], l[4], h_4*W);
 
-     float ev = set_exposure_target((const CameraBuf*) &cb, rect, 1, 1);
+     float ev = CameraExposure::set_exposure_target((const CameraBuf*) &cb, rect, 1, 1);
      // printf("%d/%d/%d/%d/%d ev is %f\n", h_0, h_1, h_2, h_3, h_4, ev);
      // printf("%f\n", ev);
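
A minimal standalone sketch of the exposureValPercent bookkeeping that CameraExposure::updateFrameMetaData() now owns: the EV commanded for each frame sits in the three-slot cur_ev ring (indexed by frame_id % 3, since auto exposure acts with a three-frame delay) and is mapped onto 0-100 between the sensor's min_ev and max_ev. Illustrative only: map_val below is a local stand-in for util::map_val, and the EV limits and values are placeholder numbers, not taken from any real sensor config.

#include <algorithm>
#include <cstdio>

// Local stand-in for util::map_val(): clamp x to [in_min, in_max], then remap linearly to [out_min, out_max].
static float map_val(float x, float in_min, float in_max, float out_min, float out_max) {
  x = std::clamp(x, in_min, in_max);
  return out_min + (x - in_min) * (out_max - out_min) / (in_max - in_min);
}

int main() {
  // Placeholder EV limits; the real ones come from the SensorInfo implementation.
  const float min_ev = 0.1f;
  const float max_ev = 1000.0f;

  // cur_ev[] holds the EV commanded for the last three frames, so frame N reads
  // back the EV it was actually captured with.
  const float cur_ev[3] = {12.0f, 14.0f, 16.0f};

  for (unsigned frame_id = 100; frame_id < 103; ++frame_id) {
    float ev = cur_ev[frame_id % 3];
    float exposure_val_percent = map_val(ev, min_ev, max_ev, 0.0f, 100.0f);
    printf("frame %u: ev %.1f -> exposureValPercent %.2f\n", frame_id, ev, exposure_val_percent);
  }
  return 0;
}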