[libcamera-devel] [PATCH v2 2/5] ipa: rpi: agc: Reorganise code for multi-channel AGC
Jacopo Mondi
jacopo.mondi at ideasonboard.com
Tue Aug 29 17:04:57 CEST 2023
Hi David
On Wed, Aug 23, 2023 at 01:09:12PM +0100, David Plowman via libcamera-devel wrote:
> This commit does the basic reorganisation of the code in order to
> implement multi-channel AGC. The main changes are:
>
> * The previous Agc class (in agc.cpp) has become the AgcChannel class
> (in agc_channel.cpp).
>
> * A new Agc class is introduced which is a wrapper round a number of
> AgcChannels.
>
> * The basic plumbing from ipa_base.cpp to Agc is updated to include a
> channel number. All the existing controls are hardwired to talk
> directly to channel 0.
>
> There are a couple of limitations which we expect to apply to
> multi-channel AGC. We're not allowing different frame durations to be
> applied to the channels, nor are we allowing separate metering
> modes. To be fair, supporting these things is not impossible, but
> there are reasons why it may be tricky so they remain "TBD" for now.
>
> This patch only includes the basic reorganisation and plumbing. It
> does not yet update the important methods (switchMode, prepare and
> process) to implement multi-channel AGC properly. This will appear in
> a subsequent commit. For now, these functions are hard-coded just to
> use channel 0, thereby preserving the existing behaviour.
>
> Signed-off-by: David Plowman <david.plowman at raspberrypi.com>
> Reviewed-by: Naushir Patuck <naush at raspberrypi.com>
> ---
> src/ipa/rpi/common/ipa_base.cpp | 20 +-
> src/ipa/rpi/controller/agc_algorithm.h | 19 +-
> src/ipa/rpi/controller/meson.build | 1 +
> src/ipa/rpi/controller/rpi/agc.cpp | 910 +++-----------------
> src/ipa/rpi/controller/rpi/agc.h | 122 +--
> src/ipa/rpi/controller/rpi/agc_channel.cpp | 927 +++++++++++++++++++++
> src/ipa/rpi/controller/rpi/agc_channel.h | 135 +++
> 7 files changed, 1219 insertions(+), 915 deletions(-)
> create mode 100644 src/ipa/rpi/controller/rpi/agc_channel.cpp
> create mode 100644 src/ipa/rpi/controller/rpi/agc_channel.h
>
> diff --git a/src/ipa/rpi/common/ipa_base.cpp b/src/ipa/rpi/common/ipa_base.cpp
> index a47ae3a9..f7e7ad5e 100644
> --- a/src/ipa/rpi/common/ipa_base.cpp
> +++ b/src/ipa/rpi/common/ipa_base.cpp
> @@ -699,9 +699,9 @@ void IpaBase::applyControls(const ControlList &controls)
> }
>
> if (ctrl.second.get<bool>() == false)
> - agc->disableAuto();
> + agc->disableAuto(0);
> else
> - agc->enableAuto();
> + agc->enableAuto(0);
>
> libcameraMetadata_.set(controls::AeEnable, ctrl.second.get<bool>());
> break;
> @@ -717,7 +717,7 @@ void IpaBase::applyControls(const ControlList &controls)
> }
>
> /* The control provides units of microseconds. */
> - agc->setFixedShutter(ctrl.second.get<int32_t>() * 1.0us);
> + agc->setFixedShutter(0, ctrl.second.get<int32_t>() * 1.0us);
>
> libcameraMetadata_.set(controls::ExposureTime, ctrl.second.get<int32_t>());
> break;
> @@ -732,7 +732,7 @@ void IpaBase::applyControls(const ControlList &controls)
> break;
> }
>
> - agc->setFixedAnalogueGain(ctrl.second.get<float>());
> + agc->setFixedAnalogueGain(0, ctrl.second.get<float>());
>
> libcameraMetadata_.set(controls::AnalogueGain,
> ctrl.second.get<float>());
> @@ -770,7 +770,7 @@ void IpaBase::applyControls(const ControlList &controls)
>
> int32_t idx = ctrl.second.get<int32_t>();
> if (ConstraintModeTable.count(idx)) {
> - agc->setConstraintMode(ConstraintModeTable.at(idx));
> + agc->setConstraintMode(0, ConstraintModeTable.at(idx));
> libcameraMetadata_.set(controls::AeConstraintMode, idx);
> } else {
> LOG(IPARPI, Error) << "Constraint mode " << idx
> @@ -790,7 +790,7 @@ void IpaBase::applyControls(const ControlList &controls)
>
> int32_t idx = ctrl.second.get<int32_t>();
> if (ExposureModeTable.count(idx)) {
> - agc->setExposureMode(ExposureModeTable.at(idx));
> + agc->setExposureMode(0, ExposureModeTable.at(idx));
> libcameraMetadata_.set(controls::AeExposureMode, idx);
> } else {
> LOG(IPARPI, Error) << "Exposure mode " << idx
> @@ -813,7 +813,7 @@ void IpaBase::applyControls(const ControlList &controls)
> * So convert to 2^EV
> */
> double ev = pow(2.0, ctrl.second.get<float>());
> - agc->setEv(ev);
> + agc->setEv(0, ev);
> libcameraMetadata_.set(controls::ExposureValue,
> ctrl.second.get<float>());
> break;
> @@ -833,12 +833,12 @@ void IpaBase::applyControls(const ControlList &controls)
>
> switch (mode) {
> case controls::FlickerOff:
> - agc->setFlickerPeriod(0us);
> + agc->setFlickerPeriod(0, 0us);
>
> break;
>
> case controls::FlickerManual:
> - agc->setFlickerPeriod(flickerState_.manualPeriod);
> + agc->setFlickerPeriod(0, flickerState_.manualPeriod);
>
> break;
>
> @@ -872,7 +872,7 @@ void IpaBase::applyControls(const ControlList &controls)
> * first, and the period updated after, or vice versa.
> */
> if (flickerState_.mode == controls::FlickerManual)
> - agc->setFlickerPeriod(flickerState_.manualPeriod);
> + agc->setFlickerPeriod(0, flickerState_.manualPeriod);
>
> break;
> }
> diff --git a/src/ipa/rpi/controller/agc_algorithm.h b/src/ipa/rpi/controller/agc_algorithm.h
> index b6949daa..b8986560 100644
> --- a/src/ipa/rpi/controller/agc_algorithm.h
> +++ b/src/ipa/rpi/controller/agc_algorithm.h
> @@ -21,16 +21,19 @@ public:
> /* An AGC algorithm must provide the following: */
> virtual unsigned int getConvergenceFrames() const = 0;
> virtual std::vector<double> const &getWeights() const = 0;
> - virtual void setEv(double ev) = 0;
> - virtual void setFlickerPeriod(libcamera::utils::Duration flickerPeriod) = 0;
> - virtual void setFixedShutter(libcamera::utils::Duration fixedShutter) = 0;
> + virtual void setEv(unsigned int channel, double ev) = 0;
> + virtual void setFlickerPeriod(unsigned int channel,
> + libcamera::utils::Duration flickerPeriod) = 0;
> + virtual void setFixedShutter(unsigned int channel,
> + libcamera::utils::Duration fixedShutter) = 0;
> virtual void setMaxShutter(libcamera::utils::Duration maxShutter) = 0;
> - virtual void setFixedAnalogueGain(double fixedAnalogueGain) = 0;
> + virtual void setFixedAnalogueGain(unsigned int channel, double fixedAnalogueGain) = 0;
> virtual void setMeteringMode(std::string const &meteringModeName) = 0;
> - virtual void setExposureMode(std::string const &exposureModeName) = 0;
> - virtual void setConstraintMode(std::string const &contraintModeName) = 0;
> - virtual void enableAuto() = 0;
> - virtual void disableAuto() = 0;
> + virtual void setExposureMode(unsigned int channel, std::string const &exposureModeName) = 0;
> + virtual void setConstraintMode(unsigned int channel, std::string const &contraintModeName) = 0;
> + virtual void enableAuto(unsigned int channel) = 0;
> + virtual void disableAuto(unsigned int channel) = 0;
> + virtual void setActiveChannels(const std::vector<unsigned int> &activeChannels) = 0;
> };
>
> } /* namespace RPiController */
> diff --git a/src/ipa/rpi/controller/meson.build b/src/ipa/rpi/controller/meson.build
> index feb0334e..20b9cda9 100644
> --- a/src/ipa/rpi/controller/meson.build
> +++ b/src/ipa/rpi/controller/meson.build
> @@ -8,6 +8,7 @@ rpi_ipa_controller_sources = files([
> 'pwl.cpp',
> 'rpi/af.cpp',
> 'rpi/agc.cpp',
> + 'rpi/agc_channel.cpp',
> 'rpi/alsc.cpp',
> 'rpi/awb.cpp',
> 'rpi/black_level.cpp',
> diff --git a/src/ipa/rpi/controller/rpi/agc.cpp b/src/ipa/rpi/controller/rpi/agc.cpp
> index 7b02972a..c9c9c297 100644
> --- a/src/ipa/rpi/controller/rpi/agc.cpp
> +++ b/src/ipa/rpi/controller/rpi/agc.cpp
> @@ -5,20 +5,12 @@
> * agc.cpp - AGC/AEC control algorithm
> */
>
> -#include <algorithm>
> -#include <map>
> -#include <tuple>
> +#include "agc.h"
>
> #include <libcamera/base/log.h>
>
> -#include "../awb_status.h"
> -#include "../device_status.h"
> -#include "../histogram.h"
> -#include "../lux_status.h"
> #include "../metadata.h"
>
> -#include "agc.h"
> -
> using namespace RPiController;
> using namespace libcamera;
> using libcamera::utils::Duration;
> @@ -28,881 +20,203 @@ LOG_DEFINE_CATEGORY(RPiAgc)
>
> #define NAME "rpi.agc"
>
> -int AgcMeteringMode::read(const libcamera::YamlObject &params)
> +Agc::Agc(Controller *controller)
> + : AgcAlgorithm(controller),
> + activeChannels_({ 0 })
> {
> - const YamlObject &yamlWeights = params["weights"];
> -
> - for (const auto &p : yamlWeights.asList()) {
> - auto value = p.get<double>();
> - if (!value)
> - return -EINVAL;
> - weights.push_back(*value);
> - }
> -
> - return 0;
> }
>
> -static std::tuple<int, std::string>
> -readMeteringModes(std::map<std::string, AgcMeteringMode> &metering_modes,
> - const libcamera::YamlObject &params)
> +char const *Agc::name() const
> {
> - std::string first;
> - int ret;
> -
> - for (const auto &[key, value] : params.asDict()) {
> - AgcMeteringMode meteringMode;
> - ret = meteringMode.read(value);
> - if (ret)
> - return { ret, {} };
> -
> - metering_modes[key] = std::move(meteringMode);
> - if (first.empty())
> - first = key;
> - }
> -
> - return { 0, first };
> + return NAME;
> }
>
> -int AgcExposureMode::read(const libcamera::YamlObject &params)
> +int Agc::read(const libcamera::YamlObject &params)
> {
> - auto value = params["shutter"].getList<double>();
> - if (!value)
> - return -EINVAL;
> - std::transform(value->begin(), value->end(), std::back_inserter(shutter),
> - [](double v) { return v * 1us; });
> -
> - value = params["gain"].getList<double>();
> - if (!value)
> - return -EINVAL;
> - gain = std::move(*value);
> -
> - if (shutter.size() < 2 || gain.size() < 2) {
> - LOG(RPiAgc, Error)
> - << "AgcExposureMode: must have at least two entries in exposure profile";
> - return -EINVAL;
> - }
> -
> - if (shutter.size() != gain.size()) {
> - LOG(RPiAgc, Error)
> - << "AgcExposureMode: expect same number of exposure and gain entries in exposure profile";
> - return -EINVAL;
> + /*
> + * When there is only a single channel we can read the old style syntax.
> + * Otherwise we expect a "channels" keyword followed by a list of configurations.
> + */
> + if (!params.contains("channels")) {
> + LOG(RPiAgc, Debug) << "Single channel only";
> + channelData_.emplace_back();
> + return channelData_.back().channel.read(params, getHardwareConfig());
> }
>
> - return 0;
> -}
> -
> -static std::tuple<int, std::string>
> -readExposureModes(std::map<std::string, AgcExposureMode> &exposureModes,
> - const libcamera::YamlObject &params)
> -{
> - std::string first;
> - int ret;
> -
> - for (const auto &[key, value] : params.asDict()) {
> - AgcExposureMode exposureMode;
> - ret = exposureMode.read(value);
> + const auto &channels = params["channels"].asList();
> + for (auto ch = channels.begin(); ch != channels.end(); ch++) {
> + LOG(RPiAgc, Debug) << "Read AGC channel";
> + channelData_.emplace_back();
> + int ret = channelData_.back().channel.read(*ch, getHardwareConfig());
> if (ret)
> - return { ret, {} };
> -
> - exposureModes[key] = std::move(exposureMode);
> - if (first.empty())
> - first = key;
> + return ret;
> }
>
> - return { 0, first };
> -}
> -
> -int AgcConstraint::read(const libcamera::YamlObject &params)
> -{
> - std::string boundString = params["bound"].get<std::string>("");
> - transform(boundString.begin(), boundString.end(),
> - boundString.begin(), ::toupper);
> - if (boundString != "UPPER" && boundString != "LOWER") {
> - LOG(RPiAgc, Error) << "AGC constraint type should be UPPER or LOWER";
> - return -EINVAL;
> + LOG(RPiAgc, Debug) << "Read " << channelData_.size() << " channel(s)";
> + if (channelData_.empty()) {
> + LOG(RPiAgc, Error) << "No AGC channels provided";
> + return -1;
> }
> - bound = boundString == "UPPER" ? Bound::UPPER : Bound::LOWER;
> -
> - auto value = params["q_lo"].get<double>();
> - if (!value)
> - return -EINVAL;
> - qLo = *value;
> -
> - value = params["q_hi"].get<double>();
> - if (!value)
> - return -EINVAL;
> - qHi = *value;
> -
> - return yTarget.read(params["y_target"]);
> -}
>
> -static std::tuple<int, AgcConstraintMode>
> -readConstraintMode(const libcamera::YamlObject &params)
> -{
> - AgcConstraintMode mode;
> - int ret;
> -
> - for (const auto &p : params.asList()) {
> - AgcConstraint constraint;
> - ret = constraint.read(p);
> - if (ret)
> - return { ret, {} };
> -
> - mode.push_back(std::move(constraint));
> - }
> -
> - return { 0, mode };
> + return 0;
> }
>
> -static std::tuple<int, std::string>
> -readConstraintModes(std::map<std::string, AgcConstraintMode> &constraintModes,
> - const libcamera::YamlObject &params)
> +int Agc::checkChannel(unsigned int channelIndex) const
> {
> - std::string first;
> - int ret;
> -
> - for (const auto &[key, value] : params.asDict()) {
> - std::tie(ret, constraintModes[key]) = readConstraintMode(value);
> - if (ret)
> - return { ret, {} };
> -
> - if (first.empty())
> - first = key;
> + if (channelIndex >= channelData_.size()) {
> + LOG(RPiAgc, Warning) << "AGC channel " << channelIndex << " not available";
> + return -1;
> }
>
> - return { 0, first };
> -}
> -
> -int AgcConfig::read(const libcamera::YamlObject &params)
> -{
> - LOG(RPiAgc, Debug) << "AgcConfig";
> - int ret;
> -
> - std::tie(ret, defaultMeteringMode) =
> - readMeteringModes(meteringModes, params["metering_modes"]);
> - if (ret)
> - return ret;
> - std::tie(ret, defaultExposureMode) =
> - readExposureModes(exposureModes, params["exposure_modes"]);
> - if (ret)
> - return ret;
> - std::tie(ret, defaultConstraintMode) =
> - readConstraintModes(constraintModes, params["constraint_modes"]);
> - if (ret)
> - return ret;
> -
> - ret = yTarget.read(params["y_target"]);
> - if (ret)
> - return ret;
> -
> - speed = params["speed"].get<double>(0.2);
> - startupFrames = params["startup_frames"].get<uint16_t>(10);
> - convergenceFrames = params["convergence_frames"].get<unsigned int>(6);
> - fastReduceThreshold = params["fast_reduce_threshold"].get<double>(0.4);
> - baseEv = params["base_ev"].get<double>(1.0);
> -
> - /* Start with quite a low value as ramping up is easier than ramping down. */
> - defaultExposureTime = params["default_exposure_time"].get<double>(1000) * 1us;
> - defaultAnalogueGain = params["default_analogue_gain"].get<double>(1.0);
> -
> return 0;
> }
>
> -Agc::ExposureValues::ExposureValues()
> - : shutter(0s), analogueGain(0),
> - totalExposure(0s), totalExposureNoDG(0s)
> +void Agc::disableAuto(unsigned int channelIndex)
> {
> -}
> -
> -Agc::Agc(Controller *controller)
> - : AgcAlgorithm(controller), meteringMode_(nullptr),
> - exposureMode_(nullptr), constraintMode_(nullptr),
> - frameCount_(0), lockCount_(0),
> - lastTargetExposure_(0s), ev_(1.0), flickerPeriod_(0s),
> - maxShutter_(0s), fixedShutter_(0s), fixedAnalogueGain_(0.0)
> -{
> - memset(&awb_, 0, sizeof(awb_));
> - /*
> - * Setting status_.totalExposureValue_ to zero initially tells us
> - * it's not been calculated yet (i.e. Process hasn't yet run).
> - */
> - status_ = {};
> - status_.ev = ev_;
> -}
> + if (checkChannel(channelIndex))
> + return;
>
> -char const *Agc::name() const
> -{
> - return NAME;
> + LOG(RPiAgc, Debug) << "disableAuto for channel " << channelIndex;
> + channelData_[channelIndex].channel.disableAuto();
> }
>
> -int Agc::read(const libcamera::YamlObject &params)
> +void Agc::enableAuto(unsigned int channelIndex)
> {
> - LOG(RPiAgc, Debug) << "Agc";
> -
> - int ret = config_.read(params);
> - if (ret)
> - return ret;
> -
> - const Size &size = getHardwareConfig().agcZoneWeights;
> - for (auto const &modes : config_.meteringModes) {
> - if (modes.second.weights.size() != size.width * size.height) {
> - LOG(RPiAgc, Error) << "AgcMeteringMode: Incorrect number of weights";
> - return -EINVAL;
> - }
> - }
> + if (checkChannel(channelIndex))
> + return;
>
> - /*
> - * Set the config's defaults (which are the first ones it read) as our
> - * current modes, until someone changes them. (they're all known to
> - * exist at this point)
> - */
> - meteringModeName_ = config_.defaultMeteringMode;
> - meteringMode_ = &config_.meteringModes[meteringModeName_];
> - exposureModeName_ = config_.defaultExposureMode;
> - exposureMode_ = &config_.exposureModes[exposureModeName_];
> - constraintModeName_ = config_.defaultConstraintMode;
> - constraintMode_ = &config_.constraintModes[constraintModeName_];
> - /* Set up the "last shutter/gain" values, in case AGC starts "disabled". */
> - status_.shutterTime = config_.defaultExposureTime;
> - status_.analogueGain = config_.defaultAnalogueGain;
> - return 0;
> -}
> -
> -void Agc::disableAuto()
> -{
> - fixedShutter_ = status_.shutterTime;
> - fixedAnalogueGain_ = status_.analogueGain;
> -}
> -
> -void Agc::enableAuto()
> -{
> - fixedShutter_ = 0s;
> - fixedAnalogueGain_ = 0;
> + LOG(RPiAgc, Debug) << "enableAuto for channel " << channelIndex;
> + channelData_[channelIndex].channel.enableAuto();
> }
>
> unsigned int Agc::getConvergenceFrames() const
> {
> - /*
> - * If shutter and gain have been explicitly set, there is no
> - * convergence to happen, so no need to drop any frames - return zero.
> - */
> - if (fixedShutter_ && fixedAnalogueGain_)
> - return 0;
> - else
> - return config_.convergenceFrames;
> + /* If there are n channels, it presumably takes n times as long to converge. */
> + return channelData_[0].channel.getConvergenceFrames() * activeChannels_.size();
> }
>
> std::vector<double> const &Agc::getWeights() const
> {
> /*
> - * In case someone calls setMeteringMode and then this before the
> - * algorithm has run and updated the meteringMode_ pointer.
> + * A limitation is that we're going to have to use the same weights across
> + * all channels.
Why can't this function take a channel index instead ? I understand
that for now we default to channel 0, but other functions already
accept an index.
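Something along these lines (only a rough sketch, assuming the base
class virtual gains the same parameter) would keep it consistent with
the other accessors:

    std::vector<double> const &Agc::getWeights(unsigned int channelIndex) const
    {
            /* Fall back to channel 0 for an out-of-range index; purely illustrative. */
            if (channelIndex >= channelData_.size())
                    channelIndex = 0;
            return channelData_[channelIndex].channel.getWeights();
    }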
> */
> - auto it = config_.meteringModes.find(meteringModeName_);
> - if (it == config_.meteringModes.end())
> - return meteringMode_->weights;
> - return it->second.weights;
> + return channelData_[0].channel.getWeights();
> }
>
> -void Agc::setEv(double ev)
> +void Agc::setEv(unsigned int channelIndex, double ev)
> {
> - ev_ = ev;
> -}
> + if (checkChannel(channelIndex))
> + return;
>
> -void Agc::setFlickerPeriod(Duration flickerPeriod)
> -{
> - flickerPeriod_ = flickerPeriod;
> + LOG(RPiAgc, Debug) << "setEv " << ev << " for channel " << channelIndex;
> + channelData_[channelIndex].channel.setEv(ev);
> }
>
> -void Agc::setMaxShutter(Duration maxShutter)
> +void Agc::setFlickerPeriod(unsigned int channelIndex, Duration flickerPeriod)
> {
> - maxShutter_ = maxShutter;
> -}
> + if (checkChannel(channelIndex))
> + return;
>
> -void Agc::setFixedShutter(Duration fixedShutter)
> -{
> - fixedShutter_ = fixedShutter;
> - /* Set this in case someone calls disableAuto() straight after. */
> - status_.shutterTime = limitShutter(fixedShutter_);
> + LOG(RPiAgc, Debug) << "setFlickerPeriod " << flickerPeriod
> + << " for channel " << channelIndex;
> + channelData_[channelIndex].channel.setFlickerPeriod(flickerPeriod);
> }
>
> -void Agc::setFixedAnalogueGain(double fixedAnalogueGain)
> -{
> - fixedAnalogueGain_ = fixedAnalogueGain;
> - /* Set this in case someone calls disableAuto() straight after. */
> - status_.analogueGain = limitGain(fixedAnalogueGain);
> -}
> -
> -void Agc::setMeteringMode(std::string const &meteringModeName)
> -{
> - meteringModeName_ = meteringModeName;
> -}
> -
> -void Agc::setExposureMode(std::string const &exposureModeName)
> -{
> - exposureModeName_ = exposureModeName;
> -}
> -
> -void Agc::setConstraintMode(std::string const &constraintModeName)
> -{
> - constraintModeName_ = constraintModeName;
> -}
> -
> -void Agc::switchMode(CameraMode const &cameraMode,
> - Metadata *metadata)
> +void Agc::setMaxShutter(Duration maxShutter)
> {
> - /* AGC expects the mode sensitivity always to be non-zero. */
> - ASSERT(cameraMode.sensitivity);
> -
> - housekeepConfig();
> -
> - /*
> - * Store the mode in the local state. We must cache the sensitivity of
> - * of the previous mode for the calculations below.
> - */
> - double lastSensitivity = mode_.sensitivity;
> - mode_ = cameraMode;
> -
> - Duration fixedShutter = limitShutter(fixedShutter_);
> - if (fixedShutter && fixedAnalogueGain_) {
> - /* We're going to reset the algorithm here with these fixed values. */
> -
> - fetchAwbStatus(metadata);
> - double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
> - ASSERT(minColourGain != 0.0);
> -
> - /* This is the equivalent of computeTargetExposure and applyDigitalGain. */
> - target_.totalExposureNoDG = fixedShutter_ * fixedAnalogueGain_;
> - target_.totalExposure = target_.totalExposureNoDG / minColourGain;
> -
> - /* Equivalent of filterExposure. This resets any "history". */
> - filtered_ = target_;
> -
> - /* Equivalent of divideUpExposure. */
> - filtered_.shutter = fixedShutter;
> - filtered_.analogueGain = fixedAnalogueGain_;
> - } else if (status_.totalExposureValue) {
> - /*
> - * On a mode switch, various things could happen:
> - * - the exposure profile might change
> - * - a fixed exposure or gain might be set
> - * - the new mode's sensitivity might be different
> - * We cope with the last of these by scaling the target values. After
> - * that we just need to re-divide the exposure/gain according to the
> - * current exposure profile, which takes care of everything else.
> - */
> -
> - double ratio = lastSensitivity / cameraMode.sensitivity;
> - target_.totalExposureNoDG *= ratio;
> - target_.totalExposure *= ratio;
> - filtered_.totalExposureNoDG *= ratio;
> - filtered_.totalExposure *= ratio;
> -
> - divideUpExposure();
> - } else {
> - /*
> - * We come through here on startup, when at least one of the shutter
> - * or gain has not been fixed. We must still write those values out so
> - * that they will be applied immediately. We supply some arbitrary defaults
> - * for any that weren't set.
> - */
> -
> - /* Equivalent of divideUpExposure. */
> - filtered_.shutter = fixedShutter ? fixedShutter : config_.defaultExposureTime;
> - filtered_.analogueGain = fixedAnalogueGain_ ? fixedAnalogueGain_ : config_.defaultAnalogueGain;
> - }
> -
> - writeAndFinish(metadata, false);
> + /* Frame durations will be the same across all channels too. */
> + for (auto &data : channelData_)
> + data.channel.setMaxShutter(maxShutter);
> }
>
> -void Agc::prepare(Metadata *imageMetadata)
> +void Agc::setFixedShutter(unsigned int channelIndex, Duration fixedShutter)
> {
> - Duration totalExposureValue = status_.totalExposureValue;
> - AgcStatus delayedStatus;
> - AgcPrepareStatus prepareStatus;
> -
> - if (!imageMetadata->get("agc.delayed_status", delayedStatus))
> - totalExposureValue = delayedStatus.totalExposureValue;
> -
> - prepareStatus.digitalGain = 1.0;
> - prepareStatus.locked = false;
> -
> - if (status_.totalExposureValue) {
> - /* Process has run, so we have meaningful values. */
> - DeviceStatus deviceStatus;
> - if (imageMetadata->get("device.status", deviceStatus) == 0) {
> - Duration actualExposure = deviceStatus.shutterSpeed *
> - deviceStatus.analogueGain;
> - if (actualExposure) {
> - double digitalGain = totalExposureValue / actualExposure;
> - LOG(RPiAgc, Debug) << "Want total exposure " << totalExposureValue;
> - /*
> - * Never ask for a gain < 1.0, and also impose
> - * some upper limit. Make it customisable?
> - */
> - prepareStatus.digitalGain = std::max(1.0, std::min(digitalGain, 4.0));
> - LOG(RPiAgc, Debug) << "Actual exposure " << actualExposure;
> - LOG(RPiAgc, Debug) << "Use digitalGain " << prepareStatus.digitalGain;
> - LOG(RPiAgc, Debug) << "Effective exposure "
> - << actualExposure * prepareStatus.digitalGain;
> - /* Decide whether AEC/AGC has converged. */
> - prepareStatus.locked = updateLockStatus(deviceStatus);
> - }
> - } else
> - LOG(RPiAgc, Warning) << name() << ": no device metadata";
> - imageMetadata->set("agc.prepare_status", prepareStatus);
> - }
> -}
> + if (checkChannel(channelIndex))
> + return;
>
> -void Agc::process(StatisticsPtr &stats, Metadata *imageMetadata)
> -{
> - frameCount_++;
> - /*
> - * First a little bit of housekeeping, fetching up-to-date settings and
> - * configuration, that kind of thing.
> - */
> - housekeepConfig();
> - /* Fetch the AWB status immediately, so that we can assume it's there. */
> - fetchAwbStatus(imageMetadata);
> - /* Get the current exposure values for the frame that's just arrived. */
> - fetchCurrentExposure(imageMetadata);
> - /* Compute the total gain we require relative to the current exposure. */
> - double gain, targetY;
> - computeGain(stats, imageMetadata, gain, targetY);
> - /* Now compute the target (final) exposure which we think we want. */
> - computeTargetExposure(gain);
> - /* The results have to be filtered so as not to change too rapidly. */
> - filterExposure();
> - /*
> - * Some of the exposure has to be applied as digital gain, so work out
> - * what that is. This function also tells us whether it's decided to
> - * "desaturate" the image more quickly.
> - */
> - bool desaturate = applyDigitalGain(gain, targetY);
> - /*
> - * The last thing is to divide up the exposure value into a shutter time
> - * and analogue gain, according to the current exposure mode.
> - */
> - divideUpExposure();
> - /* Finally advertise what we've done. */
> - writeAndFinish(imageMetadata, desaturate);
> + LOG(RPiAgc, Debug) << "setFixedShutter " << fixedShutter
> + << " for channel " << channelIndex;
> + channelData_[channelIndex].channel.setFixedShutter(fixedShutter);
> }
>
> -bool Agc::updateLockStatus(DeviceStatus const &deviceStatus)
> +void Agc::setFixedAnalogueGain(unsigned int channelIndex, double fixedAnalogueGain)
> {
> - const double errorFactor = 0.10; /* make these customisable? */
> - const int maxLockCount = 5;
> - /* Reset "lock count" when we exceed this multiple of errorFactor */
> - const double resetMargin = 1.5;
> + if (checkChannel(channelIndex))
> + return;
>
> - /* Add 200us to the exposure time error to allow for line quantisation. */
> - Duration exposureError = lastDeviceStatus_.shutterSpeed * errorFactor + 200us;
> - double gainError = lastDeviceStatus_.analogueGain * errorFactor;
> - Duration targetError = lastTargetExposure_ * errorFactor;
> -
> - /*
> - * Note that we don't know the exposure/gain limits of the sensor, so
> - * the values we keep requesting may be unachievable. For this reason
> - * we only insist that we're close to values in the past few frames.
> - */
> - if (deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed - exposureError &&
> - deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed + exposureError &&
> - deviceStatus.analogueGain > lastDeviceStatus_.analogueGain - gainError &&
> - deviceStatus.analogueGain < lastDeviceStatus_.analogueGain + gainError &&
> - status_.targetExposureValue > lastTargetExposure_ - targetError &&
> - status_.targetExposureValue < lastTargetExposure_ + targetError)
> - lockCount_ = std::min(lockCount_ + 1, maxLockCount);
> - else if (deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed - resetMargin * exposureError ||
> - deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed + resetMargin * exposureError ||
> - deviceStatus.analogueGain < lastDeviceStatus_.analogueGain - resetMargin * gainError ||
> - deviceStatus.analogueGain > lastDeviceStatus_.analogueGain + resetMargin * gainError ||
> - status_.targetExposureValue < lastTargetExposure_ - resetMargin * targetError ||
> - status_.targetExposureValue > lastTargetExposure_ + resetMargin * targetError)
> - lockCount_ = 0;
> -
> - lastDeviceStatus_ = deviceStatus;
> - lastTargetExposure_ = status_.targetExposureValue;
> -
> - LOG(RPiAgc, Debug) << "Lock count updated to " << lockCount_;
> - return lockCount_ == maxLockCount;
> + LOG(RPiAgc, Debug) << "setFixedAnalogueGain " << fixedAnalogueGain
> + << " for channel " << channelIndex;
> + channelData_[channelIndex].channel.setFixedAnalogueGain(fixedAnalogueGain);
> }
>
> -void Agc::housekeepConfig()
> +void Agc::setMeteringMode(std::string const &meteringModeName)
> {
> - /* First fetch all the up-to-date settings, so no one else has to do it. */
> - status_.ev = ev_;
> - status_.fixedShutter = limitShutter(fixedShutter_);
> - status_.fixedAnalogueGain = fixedAnalogueGain_;
> - status_.flickerPeriod = flickerPeriod_;
> - LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixedShutter "
> - << status_.fixedShutter << " fixedAnalogueGain "
> - << status_.fixedAnalogueGain;
> - /*
> - * Make sure the "mode" pointers point to the up-to-date things, if
> - * they've changed.
> - */
> - if (meteringModeName_ != status_.meteringMode) {
> - auto it = config_.meteringModes.find(meteringModeName_);
> - if (it == config_.meteringModes.end()) {
> - LOG(RPiAgc, Warning) << "No metering mode " << meteringModeName_;
> - meteringModeName_ = status_.meteringMode;
> - } else {
> - meteringMode_ = &it->second;
> - status_.meteringMode = meteringModeName_;
> - }
> - }
> - if (exposureModeName_ != status_.exposureMode) {
> - auto it = config_.exposureModes.find(exposureModeName_);
> - if (it == config_.exposureModes.end()) {
> - LOG(RPiAgc, Warning) << "No exposure profile " << exposureModeName_;
> - exposureModeName_ = status_.exposureMode;
> - } else {
> - exposureMode_ = &it->second;
> - status_.exposureMode = exposureModeName_;
> - }
> - }
> - if (constraintModeName_ != status_.constraintMode) {
> - auto it = config_.constraintModes.find(constraintModeName_);
> - if (it == config_.constraintModes.end()) {
> - LOG(RPiAgc, Warning) << "No constraint list " << constraintModeName_;
> - constraintModeName_ = status_.constraintMode;
> - } else {
> - constraintMode_ = &it->second;
> - status_.constraintMode = constraintModeName_;
> - }
> - }
> - LOG(RPiAgc, Debug) << "exposureMode "
> - << exposureModeName_ << " constraintMode "
> - << constraintModeName_ << " meteringMode "
> - << meteringModeName_;
> + /* Metering modes will be the same across all channels too. */
> + for (auto &data : channelData_)
> + data.channel.setMeteringMode(meteringModeName);
> }
>
> -void Agc::fetchCurrentExposure(Metadata *imageMetadata)
> +void Agc::setExposureMode(unsigned int channelIndex, std::string const &exposureModeName)
> {
> - std::unique_lock<Metadata> lock(*imageMetadata);
> - DeviceStatus *deviceStatus =
> - imageMetadata->getLocked<DeviceStatus>("device.status");
> - if (!deviceStatus)
> - LOG(RPiAgc, Fatal) << "No device metadata";
> - current_.shutter = deviceStatus->shutterSpeed;
> - current_.analogueGain = deviceStatus->analogueGain;
> - AgcStatus *agcStatus =
> - imageMetadata->getLocked<AgcStatus>("agc.status");
> - current_.totalExposure = agcStatus ? agcStatus->totalExposureValue : 0s;
> - current_.totalExposureNoDG = current_.shutter * current_.analogueGain;
> -}
> + if (checkChannel(channelIndex))
> + return;
>
> -void Agc::fetchAwbStatus(Metadata *imageMetadata)
> -{
> - awb_.gainR = 1.0; /* in case not found in metadata */
> - awb_.gainG = 1.0;
> - awb_.gainB = 1.0;
> - if (imageMetadata->get("awb.status", awb_) != 0)
> - LOG(RPiAgc, Debug) << "No AWB status found";
> + LOG(RPiAgc, Debug) << "setExposureMode " << exposureModeName
> + << " for channel " << channelIndex;
> + channelData_[channelIndex].channel.setExposureMode(exposureModeName);
> }
>
> -static double computeInitialY(StatisticsPtr &stats, AwbStatus const &awb,
> - std::vector<double> &weights, double gain)
> +void Agc::setConstraintMode(unsigned int channelIndex, std::string const &constraintModeName)
> {
> - constexpr uint64_t maxVal = 1 << Statistics::NormalisationFactorPow2;
> + if (checkChannel(channelIndex))
> + return;
>
> - ASSERT(weights.size() == stats->agcRegions.numRegions());
> -
> - /*
> - * Note that the weights are applied by the IPA to the statistics directly,
> - * before they are given to us here.
> - */
> - double rSum = 0, gSum = 0, bSum = 0, pixelSum = 0;
> - for (unsigned int i = 0; i < stats->agcRegions.numRegions(); i++) {
> - auto &region = stats->agcRegions.get(i);
> - rSum += std::min<double>(region.val.rSum * gain, (maxVal - 1) * region.counted);
> - gSum += std::min<double>(region.val.gSum * gain, (maxVal - 1) * region.counted);
> - bSum += std::min<double>(region.val.bSum * gain, (maxVal - 1) * region.counted);
> - pixelSum += region.counted;
> - }
> - if (pixelSum == 0.0) {
> - LOG(RPiAgc, Warning) << "computeInitialY: pixelSum is zero";
> - return 0;
> - }
> - double ySum = rSum * awb.gainR * .299 +
> - gSum * awb.gainG * .587 +
> - bSum * awb.gainB * .114;
> - return ySum / pixelSum / maxVal;
> + channelData_[channelIndex].channel.setConstraintMode(constraintModeName);
> }
>
> -/*
> - * We handle extra gain through EV by adjusting our Y targets. However, you
> - * simply can't monitor histograms once they get very close to (or beyond!)
> - * saturation, so we clamp the Y targets to this value. It does mean that EV
> - * increases don't necessarily do quite what you might expect in certain
> - * (contrived) cases.
> - */
> -
> -static constexpr double EvGainYTargetLimit = 0.9;
> -
> -static double constraintComputeGain(AgcConstraint &c, const Histogram &h, double lux,
> - double evGain, double &targetY)
> +template<typename T>
> +std::ostream &operator<<(std::ostream &os, const std::vector<T> &v)
> {
> - targetY = c.yTarget.eval(c.yTarget.domain().clip(lux));
> - targetY = std::min(EvGainYTargetLimit, targetY * evGain);
> - double iqm = h.interQuantileMean(c.qLo, c.qHi);
> - return (targetY * h.bins()) / iqm;
> + os << "{";
> + for (const auto &e : v)
> + os << " " << e;
> + os << " }";
> + return os;
> }
>
> -void Agc::computeGain(StatisticsPtr &statistics, Metadata *imageMetadata,
> - double &gain, double &targetY)
> +void Agc::setActiveChannels(const std::vector<unsigned int> &activeChannels)
> {
> - struct LuxStatus lux = {};
> - lux.lux = 400; /* default lux level to 400 in case no metadata found */
> - if (imageMetadata->get("lux.status", lux) != 0)
> - LOG(RPiAgc, Warning) << "No lux level found";
> - const Histogram &h = statistics->yHist;
> - double evGain = status_.ev * config_.baseEv;
> - /*
> - * The initial gain and target_Y come from some of the regions. After
> - * that we consider the histogram constraints.
> - */
> - targetY = config_.yTarget.eval(config_.yTarget.domain().clip(lux.lux));
> - targetY = std::min(EvGainYTargetLimit, targetY * evGain);
> -
> - /*
> - * Do this calculation a few times as brightness increase can be
> - * non-linear when there are saturated regions.
> - */
> - gain = 1.0;
> - for (int i = 0; i < 8; i++) {
> - double initialY = computeInitialY(statistics, awb_, meteringMode_->weights, gain);
> - double extraGain = std::min(10.0, targetY / (initialY + .001));
> - gain *= extraGain;
> - LOG(RPiAgc, Debug) << "Initial Y " << initialY << " target " << targetY
> - << " gives gain " << gain;
> - if (extraGain < 1.01) /* close enough */
> - break;
> - }
> -
> - for (auto &c : *constraintMode_) {
> - double newTargetY;
> - double newGain = constraintComputeGain(c, h, lux.lux, evGain, newTargetY);
> - LOG(RPiAgc, Debug) << "Constraint has target_Y "
> - << newTargetY << " giving gain " << newGain;
> - if (c.bound == AgcConstraint::Bound::LOWER && newGain > gain) {
> - LOG(RPiAgc, Debug) << "Lower bound constraint adopted";
> - gain = newGain;
> - targetY = newTargetY;
> - } else if (c.bound == AgcConstraint::Bound::UPPER && newGain < gain) {
> - LOG(RPiAgc, Debug) << "Upper bound constraint adopted";
> - gain = newGain;
> - targetY = newTargetY;
> - }
> + if (activeChannels.empty()) {
> + LOG(RPiAgc, Warning) << "No active AGC channels supplied";
> + return;
> }
> - LOG(RPiAgc, Debug) << "Final gain " << gain << " (target_Y " << targetY << " ev "
> - << status_.ev << " base_ev " << config_.baseEv
> - << ")";
> -}
> -
> -void Agc::computeTargetExposure(double gain)
> -{
> - if (status_.fixedShutter && status_.fixedAnalogueGain) {
> - /*
> - * When ag and shutter are both fixed, we need to drive the
> - * total exposure so that we end up with a digital gain of at least
> - * 1/minColourGain. Otherwise we'd desaturate channels causing
> - * white to go cyan or magenta.
> - */
> - double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
> - ASSERT(minColourGain != 0.0);
> - target_.totalExposure =
> - status_.fixedShutter * status_.fixedAnalogueGain / minColourGain;
> - } else {
> - /*
> - * The statistics reflect the image without digital gain, so the final
> - * total exposure we're aiming for is:
> - */
> - target_.totalExposure = current_.totalExposureNoDG * gain;
> - /* The final target exposure is also limited to what the exposure mode allows. */
> - Duration maxShutter = status_.fixedShutter
> - ? status_.fixedShutter
> - : exposureMode_->shutter.back();
> - maxShutter = limitShutter(maxShutter);
> - Duration maxTotalExposure =
> - maxShutter *
> - (status_.fixedAnalogueGain != 0.0
> - ? status_.fixedAnalogueGain
> - : exposureMode_->gain.back());
> - target_.totalExposure = std::min(target_.totalExposure, maxTotalExposure);
> - }
> - LOG(RPiAgc, Debug) << "Target totalExposure " << target_.totalExposure;
> -}
>
> -bool Agc::applyDigitalGain(double gain, double targetY)
> -{
> - double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
> - ASSERT(minColourGain != 0.0);
> - double dg = 1.0 / minColourGain;
> - /*
> - * I think this pipeline subtracts black level and rescales before we
> - * get the stats, so no need to worry about it.
> - */
> - LOG(RPiAgc, Debug) << "after AWB, target dg " << dg << " gain " << gain
> - << " target_Y " << targetY;
> - /*
> - * Finally, if we're trying to reduce exposure but the target_Y is
> - * "close" to 1.0, then the gain computed for that constraint will be
> - * only slightly less than one, because the measured Y can never be
> - * larger than 1.0. When this happens, demand a large digital gain so
> - * that the exposure can be reduced, de-saturating the image much more
> - * quickly (and we then approach the correct value more quickly from
> - * below).
> - */
> - bool desaturate = targetY > config_.fastReduceThreshold &&
> - gain < sqrt(targetY);
> - if (desaturate)
> - dg /= config_.fastReduceThreshold;
> - LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? " << desaturate;
> - filtered_.totalExposureNoDG = filtered_.totalExposure / dg;
> - LOG(RPiAgc, Debug) << "Target totalExposureNoDG " << filtered_.totalExposureNoDG;
> - return desaturate;
> -}
> -
> -void Agc::filterExposure()
> -{
> - double speed = config_.speed;
> - /*
> - * AGC adapts instantly if both shutter and gain are directly specified
> - * or we're in the startup phase.
> - */
> - if ((status_.fixedShutter && status_.fixedAnalogueGain) ||
> - frameCount_ <= config_.startupFrames)
> - speed = 1.0;
> - if (!filtered_.totalExposure) {
> - filtered_.totalExposure = target_.totalExposure;
> - } else {
> - /*
> - * If close to the result go faster, to save making so many
> - * micro-adjustments on the way. (Make this customisable?)
> - */
> - if (filtered_.totalExposure < 1.2 * target_.totalExposure &&
> - filtered_.totalExposure > 0.8 * target_.totalExposure)
> - speed = sqrt(speed);
> - filtered_.totalExposure = speed * target_.totalExposure +
> - filtered_.totalExposure * (1.0 - speed);
> - }
> - LOG(RPiAgc, Debug) << "After filtering, totalExposure " << filtered_.totalExposure
> - << " no dg " << filtered_.totalExposureNoDG;
> -}
> + for (auto index : activeChannels)
> + if (checkChannel(index))
> + return;
>
> -void Agc::divideUpExposure()
> -{
> - /*
> - * Sending the fixed shutter/gain cases through the same code may seem
> - * unnecessary, but it will make more sense when extend this to cover
> - * variable aperture.
> - */
> - Duration exposureValue = filtered_.totalExposureNoDG;
> - Duration shutterTime;
> - double analogueGain;
> - shutterTime = status_.fixedShutter ? status_.fixedShutter
> - : exposureMode_->shutter[0];
> - shutterTime = limitShutter(shutterTime);
> - analogueGain = status_.fixedAnalogueGain != 0.0 ? status_.fixedAnalogueGain
> - : exposureMode_->gain[0];
> - analogueGain = limitGain(analogueGain);
> - if (shutterTime * analogueGain < exposureValue) {
> - for (unsigned int stage = 1;
> - stage < exposureMode_->gain.size(); stage++) {
> - if (!status_.fixedShutter) {
> - Duration stageShutter =
> - limitShutter(exposureMode_->shutter[stage]);
> - if (stageShutter * analogueGain >= exposureValue) {
> - shutterTime = exposureValue / analogueGain;
> - break;
> - }
> - shutterTime = stageShutter;
> - }
> - if (status_.fixedAnalogueGain == 0.0) {
> - if (exposureMode_->gain[stage] * shutterTime >= exposureValue) {
> - analogueGain = exposureValue / shutterTime;
> - break;
> - }
> - analogueGain = exposureMode_->gain[stage];
> - analogueGain = limitGain(analogueGain);
> - }
> - }
> - }
> - LOG(RPiAgc, Debug) << "Divided up shutter and gain are " << shutterTime << " and "
> - << analogueGain;
> - /*
> - * Finally adjust shutter time for flicker avoidance (require both
> - * shutter and gain not to be fixed).
> - */
> - if (!status_.fixedShutter && !status_.fixedAnalogueGain &&
> - status_.flickerPeriod) {
> - int flickerPeriods = shutterTime / status_.flickerPeriod;
> - if (flickerPeriods) {
> - Duration newShutterTime = flickerPeriods * status_.flickerPeriod;
> - analogueGain *= shutterTime / newShutterTime;
> - /*
> - * We should still not allow the ag to go over the
> - * largest value in the exposure mode. Note that this
> - * may force more of the total exposure into the digital
> - * gain as a side-effect.
> - */
> - analogueGain = std::min(analogueGain, exposureMode_->gain.back());
> - analogueGain = limitGain(analogueGain);
> - shutterTime = newShutterTime;
> - }
> - LOG(RPiAgc, Debug) << "After flicker avoidance, shutter "
> - << shutterTime << " gain " << analogueGain;
> - }
> - filtered_.shutter = shutterTime;
> - filtered_.analogueGain = analogueGain;
> + LOG(RPiAgc, Debug) << "setActiveChannels " << activeChannels;
> + activeChannels_ = activeChannels;
> }
>
> -void Agc::writeAndFinish(Metadata *imageMetadata, bool desaturate)
> +void Agc::switchMode(CameraMode const &cameraMode,
> + Metadata *metadata)
> {
> - status_.totalExposureValue = filtered_.totalExposure;
> - status_.targetExposureValue = desaturate ? 0s : target_.totalExposureNoDG;
> - status_.shutterTime = filtered_.shutter;
> - status_.analogueGain = filtered_.analogueGain;
> - /*
> - * Write to metadata as well, in case anyone wants to update the camera
> - * immediately.
> - */
> - imageMetadata->set("agc.status", status_);
> - LOG(RPiAgc, Debug) << "Output written, total exposure requested is "
> - << filtered_.totalExposure;
> - LOG(RPiAgc, Debug) << "Camera exposure update: shutter time " << filtered_.shutter
> - << " analogue gain " << filtered_.analogueGain;
> + LOG(RPiAgc, Debug) << "switchMode for channel 0";
> + channelData_[0].channel.switchMode(cameraMode, metadata);
> }
>
> -Duration Agc::limitShutter(Duration shutter)
> +void Agc::prepare(Metadata *imageMetadata)
> {
> - /*
> - * shutter == 0 is a special case for fixed shutter values, and must pass
> - * through unchanged
> - */
> - if (!shutter)
> - return shutter;
> -
> - shutter = std::clamp(shutter, mode_.minShutter, maxShutter_);
> - return shutter;
> + LOG(RPiAgc, Debug) << "prepare for channel 0";
> + channelData_[0].channel.prepare(imageMetadata);
> }
>
> -double Agc::limitGain(double gain) const
> +void Agc::process(StatisticsPtr &stats, Metadata *imageMetadata)
> {
> - /*
> - * Only limit the lower bounds of the gain value to what the sensor limits.
> - * The upper bound on analogue gain will be made up with additional digital
> - * gain applied by the ISP.
> - *
> - * gain == 0.0 is a special case for fixed shutter values, and must pass
> - * through unchanged
> - */
> - if (!gain)
> - return gain;
> -
> - gain = std::max(gain, mode_.minAnalogueGain);
> - return gain;
> + LOG(RPiAgc, Debug) << "process for channel 0";
> + channelData_[0].channel.process(stats, imageMetadata);
> }
>
> /* Register algorithm with the system. */
> diff --git a/src/ipa/rpi/controller/rpi/agc.h b/src/ipa/rpi/controller/rpi/agc.h
> index aaf77c8f..a9158910 100644
> --- a/src/ipa/rpi/controller/rpi/agc.h
> +++ b/src/ipa/rpi/controller/rpi/agc.h
> @@ -6,60 +6,19 @@
> */
> #pragma once
>
> +#include <optional>
and <string> too, since std::string is used below ?
> #include <vector>
> -#include <mutex>
> -
> -#include <libcamera/base/utils.h>
>
> #include "../agc_algorithm.h"
> -#include "../agc_status.h"
> -#include "../pwl.h"
>
> -/* This is our implementation of AGC. */
> +#include "agc_channel.h"
>
> namespace RPiController {
>
> -struct AgcMeteringMode {
> - std::vector<double> weights;
> - int read(const libcamera::YamlObject &params);
> -};
> -
> -struct AgcExposureMode {
> - std::vector<libcamera::utils::Duration> shutter;
> - std::vector<double> gain;
> - int read(const libcamera::YamlObject &params);
> -};
> -
> -struct AgcConstraint {
> - enum class Bound { LOWER = 0, UPPER = 1 };
> - Bound bound;
> - double qLo;
> - double qHi;
> - Pwl yTarget;
> - int read(const libcamera::YamlObject &params);
> -};
> -
> -typedef std::vector<AgcConstraint> AgcConstraintMode;
> -
> -struct AgcConfig {
> - int read(const libcamera::YamlObject &params);
> - std::map<std::string, AgcMeteringMode> meteringModes;
> - std::map<std::string, AgcExposureMode> exposureModes;
> - std::map<std::string, AgcConstraintMode> constraintModes;
> - Pwl yTarget;
> - double speed;
> - uint16_t startupFrames;
> - unsigned int convergenceFrames;
> - double maxChange;
> - double minChange;
> - double fastReduceThreshold;
> - double speedUpThreshold;
> - std::string defaultMeteringMode;
> - std::string defaultExposureMode;
> - std::string defaultConstraintMode;
> - double baseEv;
> - libcamera::utils::Duration defaultExposureTime;
> - double defaultAnalogueGain;
> +struct AgcChannelData {
> + AgcChannel channel;
> + std::optional<DeviceStatus> deviceStatus;
> + StatisticsPtr statistics;
Are deviceStatus and statistics actually used anywhere ?
> };
>
> class Agc : public AgcAlgorithm
> @@ -70,65 +29,30 @@ public:
> int read(const libcamera::YamlObject &params) override;
> unsigned int getConvergenceFrames() const override;
> std::vector<double> const &getWeights() const override;
> - void setEv(double ev) override;
> - void setFlickerPeriod(libcamera::utils::Duration flickerPeriod) override;
> + void setEv(unsigned int channel, double ev) override;
> + void setFlickerPeriod(unsigned int channelIndex,
> + libcamera::utils::Duration flickerPeriod) override;
> void setMaxShutter(libcamera::utils::Duration maxShutter) override;
> - void setFixedShutter(libcamera::utils::Duration fixedShutter) override;
> - void setFixedAnalogueGain(double fixedAnalogueGain) override;
> + void setFixedShutter(unsigned int channelIndex,
> + libcamera::utils::Duration fixedShutter) override;
> + void setFixedAnalogueGain(unsigned int channelIndex,
> + double fixedAnalogueGain) override;
> void setMeteringMode(std::string const &meteringModeName) override;
> - void setExposureMode(std::string const &exposureModeName) override;
> - void setConstraintMode(std::string const &contraintModeName) override;
> - void enableAuto() override;
> - void disableAuto() override;
> + void setExposureMode(unsigned int channelIndex,
> + std::string const &exposureModeName) override;
> + void setConstraintMode(unsigned int channelIndex,
> + std::string const &contraintModeName) override;
> + void enableAuto(unsigned int channelIndex) override;
> + void disableAuto(unsigned int channelIndex) override;
> void switchMode(CameraMode const &cameraMode, Metadata *metadata) override;
> void prepare(Metadata *imageMetadata) override;
> void process(StatisticsPtr &stats, Metadata *imageMetadata) override;
> + void setActiveChannels(const std::vector<unsigned int> &activeChannels) override;
This doesn't seem to be used in the series, and the semantics are a
little weird... are the channels expected to be consecutive ?
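For instance, should a caller eventually be able to do something like
this (purely hypothetical, not taken from this series), with
non-consecutive indices ?

    /* Hypothetical IPA-side usage, running only two of three channels: */
    agc->setActiveChannels({ 0, 2 });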
>
> private:
> - bool updateLockStatus(DeviceStatus const &deviceStatus);
> - AgcConfig config_;
> - void housekeepConfig();
> - void fetchCurrentExposure(Metadata *imageMetadata);
> - void fetchAwbStatus(Metadata *imageMetadata);
> - void computeGain(StatisticsPtr &statistics, Metadata *imageMetadata,
> - double &gain, double &targetY);
> - void computeTargetExposure(double gain);
> - void filterExposure();
> - bool applyDigitalGain(double gain, double targetY);
> - void divideUpExposure();
> - void writeAndFinish(Metadata *imageMetadata, bool desaturate);
> - libcamera::utils::Duration limitShutter(libcamera::utils::Duration shutter);
> - double limitGain(double gain) const;
> - AgcMeteringMode *meteringMode_;
> - AgcExposureMode *exposureMode_;
> - AgcConstraintMode *constraintMode_;
> - CameraMode mode_;
> - uint64_t frameCount_;
> - AwbStatus awb_;
> - struct ExposureValues {
> - ExposureValues();
> -
> - libcamera::utils::Duration shutter;
> - double analogueGain;
> - libcamera::utils::Duration totalExposure;
> - libcamera::utils::Duration totalExposureNoDG; /* without digital gain */
> - };
> - ExposureValues current_; /* values for the current frame */
> - ExposureValues target_; /* calculate the values we want here */
> - ExposureValues filtered_; /* these values are filtered towards target */
> - AgcStatus status_;
> - int lockCount_;
> - DeviceStatus lastDeviceStatus_;
> - libcamera::utils::Duration lastTargetExposure_;
> - /* Below here the "settings" that applications can change. */
> - std::string meteringModeName_;
> - std::string exposureModeName_;
> - std::string constraintModeName_;
> - double ev_;
> - libcamera::utils::Duration flickerPeriod_;
> - libcamera::utils::Duration maxShutter_;
> - libcamera::utils::Duration fixedShutter_;
> - double fixedAnalogueGain_;
> + int checkChannel(unsigned int channel) const;
> + std::vector<AgcChannelData> channelData_;
> + std::vector<unsigned int> activeChannels_;
> };
>
> } /* namespace RPiController */
> diff --git a/src/ipa/rpi/controller/rpi/agc_channel.cpp b/src/ipa/rpi/controller/rpi/agc_channel.cpp
> new file mode 100644
> index 00000000..d6e30ef2
> --- /dev/null
> +++ b/src/ipa/rpi/controller/rpi/agc_channel.cpp
> @@ -0,0 +1,927 @@
> +/* SPDX-License-Identifier: BSD-2-Clause */
> +/*
> + * Copyright (C) 2019, Raspberry Pi Ltd
Time flies here too: should the copyright year be updated for this new file ?
> + *
> + * agc.cpp - AGC/AEC control algorithm
> + */
> +
> +#include <algorithm>
> +#include <map>
> +#include <tuple>
> +
> +#include <libcamera/base/log.h>
> +
> +#include "../awb_status.h"
> +#include "../device_status.h"
> +#include "../histogram.h"
> +#include "../lux_status.h"
> +#include "../metadata.h"
> +
> +#include "agc.h"
How come this doesn't include "agc_channel.h" instead ?
> +
> +using namespace RPiController;
> +using namespace libcamera;
> +using libcamera::utils::Duration;
> +using namespace std::literals::chrono_literals;
> +
> +LOG_DECLARE_CATEGORY(RPiAgc)
> +
> +#define NAME "rpi.agc"
> +
NAME doesn't seem to be used in this file.
> +int AgcMeteringMode::read(const libcamera::YamlObject &params)
> +{
> + const YamlObject &yamlWeights = params["weights"];
> +
> + for (const auto &p : yamlWeights.asList()) {
> + auto value = p.get<double>();
> + if (!value)
> + return -EINVAL;
> + weights.push_back(*value);
> + }
> +
> + return 0;
> +}
> +
> +static std::tuple<int, std::string>
Why aren't the static helper functions here in the RPiController namespace ?
> +readMeteringModes(std::map<std::string, AgcMeteringMode> &metering_modes,
> + const libcamera::YamlObject &params)
> +{
> + std::string first;
> + int ret;
> +
> + for (const auto &[key, value] : params.asDict()) {
> + AgcMeteringMode meteringMode;
> + ret = meteringMode.read(value);
> + if (ret)
> + return { ret, {} };
> +
> + metering_modes[key] = std::move(meteringMode);
> + if (first.empty())
> + first = key;
> + }
> +
> + return { 0, first };
> +}
> +
> +int AgcExposureMode::read(const libcamera::YamlObject &params)
Mixing helpers and class functions ? (Helpers that are only called
from a single class function are arguably class functions too.)
I see what happens here: the config classes and structs each have a
read() function, which uses a static helper, which in turn uses
another sub-class with a read() function that uses yet another static
helper... It's a big rework, so up to you.
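Just to illustrate what I mean (a sketch only): readMeteringModes()
could for instance become a private static member of AgcConfig,
declared in agc_channel.h as

    static std::tuple<int, std::string>
    readMeteringModes(std::map<std::string, AgcMeteringMode> &meteringModes,
                      const libcamera::YamlObject &params);

and similarly for the other helpers.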
> +{
> + auto value = params["shutter"].getList<double>();
> + if (!value)
> + return -EINVAL;
> + std::transform(value->begin(), value->end(), std::back_inserter(shutter),
> + [](double v) { return v * 1us; });
> +
> + value = params["gain"].getList<double>();
> + if (!value)
> + return -EINVAL;
> + gain = std::move(*value);
> +
> + if (shutter.size() < 2 || gain.size() < 2) {
> + LOG(RPiAgc, Error)
> + << "AgcExposureMode: must have at least two entries in exposure profile";
> + return -EINVAL;
> + }
> +
> + if (shutter.size() != gain.size()) {
> + LOG(RPiAgc, Error)
> + << "AgcExposureMode: expect same number of exposure and gain entries in exposure profile";
> + return -EINVAL;
> + }
> +
> + return 0;
> +}
> +
> +static std::tuple<int, std::string>
> +readExposureModes(std::map<std::string, AgcExposureMode> &exposureModes,
> + const libcamera::YamlObject &params)
> +{
> + std::string first;
> + int ret;
> +
> + for (const auto &[key, value] : params.asDict()) {
> + AgcExposureMode exposureMode;
> + ret = exposureMode.read(value);
> + if (ret)
> + return { ret, {} };
> +
> + exposureModes[key] = std::move(exposureMode);
> + if (first.empty())
> + first = key;
> + }
> +
> + return { 0, first };
> +}
> +
> +int AgcConstraint::read(const libcamera::YamlObject &params)
> +{
> + std::string boundString = params["bound"].get<std::string>("");
> + transform(boundString.begin(), boundString.end(),
> + boundString.begin(), ::toupper);
> + if (boundString != "UPPER" && boundString != "LOWER") {
> + LOG(RPiAgc, Error) << "AGC constraint type should be UPPER or LOWER";
> + return -EINVAL;
> + }
> + bound = boundString == "UPPER" ? Bound::UPPER : Bound::LOWER;
> +
> + auto value = params["q_lo"].get<double>();
> + if (!value)
> + return -EINVAL;
> + qLo = *value;
> +
> + value = params["q_hi"].get<double>();
> + if (!value)
> + return -EINVAL;
> + qHi = *value;
> +
> + return yTarget.read(params["y_target"]);
> +}
> +
> +static std::tuple<int, AgcConstraintMode>
> +readConstraintMode(const libcamera::YamlObject &params)
> +{
> + AgcConstraintMode mode;
> + int ret;
> +
> + for (const auto &p : params.asList()) {
> + AgcConstraint constraint;
> + ret = constraint.read(p);
> + if (ret)
> + return { ret, {} };
> +
> + mode.push_back(std::move(constraint));
> + }
> +
> + return { 0, mode };
> +}
> +
> +static std::tuple<int, std::string>
> +readConstraintModes(std::map<std::string, AgcConstraintMode> &constraintModes,
> + const libcamera::YamlObject &params)
> +{
> + std::string first;
> + int ret;
> +
> + for (const auto &[key, value] : params.asDict()) {
> + std::tie(ret, constraintModes[key]) = readConstraintMode(value);
> + if (ret)
> + return { ret, {} };
> +
> + if (first.empty())
> + first = key;
> + }
> +
> + return { 0, first };
> +}
> +
> +int AgcConfig::read(const libcamera::YamlObject &params)
> +{
> + LOG(RPiAgc, Debug) << "AgcConfig";
> + int ret;
> +
> + std::tie(ret, defaultMeteringMode) =
> + readMeteringModes(meteringModes, params["metering_modes"]);
> + if (ret)
> + return ret;
> + std::tie(ret, defaultExposureMode) =
> + readExposureModes(exposureModes, params["exposure_modes"]);
> + if (ret)
> + return ret;
> + std::tie(ret, defaultConstraintMode) =
> + readConstraintModes(constraintModes, params["constraint_modes"]);
> + if (ret)
> + return ret;
> +
> + ret = yTarget.read(params["y_target"]);
> + if (ret)
> + return ret;
> +
> + speed = params["speed"].get<double>(0.2);
> + startupFrames = params["startup_frames"].get<uint16_t>(10);
> + convergenceFrames = params["convergence_frames"].get<unsigned int>(6);
> + fastReduceThreshold = params["fast_reduce_threshold"].get<double>(0.4);
> + baseEv = params["base_ev"].get<double>(1.0);
> +
> + /* Start with quite a low value as ramping up is easier than ramping down. */
> + defaultExposureTime = params["default_exposure_time"].get<double>(1000) * 1us;
> + defaultAnalogueGain = params["default_analogue_gain"].get<double>(1.0);
> +
> + return 0;
> +}
> +
> +AgcChannel::ExposureValues::ExposureValues()
> + : shutter(0s), analogueGain(0),
> + totalExposure(0s), totalExposureNoDG(0s)
> +{
> +}
> +
> +AgcChannel::AgcChannel()
> + : meteringMode_(nullptr), exposureMode_(nullptr), constraintMode_(nullptr),
> + frameCount_(0), lockCount_(0),
> + lastTargetExposure_(0s), ev_(1.0), flickerPeriod_(0s),
> + maxShutter_(0s), fixedShutter_(0s), fixedAnalogueGain_(0.0)
> +{
> + memset(&awb_, 0, sizeof(awb_));
> + /*
> + * Setting status_.totalExposureValue_ to zero initially tells us
> + * it's not been calculated yet (i.e. Process hasn't yet run).
> + */
> + status_ = {};
> + status_.ev = ev_;
> +}
> +
> +int AgcChannel::read(const libcamera::YamlObject &params,
> + const Controller::HardwareConfig &hardwareConfig)
> +{
> + int ret = config_.read(params);
> + if (ret)
> + return ret;
> +
> + const Size &size = hardwareConfig.agcZoneWeights;
> + for (auto const &modes : config_.meteringModes) {
> + if (modes.second.weights.size() != size.width * size.height) {
> + LOG(RPiAgc, Error) << "AgcMeteringMode: Incorrect number of weights";
> + return -EINVAL;
> + }
> + }
> +
> + /*
> + * Set the config's defaults (which are the first ones it read) as our
> + * current modes, until someone changes them. (They're all known to
> + * exist at this point.)
> + */
> + meteringModeName_ = config_.defaultMeteringMode;
> + meteringMode_ = &config_.meteringModes[meteringModeName_];
> + exposureModeName_ = config_.defaultExposureMode;
> + exposureMode_ = &config_.exposureModes[exposureModeName_];
> + constraintModeName_ = config_.defaultConstraintMode;
> + constraintMode_ = &config_.constraintModes[constraintModeName_];
> + /* Set up the "last shutter/gain" values, in case AGC starts "disabled". */
> + status_.shutterTime = config_.defaultExposureTime;
> + status_.analogueGain = config_.defaultAnalogueGain;
> + return 0;
> +}
> +
> +void AgcChannel::disableAuto()
> +{
> + fixedShutter_ = status_.shutterTime;
> + fixedAnalogueGain_ = status_.analogueGain;
> +}
> +
> +void AgcChannel::enableAuto()
> +{
> + fixedShutter_ = 0s;
> + fixedAnalogueGain_ = 0;
> +}
> +
> +unsigned int AgcChannel::getConvergenceFrames() const
> +{
> + /*
> + * If shutter and gain have been explicitly set, there is no
> + * convergence to happen, so no need to drop any frames - return zero.
> + */
> + if (fixedShutter_ && fixedAnalogueGain_)
> + return 0;
> + else
> + return config_.convergenceFrames;
> +}
> +
> +std::vector<double> const &AgcChannel::getWeights() const
> +{
> + /*
> + * In case someone calls setMeteringMode and then this before the
> + * algorithm has run and updated the meteringMode_ pointer.
> + */
> + auto it = config_.meteringModes.find(meteringModeName_);
> + if (it == config_.meteringModes.end())
> + return meteringMode_->weights;
> + return it->second.weights;
> +}
> +
> +void AgcChannel::setEv(double ev)
> +{
> + ev_ = ev;
> +}
> +
> +void AgcChannel::setFlickerPeriod(Duration flickerPeriod)
> +{
> + flickerPeriod_ = flickerPeriod;
> +}
> +
> +void AgcChannel::setMaxShutter(Duration maxShutter)
> +{
> + maxShutter_ = maxShutter;
> +}
> +
> +void AgcChannel::setFixedShutter(Duration fixedShutter)
> +{
> + fixedShutter_ = fixedShutter;
> + /* Set this in case someone calls disableAuto() straight after. */
> + status_.shutterTime = limitShutter(fixedShutter_);
> +}
> +
> +void AgcChannel::setFixedAnalogueGain(double fixedAnalogueGain)
> +{
> + fixedAnalogueGain_ = fixedAnalogueGain;
> + /* Set this in case someone calls disableAuto() straight after. */
> + status_.analogueGain = limitGain(fixedAnalogueGain);
> +}
> +
> +void AgcChannel::setMeteringMode(std::string const &meteringModeName)
> +{
> + meteringModeName_ = meteringModeName;
> +}
> +
> +void AgcChannel::setExposureMode(std::string const &exposureModeName)
> +{
> + exposureModeName_ = exposureModeName;
> +}
> +
> +void AgcChannel::setConstraintMode(std::string const &constraintModeName)
> +{
> + constraintModeName_ = constraintModeName;
> +}
> +
> +void AgcChannel::switchMode(CameraMode const &cameraMode,
> + Metadata *metadata)
> +{
> + /* AGC expects the mode sensitivity always to be non-zero. */
> + ASSERT(cameraMode.sensitivity);
> +
> + housekeepConfig();
> +
> + /*
> + * Store the mode in the local state. We must cache the sensitivity
> + * of the previous mode for the calculations below.
> + */
> + double lastSensitivity = mode_.sensitivity;
> + mode_ = cameraMode;
> +
> + Duration fixedShutter = limitShutter(fixedShutter_);
> + if (fixedShutter && fixedAnalogueGain_) {
> + /* We're going to reset the algorithm here with these fixed values. */
> +
> + fetchAwbStatus(metadata);
> + double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
> + ASSERT(minColourGain != 0.0);
> +
> + /* This is the equivalent of computeTargetExposure and applyDigitalGain. */
> + target_.totalExposureNoDG = fixedShutter_ * fixedAnalogueGain_;
> + target_.totalExposure = target_.totalExposureNoDG / minColourGain;
> +
> + /* Equivalent of filterExposure. This resets any "history". */
> + filtered_ = target_;
> +
> + /* Equivalent of divideUpExposure. */
> + filtered_.shutter = fixedShutter;
> + filtered_.analogueGain = fixedAnalogueGain_;
> + } else if (status_.totalExposureValue) {
> + /*
> + * On a mode switch, various things could happen:
> + * - the exposure profile might change
> + * - a fixed exposure or gain might be set
> + * - the new mode's sensitivity might be different
> + * We cope with the last of these by scaling the target values. After
> + * that we just need to re-divide the exposure/gain according to the
> + * current exposure profile, which takes care of everything else.
> + */
> +
> + double ratio = lastSensitivity / cameraMode.sensitivity;
> + target_.totalExposureNoDG *= ratio;
> + target_.totalExposure *= ratio;
> + filtered_.totalExposureNoDG *= ratio;
> + filtered_.totalExposure *= ratio;
> +
> + divideUpExposure();
> + } else {
> + /*
> + * We come through here on startup, when at least one of the shutter
> + * or gain has not been fixed. We must still write those values out so
> + * that they will be applied immediately. We supply some arbitrary defaults
> + * for any that weren't set.
> + */
> +
> + /* Equivalent of divideUpExposure. */
> + filtered_.shutter = fixedShutter ? fixedShutter : config_.defaultExposureTime;
> + filtered_.analogueGain = fixedAnalogueGain_ ? fixedAnalogueGain_ : config_.defaultAnalogueGain;
> + }
> +
> + writeAndFinish(metadata, false);
> +}
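Just to check my reading of the sensitivity scaling above (numbers made up): if
the old mode had sensitivity 1.0 and the new one 2.0, ratio = 0.5, so all four
exposure totals are halved to keep the same image brightness, and
divideUpExposure() then re-splits shutter and gain under the new mode's limits.
That matches the commit message, so nothing to change here.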
> +
> +void AgcChannel::prepare(Metadata *imageMetadata)
> +{
> + Duration totalExposureValue = status_.totalExposureValue;
> + AgcStatus delayedStatus;
> + AgcPrepareStatus prepareStatus;
> +
> + if (!imageMetadata->get("agc.delayed_status", delayedStatus))
> + totalExposureValue = delayedStatus.totalExposureValue;
> +
> + prepareStatus.digitalGain = 1.0;
> + prepareStatus.locked = false;
> +
> + if (status_.totalExposureValue) {
> + /* Process has run, so we have meaningful values. */
> + DeviceStatus deviceStatus;
> + if (imageMetadata->get("device.status", deviceStatus) == 0) {
> + Duration actualExposure = deviceStatus.shutterSpeed *
> + deviceStatus.analogueGain;
> + if (actualExposure) {
> + double digitalGain = totalExposureValue / actualExposure;
> + LOG(RPiAgc, Debug) << "Want total exposure " << totalExposureValue;
> + /*
> + * Never ask for a gain < 1.0, and also impose
> + * some upper limit. Make it customisable?
> + */
> + prepareStatus.digitalGain = std::max(1.0, std::min(digitalGain, 4.0));
> + LOG(RPiAgc, Debug) << "Actual exposure " << actualExposure;
> + LOG(RPiAgc, Debug) << "Use digitalGain " << prepareStatus.digitalGain;
> + LOG(RPiAgc, Debug) << "Effective exposure "
> + << actualExposure * prepareStatus.digitalGain;
> + /* Decide whether AEC/AGC has converged. */
> + prepareStatus.locked = updateLockStatus(deviceStatus);
> + }
> + } else
> + LOG(RPiAgc, Warning) << "AgcChannel: no device metadata";
> + imageMetadata->set("agc.prepare_status", prepareStatus);
> + }
> +}
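A worked example of the digital gain computation, to make sure I follow it
(values invented): if Process asked for 40ms of total exposure but the frame
actually ran with a 20ms shutter at 1.5x analogue gain (30ms), then

    double dg = 40.0 / 30.0;                 /* ~1.33 */
    dg = std::max(1.0, std::min(dg, 4.0));   /* clamped to [1.0, 4.0]; needs <algorithm> */

so roughly 1.33x is applied digitally to reach the requested brightness. Looks
right to me.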
> +
> +void AgcChannel::process(StatisticsPtr &stats, Metadata *imageMetadata)
> +{
> + frameCount_++;
> + /*
> + * First a little bit of housekeeping, fetching up-to-date settings and
> + * configuration, that kind of thing.
> + */
> + housekeepConfig();
> + /* Fetch the AWB status immediately, so that we can assume it's there. */
> + fetchAwbStatus(imageMetadata);
> + /* Get the current exposure values for the frame that's just arrived. */
> + fetchCurrentExposure(imageMetadata);
> + /* Compute the total gain we require relative to the current exposure. */
> + double gain, targetY;
> + computeGain(stats, imageMetadata, gain, targetY);
> + /* Now compute the target (final) exposure which we think we want. */
> + computeTargetExposure(gain);
> + /* The results have to be filtered so as not to change too rapidly. */
> + filterExposure();
> + /*
> + * Some of the exposure has to be applied as digital gain, so work out
> + * what that is. This function also tells us whether it's decided to
> + * "desaturate" the image more quickly.
> + */
> + bool desaturate = applyDigitalGain(gain, targetY);
> + /*
> + * The last thing is to divide up the exposure value into a shutter time
> + * and analogue gain, according to the current exposure mode.
> + */
> + divideUpExposure();
> + /* Finally advertise what we've done. */
> + writeAndFinish(imageMetadata, desaturate);
> +}
> +
> +bool AgcChannel::updateLockStatus(DeviceStatus const &deviceStatus)
> +{
> + const double errorFactor = 0.10; /* make these customisable? */
> + const int maxLockCount = 5;
> + /* Reset "lock count" when we exceed this multiple of errorFactor */
> + const double resetMargin = 1.5;
> +
> + /* Add 200us to the exposure time error to allow for line quantisation. */
> + Duration exposureError = lastDeviceStatus_.shutterSpeed * errorFactor + 200us;
> + double gainError = lastDeviceStatus_.analogueGain * errorFactor;
> + Duration targetError = lastTargetExposure_ * errorFactor;
> +
> + /*
> + * Note that we don't know the exposure/gain limits of the sensor, so
> + * the values we keep requesting may be unachievable. For this reason
> + * we only insist that we're close to values in the past few frames.
> + */
> + if (deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed - exposureError &&
> + deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed + exposureError &&
> + deviceStatus.analogueGain > lastDeviceStatus_.analogueGain - gainError &&
> + deviceStatus.analogueGain < lastDeviceStatus_.analogueGain + gainError &&
> + status_.targetExposureValue > lastTargetExposure_ - targetError &&
> + status_.targetExposureValue < lastTargetExposure_ + targetError)
> + lockCount_ = std::min(lockCount_ + 1, maxLockCount);
> + else if (deviceStatus.shutterSpeed < lastDeviceStatus_.shutterSpeed - resetMargin * exposureError ||
> + deviceStatus.shutterSpeed > lastDeviceStatus_.shutterSpeed + resetMargin * exposureError ||
> + deviceStatus.analogueGain < lastDeviceStatus_.analogueGain - resetMargin * gainError ||
> + deviceStatus.analogueGain > lastDeviceStatus_.analogueGain + resetMargin * gainError ||
> + status_.targetExposureValue < lastTargetExposure_ - resetMargin * targetError ||
> + status_.targetExposureValue > lastTargetExposure_ + resetMargin * targetError)
> + lockCount_ = 0;
> +
> + lastDeviceStatus_ = deviceStatus;
> + lastTargetExposure_ = status_.targetExposureValue;
> +
> + LOG(RPiAgc, Debug) << "Lock count updated to " << lockCount_;
> + return lockCount_ == maxLockCount;
> +}
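The two-band hysteresis took me a second: with errorFactor = 0.10 and a last
shutter of 10ms, the "locked" window is +/-(1ms + 200us) while the reset window
is 1.5x wider, and values falling between the two bands leave lockCount_
untouched. A tiny sketch of the window test, with a helper name of my own
making (not in the patch):

    /* Hypothetical helper, only to illustrate the symmetric window test above. */
    static bool within(double value, double reference, double margin)
    {
        return value > reference - margin && value < reference + margin;
    }

No change requested, just recording my understanding.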
> +
> +void AgcChannel::housekeepConfig()
> +{
> + /* First fetch all the up-to-date settings, so no one else has to do it. */
> + status_.ev = ev_;
> + status_.fixedShutter = limitShutter(fixedShutter_);
> + status_.fixedAnalogueGain = fixedAnalogueGain_;
> + status_.flickerPeriod = flickerPeriod_;
> + LOG(RPiAgc, Debug) << "ev " << status_.ev << " fixedShutter "
> + << status_.fixedShutter << " fixedAnalogueGain "
> + << status_.fixedAnalogueGain;
> + /*
> + * Make sure the "mode" pointers point to the up-to-date things, if
> + * they've changed.
> + */
> + if (meteringModeName_ != status_.meteringMode) {
> + auto it = config_.meteringModes.find(meteringModeName_);
> + if (it == config_.meteringModes.end()) {
> + LOG(RPiAgc, Warning) << "No metering mode " << meteringModeName_;
> + meteringModeName_ = status_.meteringMode;
> + } else {
> + meteringMode_ = &it->second;
> + status_.meteringMode = meteringModeName_;
> + }
> + }
> + if (exposureModeName_ != status_.exposureMode) {
> + auto it = config_.exposureModes.find(exposureModeName_);
> + if (it == config_.exposureModes.end()) {
> + LOG(RPiAgc, Warning) << "No exposure profile " << exposureModeName_;
> + exposureModeName_ = status_.exposureMode;
> + } else {
> + exposureMode_ = &it->second;
> + status_.exposureMode = exposureModeName_;
> + }
> + }
> + if (constraintModeName_ != status_.constraintMode) {
> + auto it = config_.constraintModes.find(constraintModeName_);
> + if (it == config_.constraintModes.end()) {
> + LOG(RPiAgc, Warning) << "No constraint list " << constraintModeName_;
> + constraintModeName_ = status_.constraintMode;
> + } else {
> + constraintMode_ = &it->second;
> + status_.constraintMode = constraintModeName_;
> + }
> + }
> + LOG(RPiAgc, Debug) << "exposureMode "
> + << exposureModeName_ << " constraintMode "
> + << constraintModeName_ << " meteringMode "
> + << meteringModeName_;
> +}
> +
> +void AgcChannel::fetchCurrentExposure(Metadata *imageMetadata)
> +{
> + std::unique_lock<Metadata> lock(*imageMetadata);
> + DeviceStatus *deviceStatus =
> + imageMetadata->getLocked<DeviceStatus>("device.status");
> + if (!deviceStatus)
> + LOG(RPiAgc, Fatal) << "No device metadata";
> + current_.shutter = deviceStatus->shutterSpeed;
> + current_.analogueGain = deviceStatus->analogueGain;
> + AgcStatus *agcStatus =
> + imageMetadata->getLocked<AgcStatus>("agc.status");
> + current_.totalExposure = agcStatus ? agcStatus->totalExposureValue : 0s;
> + current_.totalExposureNoDG = current_.shutter * current_.analogueGain;
> +}
> +
> +void AgcChannel::fetchAwbStatus(Metadata *imageMetadata)
> +{
> + awb_.gainR = 1.0; /* in case not found in metadata */
> + awb_.gainG = 1.0;
> + awb_.gainB = 1.0;
> + if (imageMetadata->get("awb.status", awb_) != 0)
> + LOG(RPiAgc, Debug) << "No AWB status found";
> +}
> +
> +static double computeInitialY(StatisticsPtr &stats, AwbStatus const &awb,
> + std::vector<double> &weights, double gain)
> +{
> + constexpr uint64_t maxVal = 1 << Statistics::NormalisationFactorPow2;
> +
> + /*
> + * If we have no AGC region stats, but do have a Y histogram, use that
> + * directly to calculate the mean Y value of the image.
> + */
> + if (!stats->agcRegions.numRegions() && stats->yHist.bins()) {
> + /*
> + * When the gain is applied to the histogram, anything below minBin
> + * will scale up directly with the gain, but anything above that
> + * will saturate into the top bin.
> + */
> + auto &hist = stats->yHist;
> + double minBin = std::min(1.0, 1.0 / gain) * hist.bins();
> + double binMean = hist.interBinMean(0.0, minBin);
> + double numUnsaturated = hist.cumulativeFreq(minBin);
> + /* This term is from all the pixels that won't saturate. */
> + double ySum = binMean * gain * numUnsaturated;
> + /* And add the ones that will saturate. */
> + ySum += (hist.total() - numUnsaturated) * hist.bins();
> + return ySum / hist.total() / hist.bins();
> + }
> +
> + ASSERT(weights.size() == stats->agcRegions.numRegions());
> +
> + /*
> + * Note that the weights are applied by the IPA to the statistics directly,
> + * before they are given to us here.
> + */
> + double rSum = 0, gSum = 0, bSum = 0, pixelSum = 0;
> + for (unsigned int i = 0; i < stats->agcRegions.numRegions(); i++) {
> + auto &region = stats->agcRegions.get(i);
> + rSum += std::min<double>(region.val.rSum * gain, (maxVal - 1) * region.counted);
> + gSum += std::min<double>(region.val.gSum * gain, (maxVal - 1) * region.counted);
> + bSum += std::min<double>(region.val.bSum * gain, (maxVal - 1) * region.counted);
> + pixelSum += region.counted;
> + }
> + if (pixelSum == 0.0) {
> + LOG(RPiAgc, Warning) << "computeInitialY: pixelSum is zero";
> + return 0;
> + }
> +
> + double ySum;
> + /* Factor in the AWB correction if needed. */
> + if (stats->agcStatsPos == Statistics::AgcStatsPos::PreWb) {
> + ySum = rSum * awb.gainR * .299 +
> + gSum * awb.gainG * .587 +
> + bSum * awb.gainB * .114;
> + } else
> + ySum = rSum * .299 + gSum * .587 + bSum * .114;
> +
> + return ySum / pixelSum / (1 << 16);
> +}
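Checking my understanding of the two paths here: in the histogram branch, with
gain = 4.0 we get minBin = bins / 4, so only the bottom quarter of the
histogram is assumed to scale linearly with the gain and everything above it is
counted as saturating into the top bin, which matches the comment. The region
branch is the usual Rec.601 luma weighting Y = 0.299 R + 0.587 G + 0.114 B,
with the result normalised by the pixel count and the 1 << 16 full scale at the
end. (Both are just me checking the maths, nothing to change.)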
> +
> +/*
> + * We handle extra gain through EV by adjusting our Y targets. However, you
> + * simply can't monitor histograms once they get very close to (or beyond!)
> + * saturation, so we clamp the Y targets to this value. It does mean that EV
> + * increases don't necessarily do quite what you might expect in certain
> + * (contrived) cases.
> + */
> +
> +static constexpr double EvGainYTargetLimit = 0.9;
> +
> +static double constraintComputeGain(AgcConstraint &c, const Histogram &h, double lux,
> + double evGain, double &targetY)
> +{
> + targetY = c.yTarget.eval(c.yTarget.domain().clip(lux));
> + targetY = std::min(EvGainYTargetLimit, targetY * evGain);
> + double iqm = h.interQuantileMean(c.qLo, c.qHi);
> + return (targetY * h.bins()) / iqm;
> +}
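Units check on the constraint gain, assuming I read interQuantileMean() as
returning a mean in histogram-bin units: gain = targetY * bins / iqm, so for
example with 1024 bins, an inter-quantile mean of 51 bins and a target of 0.16,
gain comes out at roughly 0.16 * 1024 / 51, i.e. about 3.2. That seems
consistent with how computeGain() uses it below.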
> +
> +void AgcChannel::computeGain(StatisticsPtr &statistics, Metadata *imageMetadata,
> + double &gain, double &targetY)
> +{
> + struct LuxStatus lux = {};
> + lux.lux = 400; /* default lux level to 400 in case no metadata found */
> + if (imageMetadata->get("lux.status", lux) != 0)
> + LOG(RPiAgc, Warning) << "No lux level found";
> + const Histogram &h = statistics->yHist;
> + double evGain = status_.ev * config_.baseEv;
> + /*
> + * The initial gain and target_Y come from some of the regions. After
> + * that we consider the histogram constraints.
> + */
> + targetY = config_.yTarget.eval(config_.yTarget.domain().clip(lux.lux));
> + targetY = std::min(EvGainYTargetLimit, targetY * evGain);
> +
> + /*
> + * Do this calculation a few times as brightness increase can be
> + * non-linear when there are saturated regions.
> + */
> + gain = 1.0;
> + for (int i = 0; i < 8; i++) {
> + double initialY = computeInitialY(statistics, awb_, meteringMode_->weights, gain);
> + double extraGain = std::min(10.0, targetY / (initialY + .001));
> + gain *= extraGain;
> + LOG(RPiAgc, Debug) << "Initial Y " << initialY << " target " << targetY
> + << " gives gain " << gain;
> + if (extraGain < 1.01) /* close enough */
> + break;
> + }
> +
> + for (auto &c : *constraintMode_) {
> + double newTargetY;
> + double newGain = constraintComputeGain(c, h, lux.lux, evGain, newTargetY);
> + LOG(RPiAgc, Debug) << "Constraint has target_Y "
> + << newTargetY << " giving gain " << newGain;
> + if (c.bound == AgcConstraint::Bound::LOWER && newGain > gain) {
> + LOG(RPiAgc, Debug) << "Lower bound constraint adopted";
> + gain = newGain;
> + targetY = newTargetY;
> + } else if (c.bound == AgcConstraint::Bound::UPPER && newGain < gain) {
> + LOG(RPiAgc, Debug) << "Upper bound constraint adopted";
> + gain = newGain;
> + targetY = newTargetY;
> + }
> + }
> + LOG(RPiAgc, Debug) << "Final gain " << gain << " (target_Y " << targetY << " ev "
> + << status_.ev << " base_ev " << config_.baseEv
> + << ")";
> +}
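The 8-iteration loop also reads fine to me: each pass re-simulates the
statistics with the accumulated gain applied, so e.g. an initial Y of 0.04
against a target of 0.16 asks for roughly 4x, a second pass might then see
saturation pulling the simulated Y up to only 0.14 and ask for a further ~1.13x,
and the loop stops once the extra factor drops below 1%. (Numbers invented,
only to illustrate why the re-evaluation is needed.)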
> +
> +void AgcChannel::computeTargetExposure(double gain)
> +{
> + if (status_.fixedShutter && status_.fixedAnalogueGain) {
> + /*
> + * When ag and shutter are both fixed, we need to drive the
> + * total exposure so that we end up with a digital gain of at least
> + * 1/minColourGain. Otherwise we'd desaturate channels causing
> + * white to go cyan or magenta.
> + */
> + double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
> + ASSERT(minColourGain != 0.0);
> + target_.totalExposure =
> + status_.fixedShutter * status_.fixedAnalogueGain / minColourGain;
> + } else {
> + /*
> + * The statistics reflect the image without digital gain, so the final
> + * total exposure we're aiming for is:
> + */
> + target_.totalExposure = current_.totalExposureNoDG * gain;
> + /* The final target exposure is also limited to what the exposure mode allows. */
> + Duration maxShutter = status_.fixedShutter
> + ? status_.fixedShutter
> + : exposureMode_->shutter.back();
> + maxShutter = limitShutter(maxShutter);
> + Duration maxTotalExposure =
> + maxShutter *
> + (status_.fixedAnalogueGain != 0.0
> + ? status_.fixedAnalogueGain
> + : exposureMode_->gain.back());
> + target_.totalExposure = std::min(target_.totalExposure, maxTotalExposure);
> + }
> + LOG(RPiAgc, Debug) << "Target totalExposure " << target_.totalExposure;
> +}
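Sanity-checking the cap in the else branch with made-up figures: with a 33ms
longest shutter in the exposure profile and a largest gain of 8.0,
maxTotalExposure comes out at 264ms, so however large the requested gain,
target_.totalExposure can never exceed that. The logic looks right.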
> +
> +bool AgcChannel::applyDigitalGain(double gain, double targetY)
> +{
> + double minColourGain = std::min({ awb_.gainR, awb_.gainG, awb_.gainB, 1.0 });
> + ASSERT(minColourGain != 0.0);
> + double dg = 1.0 / minColourGain;
> + /*
> + * I think this pipeline subtracts black level and rescales before we
> + * get the stats, so no need to worry about it.
> + */
> + LOG(RPiAgc, Debug) << "after AWB, target dg " << dg << " gain " << gain
> + << " target_Y " << targetY;
> + /*
> + * Finally, if we're trying to reduce exposure but the target_Y is
> + * "close" to 1.0, then the gain computed for that constraint will be
> + * only slightly less than one, because the measured Y can never be
> + * larger than 1.0. When this happens, demand a large digital gain so
> + * that the exposure can be reduced, de-saturating the image much more
> + * quickly (and we then approach the correct value more quickly from
> + * below).
> + */
> + bool desaturate = targetY > config_.fastReduceThreshold &&
> + gain < sqrt(targetY);
> + if (desaturate)
> + dg /= config_.fastReduceThreshold;
> + LOG(RPiAgc, Debug) << "Digital gain " << dg << " desaturate? " << desaturate;
> + filtered_.totalExposureNoDG = filtered_.totalExposure / dg;
> + LOG(RPiAgc, Debug) << "Target totalExposureNoDG " << filtered_.totalExposureNoDG;
> + return desaturate;
> +}
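The desaturate condition also makes sense once I plug numbers in (illustrative
only): with fastReduceThreshold = 0.4, a targetY of 0.81 and a computed gain of
0.7 (less than sqrt(0.81) = 0.9) triggers it, and dg is then divided by 0.4,
i.e. at least 2.5x digital gain is demanded so the shutter/gain side of the
exposure can be brought down quickly.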
> +
> +void AgcChannel::filterExposure()
> +{
> + double speed = config_.speed;
> + /*
> + * AGC adapts instantly if both shutter and gain are directly specified
> + * or we're in the startup phase.
> + */
> + if ((status_.fixedShutter && status_.fixedAnalogueGain) ||
> + frameCount_ <= config_.startupFrames)
> + speed = 1.0;
> + if (!filtered_.totalExposure) {
> + filtered_.totalExposure = target_.totalExposure;
> + } else {
> + /*
> + * If close to the result go faster, to save making so many
> + * micro-adjustments on the way. (Make this customisable?)
> + */
> + if (filtered_.totalExposure < 1.2 * target_.totalExposure &&
> + filtered_.totalExposure > 0.8 * target_.totalExposure)
> + speed = sqrt(speed);
> + filtered_.totalExposure = speed * target_.totalExposure +
> + filtered_.totalExposure * (1.0 - speed);
> + }
> + LOG(RPiAgc, Debug) << "After filtering, totalExposure " << filtered_.totalExposure
> + << " no dg " << filtered_.totalExposureNoDG;
> +}
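And the filter is the usual one-pole IIR. A minimal sketch with plain doubles,
not taken from the patch, just restating it:

    /* filtered tracks target at 'speed' per frame; faster once within 20%. */
    static double filterStep(double filtered, double target, double speed)
    {
        if (filtered < 1.2 * target && filtered > 0.8 * target)
            speed = std::sqrt(speed);   /* needs <cmath> */
        return speed * target + filtered * (1.0 - speed);
    }

With the default speed of 0.2, a step from 10ms to 20ms total exposure gives
roughly 12, 13.6, 14.9ms on successive frames, and sqrt(0.2) (about 0.45) takes
over once within the +/-20% band, which explains the "go faster when close"
comment.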
> +
> +void AgcChannel::divideUpExposure()
> +{
> + /*
> + * Sending the fixed shutter/gain cases through the same code may seem
> + * unnecessary, but it will make more sense when we extend this to cover
> + * variable aperture.
> + */
> + Duration exposureValue = filtered_.totalExposureNoDG;
> + Duration shutterTime;
> + double analogueGain;
> + shutterTime = status_.fixedShutter ? status_.fixedShutter
> + : exposureMode_->shutter[0];
> + shutterTime = limitShutter(shutterTime);
> + analogueGain = status_.fixedAnalogueGain != 0.0 ? status_.fixedAnalogueGain
> + : exposureMode_->gain[0];
> + analogueGain = limitGain(analogueGain);
> + if (shutterTime * analogueGain < exposureValue) {
> + for (unsigned int stage = 1;
> + stage < exposureMode_->gain.size(); stage++) {
> + if (!status_.fixedShutter) {
> + Duration stageShutter =
> + limitShutter(exposureMode_->shutter[stage]);
> + if (stageShutter * analogueGain >= exposureValue) {
> + shutterTime = exposureValue / analogueGain;
> + break;
> + }
> + shutterTime = stageShutter;
> + }
> + if (status_.fixedAnalogueGain == 0.0) {
> + if (exposureMode_->gain[stage] * shutterTime >= exposureValue) {
> + analogueGain = exposureValue / shutterTime;
> + break;
> + }
> + analogueGain = exposureMode_->gain[stage];
> + analogueGain = limitGain(analogueGain);
> + }
> + }
> + }
> + LOG(RPiAgc, Debug) << "Divided up shutter and gain are " << shutterTime << " and "
> + << analogueGain;
> + /*
> + * Finally adjust shutter time for flicker avoidance (require both
> + * shutter and gain not to be fixed).
> + */
> + if (!status_.fixedShutter && !status_.fixedAnalogueGain &&
> + status_.flickerPeriod) {
> + int flickerPeriods = shutterTime / status_.flickerPeriod;
> + if (flickerPeriods) {
> + Duration newShutterTime = flickerPeriods * status_.flickerPeriod;
> + analogueGain *= shutterTime / newShutterTime;
> + /*
> + * We should still not allow the ag to go over the
> + * largest value in the exposure mode. Note that this
> + * may force more of the total exposure into the digital
> + * gain as a side-effect.
> + */
> + analogueGain = std::min(analogueGain, exposureMode_->gain.back());
> + analogueGain = limitGain(analogueGain);
> + shutterTime = newShutterTime;
> + }
> + LOG(RPiAgc, Debug) << "After flicker avoidance, shutter "
> + << shutterTime << " gain " << analogueGain;
> + }
> + filtered_.shutter = shutterTime;
> + filtered_.analogueGain = analogueGain;
> +}
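One worked example for the flicker quantisation, with invented figures and
variable names (shutterUs/flickerUs/gain are mine, not the patch's): assuming a
10ms flicker period for 50Hz mains, a 16.7ms shutter is floored to one whole
period and the gain scaled up by about 1.67x to preserve the total exposure,
subject to the profile's gain ceiling. Roughly:

    /* Illustrative only: microseconds as plain doubles, not utils::Duration. */
    int periods = static_cast<int>(shutterUs / flickerUs);  /* 16700 / 10000 -> 1 */
    double newShutterUs = periods * flickerUs;              /* 10000 us */
    gain *= shutterUs / newShutterUs;                       /* ~1.67x */

which is what the code does, modulo the Duration types.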
> +
> +void AgcChannel::writeAndFinish(Metadata *imageMetadata, bool desaturate)
> +{
> + status_.totalExposureValue = filtered_.totalExposure;
> + status_.targetExposureValue = desaturate ? 0s : target_.totalExposureNoDG;
> + status_.shutterTime = filtered_.shutter;
> + status_.analogueGain = filtered_.analogueGain;
> + /*
> + * Write to metadata as well, in case anyone wants to update the camera
> + * immediately.
> + */
> + imageMetadata->set("agc.status", status_);
> + LOG(RPiAgc, Debug) << "Output written, total exposure requested is "
> + << filtered_.totalExposure;
> + LOG(RPiAgc, Debug) << "Camera exposure update: shutter time " << filtered_.shutter
> + << " analogue gain " << filtered_.analogueGain;
> +}
> +
> +Duration AgcChannel::limitShutter(Duration shutter)
> +{
> + /*
> + * shutter == 0 is a special case for fixed shutter values, and must pass
> + * through unchanged
> + */
> + if (!shutter)
> + return shutter;
> +
> + shutter = std::clamp(shutter, mode_.minShutter, maxShutter_);
> + return shutter;
> +}
> +
> +double AgcChannel::limitGain(double gain) const
> +{
> + /*
> + * Only limit the lower bounds of the gain value to what the sensor limits.
> + * The upper bound on analogue gain will be made up with additional digital
> + * gain applied by the ISP.
> + *
> + * gain == 0.0 is a special case for fixed shutter values, and must pass
> + * through unchanged
> + */
> + if (!gain)
> + return gain;
> +
> + gain = std::max(gain, mode_.minAnalogueGain);
> + return gain;
> +}
I presume the above code is a straight copy of the existing agc.cpp
implementation, so I'll skip a complete review of it...
> diff --git a/src/ipa/rpi/controller/rpi/agc_channel.h b/src/ipa/rpi/controller/rpi/agc_channel.h
> new file mode 100644
> index 00000000..dc4356f3
> --- /dev/null
> +++ b/src/ipa/rpi/controller/rpi/agc_channel.h
> @@ -0,0 +1,135 @@
> +/* SPDX-License-Identifier: BSD-2-Clause */
> +/*
> + * Copyright (C) 2019, Raspberry Pi Ltd
Time flies :(
> + *
> + * agc.h - AGC/AEC control algorithm
> + */
> +#pragma once
> +
> +#include <mutex>
> +#include <vector>
You'll need <map> and <string> here as well
(and <map> can then be removed from the .cpp file).
> +
> +#include <libcamera/base/utils.h>
> +
> +#include "../agc_status.h"
> +#include "../awb_status.h"
> +#include "../pwl.h"
> +
> +/* This is our implementation of AGC. */
> +
> +namespace RPiController {
> +
> +struct AgcMeteringMode {
> + std::vector<double> weights;
> + int read(const libcamera::YamlObject &params);
> +};
> +
> +struct AgcExposureMode {
> + std::vector<libcamera::utils::Duration> shutter;
> + std::vector<double> gain;
> + int read(const libcamera::YamlObject &params);
> +};
> +
> +struct AgcConstraint {
> + enum class Bound { LOWER = 0,
> + UPPER = 1 };
> + Bound bound;
> + double qLo;
> + double qHi;
> + Pwl yTarget;
> + int read(const libcamera::YamlObject &params);
> +};
> +
> +typedef std::vector<AgcConstraint> AgcConstraintMode;
> +
> +struct AgcConfig {
> + int read(const libcamera::YamlObject &params);
> + std::map<std::string, AgcMeteringMode> meteringModes;
> + std::map<std::string, AgcExposureMode> exposureModes;
> + std::map<std::string, AgcConstraintMode> constraintModes;
> + Pwl yTarget;
> + double speed;
> + uint16_t startupFrames;
> + unsigned int convergenceFrames;
> + double maxChange;
> + double minChange;
> + double fastReduceThreshold;
> + double speedUpThreshold;
> + std::string defaultMeteringMode;
> + std::string defaultExposureMode;
> + std::string defaultConstraintMode;
> + double baseEv;
> + libcamera::utils::Duration defaultExposureTime;
> + double defaultAnalogueGain;
> +};
> +
> +class AgcChannel
> +{
> +public:
> + AgcChannel();
> + int read(const libcamera::YamlObject &params,
> + const Controller::HardwareConfig &hardwareConfig);
> + unsigned int getConvergenceFrames() const;
> + std::vector<double> const &getWeights() const;
> + void setEv(double ev);
> + void setFlickerPeriod(libcamera::utils::Duration flickerPeriod);
> + void setMaxShutter(libcamera::utils::Duration maxShutter);
> + void setFixedShutter(libcamera::utils::Duration fixedShutter);
> + void setFixedAnalogueGain(double fixedAnalogueGain);
> + void setMeteringMode(std::string const &meteringModeName);
> + void setExposureMode(std::string const &exposureModeName);
> + void setConstraintMode(std::string const &constraintModeName);
> + void enableAuto();
> + void disableAuto();
> + void switchMode(CameraMode const &cameraMode, Metadata *metadata);
> + void prepare(Metadata *imageMetadata);
> + void process(StatisticsPtr &stats, Metadata *imageMetadata);
> +
> +private:
> + bool updateLockStatus(DeviceStatus const &deviceStatus);
> + AgcConfig config_;
> + void housekeepConfig();
> + void fetchCurrentExposure(Metadata *imageMetadata);
> + void fetchAwbStatus(Metadata *imageMetadata);
> + void computeGain(StatisticsPtr &statistics, Metadata *imageMetadata,
> + double &gain, double &targetY);
> + void computeTargetExposure(double gain);
> + void filterExposure();
> + bool applyDigitalGain(double gain, double targetY);
> + void divideUpExposure();
> + void writeAndFinish(Metadata *imageMetadata, bool desaturate);
> + libcamera::utils::Duration limitShutter(libcamera::utils::Duration shutter);
> + double limitGain(double gain) const;
> + AgcMeteringMode *meteringMode_;
> + AgcExposureMode *exposureMode_;
> + AgcConstraintMode *constraintMode_;
> + CameraMode mode_;
> + uint64_t frameCount_;
> + AwbStatus awb_;
> + struct ExposureValues {
> + ExposureValues();
My compiler seems to be able to generate a default constructor for this, so
could it be defaulted instead?
> +
> + libcamera::utils::Duration shutter;
> + double analogueGain;
> + libcamera::utils::Duration totalExposure;
> + libcamera::utils::Duration totalExposureNoDG; /* without digital gain */
> + };
> + ExposureValues current_; /* values for the current frame */
> + ExposureValues target_; /* calculate the values we want here */
> + ExposureValues filtered_; /* these values are filtered towards target */
> + AgcStatus status_;
> + int lockCount_;
> + DeviceStatus lastDeviceStatus_;
> + libcamera::utils::Duration lastTargetExposure_;
> + /* Below here the "settings" that applications can change. */
> + std::string meteringModeName_;
> + std::string exposureModeName_;
> + std::string constraintModeName_;
> + double ev_;
> + libcamera::utils::Duration flickerPeriod_;
> + libcamera::utils::Duration maxShutter_;
> + libcamera::utils::Duration fixedShutter_;
> + double fixedAnalogueGain_;
> +};
> +
> +} /* namespace RPiController */
> --
> 2.30.2
>