[libcamera-devel] [PATCH v3 1/3] ipa: ipu3: Rework IPAFrameContext

Umang Jain umang.jain at ideasonboard.com
Wed May 18 11:31:04 CEST 2022


Hi Jacopo,

On 5/18/22 09:20, Jacopo Mondi wrote:
> Hi Umang
>
> On Tue, May 17, 2022 at 09:18:31PM +0200, Umang Jain via libcamera-devel wrote:
>> Currently, IPAFrameContext consolidates the values computed by the
>> active state of the algorithms, along with the values applied on
>> the sensor.
>>
>> Moving ahead, we want to have a frame context associated with each
>> incoming request (or frame to be captured). This should not necessarily
>> be tied to the "active state" of the algorithms, hence:
>> 	- Rename current IPAFrameContext -> IPAActiveState
>> 	  This will now reflect the latest active state of the
>> 	  algorithms and has nothing to do with any frame-related
>> 	  ops/values.
>> 	- Re-instate IPAFrameContext with a sub-structure 'sensor'
>> 	  currently storing the exposure and gain value.
>>
>> Adapt the various accesses to the frame context to the changes
>> described above.
>>
>> Subsequently, the re-instated IPAFrameContext will be extended to
>> contain a frame number and a ControlList to remember the incoming
>> request controls provided by the application. A ring-buffer will
>> be introduced to store these frame contexts for a certain number
>> of frames.
>>
>> Signed-off-by: Umang Jain <umang.jain at ideasonboard.com>
>> ---
>>   src/ipa/ipu3/algorithms/af.cpp           | 42 ++++++------
>>   src/ipa/ipu3/algorithms/agc.cpp          | 21 +++---
>>   src/ipa/ipu3/algorithms/agc.h            |  2 +-
>>   src/ipa/ipu3/algorithms/awb.cpp          | 16 ++---
>>   src/ipa/ipu3/algorithms/tone_mapping.cpp | 10 +--
>>   src/ipa/ipu3/ipa_context.cpp             | 84 ++++++++++++++----------
>>   src/ipa/ipu3/ipa_context.h               | 16 +++--
>>   src/ipa/ipu3/ipu3.cpp                    | 11 ++--
>>   8 files changed, 110 insertions(+), 92 deletions(-)
>>
>> diff --git a/src/ipa/ipu3/algorithms/af.cpp b/src/ipa/ipu3/algorithms/af.cpp
>> index f700b01f..8a5a6b1a 100644
>> --- a/src/ipa/ipu3/algorithms/af.cpp
>> +++ b/src/ipa/ipu3/algorithms/af.cpp
>> @@ -185,11 +185,11 @@ int Af::configure(IPAContext &context, const IPAConfigInfo &configInfo)
>>   	afIgnoreFrameReset();
>>
>>   	/* Initial focus value */
>> -	context.frameContext.af.focus = 0;
>> +	context.activeState.af.focus = 0;
>>   	/* Maximum variance of the AF statistics */
>> -	context.frameContext.af.maxVariance = 0;
>> +	context.activeState.af.maxVariance = 0;
>>   	/* The stable AF value flag. if it is true, the AF should be in a stable state. */
>> -	context.frameContext.af.stable = false;
>> +	context.activeState.af.stable = false;
>>
>>   	return 0;
>>   }
>> @@ -211,10 +211,10 @@ void Af::afCoarseScan(IPAContext &context)
>>
>>   	if (afScan(context, kCoarseSearchStep)) {
>>   		coarseCompleted_ = true;
>> -		context.frameContext.af.maxVariance = 0;
>> -		focus_ = context.frameContext.af.focus -
>> -			 (context.frameContext.af.focus * kFineRange);
>> -		context.frameContext.af.focus = focus_;
>> +		context.activeState.af.maxVariance = 0;
>> +		focus_ = context.activeState.af.focus -
>> +			 (context.activeState.af.focus * kFineRange);
>> +		context.activeState.af.focus = focus_;
>>   		previousVariance_ = 0;
>>   		maxStep_ = std::clamp(focus_ + static_cast<uint32_t>((focus_ * kFineRange)),
>>   				      0U, kMaxFocusSteps);
>> @@ -237,7 +237,7 @@ void Af::afFineScan(IPAContext &context)
>>   		return;
>>
>>   	if (afScan(context, kFineSearchStep)) {
>> -		context.frameContext.af.stable = true;
>> +		context.activeState.af.stable = true;
>>   		fineCompleted_ = true;
>>   	}
>>   }
>> @@ -254,10 +254,10 @@ void Af::afReset(IPAContext &context)
>>   	if (afNeedIgnoreFrame())
>>   		return;
>>
>> -	context.frameContext.af.maxVariance = 0;
>> -	context.frameContext.af.focus = 0;
>> +	context.activeState.af.maxVariance = 0;
>> +	context.activeState.af.focus = 0;
>>   	focus_ = 0;
>> -	context.frameContext.af.stable = false;
>> +	context.activeState.af.stable = false;
>>   	ignoreCounter_ = kIgnoreFrame;
>>   	previousVariance_ = 0.0;
>>   	coarseCompleted_ = false;
>> @@ -280,7 +280,7 @@ bool Af::afScan(IPAContext &context, int min_step)
>>   {
>>   	if (focus_ > maxStep_) {
>>   		/* If reach the max step, move lens to the position. */
>> -		context.frameContext.af.focus = bestFocus_;
>> +		context.activeState.af.focus = bestFocus_;
>>   		return true;
>>   	} else {
>>   		/*
>> @@ -288,8 +288,8 @@ bool Af::afScan(IPAContext &context, int min_step)
>>   		 * derivative. If the direction changes, it means we have
>>   		 * passed a maximum one step before.
>>   		 */
>> -		if ((currentVariance_ - context.frameContext.af.maxVariance) >=
>> -		    -(context.frameContext.af.maxVariance * 0.1)) {
>> +		if ((currentVariance_ - context.activeState.af.maxVariance) >=
>> +		    -(context.activeState.af.maxVariance * 0.1)) {
>>   			/*
>>   			 * Positive and zero derivative:
>>   			 * The variance is still increasing. The focus could be
>> @@ -298,8 +298,8 @@ bool Af::afScan(IPAContext &context, int min_step)
>>   			 */
>>   			bestFocus_ = focus_;
>>   			focus_ += min_step;
>> -			context.frameContext.af.focus = focus_;
>> -			context.frameContext.af.maxVariance = currentVariance_;
>> +			context.activeState.af.focus = focus_;
>> +			context.activeState.af.maxVariance = currentVariance_;
>>   		} else {
>>   			/*
>>   			 * Negative derivative:
>> @@ -307,7 +307,7 @@ bool Af::afScan(IPAContext &context, int min_step)
>>   			 * variance is found. Set focus step to previous good one
>>   			 * then return immediately.
>>   			 */
>> -			context.frameContext.af.focus = bestFocus_;
>> +			context.activeState.af.focus = bestFocus_;
>>   			return true;
>>   		}
>>   	}
>> @@ -389,13 +389,13 @@ double Af::afEstimateVariance(Span<const y_table_item_t> y_items, bool isY1)
>>   bool Af::afIsOutOfFocus(IPAContext context)
>>   {
>>   	const uint32_t diff_var = std::abs(currentVariance_ -
>> -					   context.frameContext.af.maxVariance);
>> -	const double var_ratio = diff_var / context.frameContext.af.maxVariance;
>> +					   context.activeState.af.maxVariance);
>> +	const double var_ratio = diff_var / context.activeState.af.maxVariance;
>>
>>   	LOG(IPU3Af, Debug) << "Variance change rate: "
>>   			   << var_ratio
>>   			   << " Current VCM step: "
>> -			   << context.frameContext.af.focus;
>> +			   << context.activeState.af.focus;
>>
>>   	if (var_ratio > kMaxChange)
>>   		return true;
>> @@ -437,7 +437,7 @@ void Af::process(IPAContext &context, const ipu3_uapi_stats_3a *stats)
>>   	 */
>>   	currentVariance_ = afEstimateVariance(y_items, !coarseCompleted_);
>>
>> -	if (!context.frameContext.af.stable) {
>> +	if (!context.activeState.af.stable) {
>>   		afCoarseScan(context);
>>   		afFineScan(context);
>>   	} else {
>> diff --git a/src/ipa/ipu3/algorithms/agc.cpp b/src/ipa/ipu3/algorithms/agc.cpp
>> index 7d4b3503..fdeec09d 100644
>> --- a/src/ipa/ipu3/algorithms/agc.cpp
>> +++ b/src/ipa/ipu3/algorithms/agc.cpp
>> @@ -87,7 +87,7 @@ int Agc::configure(IPAContext &context,
>>   		   [[maybe_unused]] const IPAConfigInfo &configInfo)
>>   {
>>   	const IPASessionConfiguration &configuration = context.configuration;
>> -	IPAFrameContext &frameContext = context.frameContext;
>> +	IPAActiveState &activeState = context.activeState;
>>
>>   	stride_ = configuration.grid.stride;
>>
>> @@ -99,8 +99,8 @@ int Agc::configure(IPAContext &context,
>>   	maxAnalogueGain_ = std::min(configuration.agc.maxAnalogueGain, kMaxAnalogueGain);
>>
>>   	/* Configure the default exposure and gain. */
>> -	frameContext.agc.gain = std::max(minAnalogueGain_, kMinAnalogueGain);
>> -	frameContext.agc.exposure = 10ms / configuration.sensor.lineDuration;
>> +	activeState.agc.gain = std::max(minAnalogueGain_, kMinAnalogueGain);
>> +	activeState.agc.exposure = 10ms / configuration.sensor.lineDuration;
>>
>>   	frameCount_ = 0;
>>   	return 0;
>> @@ -249,9 +249,10 @@ void Agc::computeExposure(IPAContext &context, double yGain,
>>   			    << shutterTime << " and "
>>   			    << stepGain;
>>
>> +	IPAActiveState &activeState = context.activeState;
>>   	/* Update the estimated exposure and gain. */
>> -	frameContext.agc.exposure = shutterTime / configuration.sensor.lineDuration;
>> -	frameContext.agc.gain = stepGain;
>> +	activeState.agc.exposure = shutterTime / configuration.sensor.lineDuration;
>> +	activeState.agc.gain = stepGain;
>>   }
>>
>>   /**
>> @@ -279,7 +280,7 @@ void Agc::computeExposure(IPAContext &context, double yGain,
>>    * More detailed information can be found in:
>>    * https://en.wikipedia.org/wiki/Relative_luminance
>>    */
>> -double Agc::estimateLuminance(IPAFrameContext &frameContext,
>> +double Agc::estimateLuminance(IPAActiveState &activeState,
>>   			      const ipu3_uapi_grid_config &grid,
>>   			      const ipu3_uapi_stats_3a *stats,
>>   			      double gain)
>> @@ -307,9 +308,9 @@ double Agc::estimateLuminance(IPAFrameContext &frameContext,
>>   	 * Apply the AWB gains to approximate colours correctly, use the Rec.
>>   	 * 601 formula to calculate the relative luminance, and normalize it.
>>   	 */
>> -	double ySum = redSum * frameContext.awb.gains.red * 0.299
>> -		    + greenSum * frameContext.awb.gains.green * 0.587
>> -		    + blueSum * frameContext.awb.gains.blue * 0.114;
>> +	double ySum = redSum * activeState.awb.gains.red * 0.299
>> +		    + greenSum * activeState.awb.gains.green * 0.587
>> +		    + blueSum * activeState.awb.gains.blue * 0.114;
>>
>>   	return ySum / (grid.height * grid.width) / 255;
>>   }
>> @@ -344,7 +345,7 @@ void Agc::process(IPAContext &context, const ipu3_uapi_stats_3a *stats)
>>   	double yTarget = kRelativeLuminanceTarget;
>>
>>   	for (unsigned int i = 0; i < 8; i++) {
>> -		double yValue = estimateLuminance(context.frameContext,
>> +		double yValue = estimateLuminance(context.activeState,
>>   						  context.configuration.grid.bdsGrid,
>>   						  stats, yGain);
>>   		double extraGain = std::min(10.0, yTarget / (yValue + .001));
>> diff --git a/src/ipa/ipu3/algorithms/agc.h b/src/ipa/ipu3/algorithms/agc.h
>> index ad705605..31420841 100644
>> --- a/src/ipa/ipu3/algorithms/agc.h
>> +++ b/src/ipa/ipu3/algorithms/agc.h
>> @@ -36,7 +36,7 @@ private:
>>   	utils::Duration filterExposure(utils::Duration currentExposure);
>>   	void computeExposure(IPAContext &context, double yGain,
>>   			     double iqMeanGain);
>> -	double estimateLuminance(IPAFrameContext &frameContext,
>> +	double estimateLuminance(IPAActiveState &activeState,
>>   				 const ipu3_uapi_grid_config &grid,
>>   				 const ipu3_uapi_stats_3a *stats,
>>   				 double gain);
>> diff --git a/src/ipa/ipu3/algorithms/awb.cpp b/src/ipa/ipu3/algorithms/awb.cpp
>> index 87a6cc7a..ab6924eb 100644
>> --- a/src/ipa/ipu3/algorithms/awb.cpp
>> +++ b/src/ipa/ipu3/algorithms/awb.cpp
>> @@ -396,10 +396,10 @@ void Awb::process(IPAContext &context, const ipu3_uapi_stats_3a *stats)
>>   	 * The results are cached, so if no results were calculated, we set the
>>   	 * cached values from asyncResults_ here.
>>   	 */
>> -	context.frameContext.awb.gains.blue = asyncResults_.blueGain;
>> -	context.frameContext.awb.gains.green = asyncResults_.greenGain;
>> -	context.frameContext.awb.gains.red = asyncResults_.redGain;
>> -	context.frameContext.awb.temperatureK = asyncResults_.temperatureK;
>> +	context.activeState.awb.gains.blue = asyncResults_.blueGain;
>> +	context.activeState.awb.gains.green = asyncResults_.greenGain;
>> +	context.activeState.awb.gains.red = asyncResults_.redGain;
>> +	context.activeState.awb.temperatureK = asyncResults_.temperatureK;
>>   }
>>
>>   constexpr uint16_t Awb::threshold(float value)
>> @@ -450,10 +450,10 @@ void Awb::prepare(IPAContext &context, ipu3_uapi_params *params)
>>   	params->acc_param.bnr.opt_center_sqr.y_sqr_reset = params->acc_param.bnr.opt_center.y_reset
>>   							* params->acc_param.bnr.opt_center.y_reset;
>>   	/* Convert to u3.13 fixed point values */
>> -	params->acc_param.bnr.wb_gains.gr = 8192 * context.frameContext.awb.gains.green;
>> -	params->acc_param.bnr.wb_gains.r  = 8192 * context.frameContext.awb.gains.red;
>> -	params->acc_param.bnr.wb_gains.b  = 8192 * context.frameContext.awb.gains.blue;
>> -	params->acc_param.bnr.wb_gains.gb = 8192 * context.frameContext.awb.gains.green;
>> +	params->acc_param.bnr.wb_gains.gr = 8192 * context.activeState.awb.gains.green;
>> +	params->acc_param.bnr.wb_gains.r  = 8192 * context.activeState.awb.gains.red;
>> +	params->acc_param.bnr.wb_gains.b  = 8192 * context.activeState.awb.gains.blue;
>> +	params->acc_param.bnr.wb_gains.gb = 8192 * context.activeState.awb.gains.green;
>>
>>   	LOG(IPU3Awb, Debug) << "Color temperature estimated: " << asyncResults_.temperatureK;
>>
>> diff --git a/src/ipa/ipu3/algorithms/tone_mapping.cpp b/src/ipa/ipu3/algorithms/tone_mapping.cpp
>> index 2040eda5..7c78d0d9 100644
>> --- a/src/ipa/ipu3/algorithms/tone_mapping.cpp
>> +++ b/src/ipa/ipu3/algorithms/tone_mapping.cpp
>> @@ -42,7 +42,7 @@ int ToneMapping::configure(IPAContext &context,
>>   			   [[maybe_unused]] const IPAConfigInfo &configInfo)
>>   {
>>   	/* Initialise tone mapping gamma value. */
>> -	context.frameContext.toneMapping.gamma = 0.0;
>> +	context.activeState.toneMapping.gamma = 0.0;
>>
>>   	return 0;
>>   }
>> @@ -60,7 +60,7 @@ void ToneMapping::prepare([[maybe_unused]] IPAContext &context,
>>   {
>>   	/* Copy the calculated LUT into the parameters buffer. */
>>   	memcpy(params->acc_param.gamma.gc_lut.lut,
>> -	       context.frameContext.toneMapping.gammaCorrection.lut,
>> +	       context.activeState.toneMapping.gammaCorrection.lut,
>>   	       IPU3_UAPI_GAMMA_CORR_LUT_ENTRIES *
>>   	       sizeof(params->acc_param.gamma.gc_lut.lut[0]));
>>
>> @@ -87,11 +87,11 @@ void ToneMapping::process(IPAContext &context,
>>   	 */
>>   	gamma_ = 1.1;
>>
>> -	if (context.frameContext.toneMapping.gamma == gamma_)
>> +	if (context.activeState.toneMapping.gamma == gamma_)
>>   		return;
>>
>>   	struct ipu3_uapi_gamma_corr_lut &lut =
>> -		context.frameContext.toneMapping.gammaCorrection;
>> +		context.activeState.toneMapping.gammaCorrection;
>>
>>   	for (uint32_t i = 0; i < std::size(lut.lut); i++) {
>>   		double j = static_cast<double>(i) / (std::size(lut.lut) - 1);
>> @@ -101,7 +101,7 @@ void ToneMapping::process(IPAContext &context,
>>   		lut.lut[i] = gamma * 8191;
>>   	}
>>
>> -	context.frameContext.toneMapping.gamma = gamma_;
>> +	context.activeState.toneMapping.gamma = gamma_;
>>   }
>>
>>   } /* namespace ipa::ipu3::algorithms */
>> diff --git a/src/ipa/ipu3/ipa_context.cpp b/src/ipa/ipu3/ipa_context.cpp
>> index b1570dde..06eb2776 100644
>> --- a/src/ipa/ipu3/ipa_context.cpp
>> +++ b/src/ipa/ipu3/ipa_context.cpp
>> @@ -24,9 +24,20 @@ namespace libcamera::ipa::ipu3 {
>>    * may also be updated in the start() operation.
>>    */
>>
>> +/**
>> + * \struct IPAActiveState
>> + * \brief The active state of the IPA algorithms
>> + *
>> + * The IPA is fed with the statistics generated from the latest frame captured
>> + * by the hardware. The statistics are then processed by the IPA algorithms to
>> + * compute ISP parameters required for the next frame capture. The current state
>> + * of the algorithms is reflected through IPAActiveState. The latest computed
>> + * values by the IPA algorithms are stored in this structure.
>> + */
>> +
>>   /**
>>    * \struct IPAFrameContext
>> - * \brief Per-frame context for algorithms
>> + * \brief Context for a frame
>>    *
>>    * The frame context stores data specific to a single frame processed by the
>>    * IPA. Each frame processed by the IPA has a context associated with it,
>> @@ -34,9 +45,10 @@ namespace libcamera::ipa::ipu3 {
>>    *
>>    * \todo Detail how to access contexts for a particular frame
>>    *
>> - * Each of the fields in the frame context belongs to either a specific
>> - * algorithm, or to the top-level IPA module. A field may be read by any
>> - * algorithm, but should only be written by its owner.
>> + * Fields in the frame context should reflect the values with which the frame
>> + * was processed on the hardware ISP. A field may be read by any algorithm as
> Isn't the context storing the sensor parameters rather than the ISP
> configuration ?


You are correct! I'll update the doc to replace this with the sensor controls.

>
> Rogue 'a' at the end of the phrase


Are you pointing to "A field may be read by any algorithm..."?
Or something else? (Sorry, I can't spot the rogue 'a' here...)

>
>> + * and when required, or can be used to prepare the metadata container of the
> Also "and" seems not required ?
>
> Just "metadata for the frame" ?


Ack.

>
>
>> + * frame.
>>    */
>>
>>   /**
>> @@ -49,10 +61,10 @@ namespace libcamera::ipa::ipu3 {
>>    * \var IPAContext::frameContext
>>    * \brief The frame context for the frame being processed
>>    *
>> - * \todo While the frame context is supposed to be per-frame, this
>> - * single frame context stores data related to both the current frame
>> - * and the previous frames, with fields being updated as the algorithms
>> - * are run. This needs to be turned into real per-frame data storage.
>> + * \var IPAContext::activeState
>> + * \brief The current state of IPA algorithms
>> + *
>> + * \todo The frame context needs to be turned into real per-frame data storage.
>>    */
>>
>>   /**
>> @@ -78,17 +90,17 @@ namespace libcamera::ipa::ipu3 {
>>    */
>>
>>   /**
>> - * \var IPAFrameContext::af
>> + * \var IPAActiveState::af
>>    * \brief Context for the Automatic Focus algorithm
>>    *
>> - * \struct  IPAFrameContext::af
>> - * \var IPAFrameContext::af.focus
>> + * \struct IPAActiveState::af
>> + * \var IPAActiveState::af.focus
>>    * \brief Current position of the lens
>>    *
>> - * \var IPAFrameContext::af.maxVariance
>> + * \var IPAActiveState::af.maxVariance
>>    * \brief The maximum variance of the current image.
>>    *
>> - * \var IPAFrameContext::af.stable
>> + * \var IPAActiveState::af.stable
>>    * \brief It is set to true, if the best focus is found.
>>    */
>>
>> @@ -121,64 +133,64 @@ namespace libcamera::ipa::ipu3 {
>>    */
>>
>>   /**
>> - * \var IPAFrameContext::agc
>> + * \var IPAActiveState::agc
>>    * \brief Context for the Automatic Gain Control algorithm
>>    *
>>    * The exposure and gain determined are expected to be applied to the sensor
>>    * at the earliest opportunity.
>>    *
>> - * \var IPAFrameContext::agc.exposure
>> + * \var IPAActiveState::agc.exposure
>>    * \brief Exposure time expressed as a number of lines
>>    *
>> - * \var IPAFrameContext::agc.gain
>> + * \var IPAActiveState::agc.gain
>>    * \brief Analogue gain multiplier
>>    *
>>    * The gain should be adapted to the sensor specific gain code before applying.
>>    */
>>
>>   /**
>> - * \var IPAFrameContext::awb
>> + * \var IPAActiveState::awb
>>    * \brief Context for the Automatic White Balance algorithm
>>    *
>> - * \struct IPAFrameContext::awb.gains
>> + * \struct IPAActiveState::awb.gains
>>    * \brief White balance gains
>>    *
>> - * \var IPAFrameContext::awb.gains.red
>> + * \var IPAActiveState::awb.gains.red
>>    * \brief White balance gain for R channel
>>    *
>> - * \var IPAFrameContext::awb.gains.green
>> + * \var IPAActiveState::awb.gains.green
>>    * \brief White balance gain for G channel
>>    *
>> - * \var IPAFrameContext::awb.gains.blue
>> + * \var IPAActiveState::awb.gains.blue
>>    * \brief White balance gain for B channel
>>    *
>> - * \var IPAFrameContext::awb.temperatureK
>> + * \var IPAActiveState::awb.temperatureK
>>    * \brief Estimated color temperature
>>    */
>>
>>   /**
>> - * \var IPAFrameContext::sensor
>> - * \brief Effective sensor values
>> - *
>> - * \var IPAFrameContext::sensor.exposure
>> - * \brief Exposure time expressed as a number of lines
>> - *
>> - * \var IPAFrameContext::sensor.gain
>> - * \brief Analogue gain multiplier
>> - */
>> -
>> -/**
>> - * \var IPAFrameContext::toneMapping
>> + * \var IPAActiveState::toneMapping
>>    * \brief Context for ToneMapping and Gamma control
>>    *
>> - * \var IPAFrameContext::toneMapping.gamma
>> + * \var IPAActiveState::toneMapping.gamma
>>    * \brief Gamma value for the LUT
>>    *
>> - * \var IPAFrameContext::toneMapping.gammaCorrection
>> + * \var IPAActiveState::toneMapping.gammaCorrection
>>    * \brief Per-pixel tone mapping implemented as a LUT
>>    *
>>    * The LUT structure is defined by the IPU3 kernel interface. See
>>    * <linux/intel-ipu3.h> struct ipu3_uapi_gamma_corr_lut for further details.
>>    */
>>
>> +/**
>> + * \var IPAFrameContext::sensor
>> + * \brief Effective sensor values that were applied for the frame
>> + *
>> + * \var IPAFrameContext::sensor.exposure
>> + * \brief Exposure time expressed as a number of lines
>> + *
>> + * \var IPAFrameContext::sensor.gain
>> + * \brief Analogue gain multiplier
> Is this the gain code as the V4L2 controls expects it ?
> Is it worth mentioning a unit here ?

Well, I am using the already documented sensor.gain (just moving it here)...

Never mind, I'll check for correctness.

No, this is not the gainCode, but the gain value returned by 
CameraSensorHelper::gain(gainCode).

So, we query the gainCode from the sensorControls (that were applied on
the sensor when the frame was captured), run it through the camera sensor
helper and set it as:

IPAFrameContext.sensor.gain = camHelper->gain(gainCode)

So, after discussing with Jean-Michel, this is the "decoded" gain.
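
To illustrate (a rough sketch only, assuming the ControlList of effective
V4L2 controls is available as sensorControls and camHelper_ from ipu3.cpp
is in scope):

	/*
	 * Decode the analogue gain applied on the sensor for this frame.
	 * The V4L2 control carries the raw gain code, while the frame
	 * context stores the linear multiplier returned by the camera
	 * sensor helper.
	 */
	int32_t gainCode = sensorControls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
	context_.frameContext.sensor.gain = camHelper_->gain(gainCode);
	context_.frameContext.sensor.exposure =
		sensorControls.get(V4L2_CID_EXPOSURE).get<int32_t>();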

I am now starting to think we should align the definitions around gain:
one is the coded gain (aka gainCode) and the other is just the gain.
What we have right now is "gain code", "real gain", "gain multiplier"...
I think it has been confusing to follow. Can I sort this out in an
out-of-series patch on top?


>
> All minors:
> Reviewed-by: Jacopo Mondi <jacopo at jmondi.org>


Thanks!

>
> Thanks
>     j
>
>> + */
>> +
>>   } /* namespace libcamera::ipa::ipu3 */
>> diff --git a/src/ipa/ipu3/ipa_context.h b/src/ipa/ipu3/ipa_context.h
>> index 103498ef..8d681131 100644
>> --- a/src/ipa/ipu3/ipa_context.h
>> +++ b/src/ipa/ipu3/ipa_context.h
>> @@ -42,7 +42,7 @@ struct IPASessionConfiguration {
>>   	} sensor;
>>   };
>>
>> -struct IPAFrameContext {
>> +struct IPAActiveState {
>>   	struct {
>>   		uint32_t focus;
>>   		double maxVariance;
>> @@ -64,19 +64,23 @@ struct IPAFrameContext {
>>   		double temperatureK;
>>   	} awb;
>>
>> -	struct {
>> -		uint32_t exposure;
>> -		double gain;
>> -	} sensor;
>> -
>>   	struct {
>>   		double gamma;
>>   		struct ipu3_uapi_gamma_corr_lut gammaCorrection;
>>   	} toneMapping;
>>   };
>>
>> +struct IPAFrameContext {
>> +	struct {
>> +		uint32_t exposure;
>> +		double gain;
>> +	} sensor;
>> +};
>> +
>>   struct IPAContext {
>>   	IPASessionConfiguration configuration;
>> +	IPAActiveState activeState;
>> +
>>   	IPAFrameContext frameContext;
>>   };
>>
>> diff --git a/src/ipa/ipu3/ipu3.cpp b/src/ipa/ipu3/ipu3.cpp
>> index dd6cfd79..3b4fc911 100644
>> --- a/src/ipa/ipu3/ipu3.cpp
>> +++ b/src/ipa/ipu3/ipu3.cpp
>> @@ -454,7 +454,8 @@ int IPAIPU3::configure(const IPAConfigInfo &configInfo,
>>
>>   	calculateBdsGrid(configInfo.bdsOutputSize);
>>
>> -	/* Clean frameContext at each reconfiguration. */
>> +	/* Clean IPAActiveState at each reconfiguration. */
>> +	context_.activeState = {};
>>   	context_.frameContext = {};
>>
>>   	if (!validateSensorControls()) {
>> @@ -585,7 +586,7 @@ void IPAIPU3::processStatsBuffer(const uint32_t frame,
>>
>>   	ctrls.set(controls::AnalogueGain, context_.frameContext.sensor.gain);
>>
>> -	ctrls.set(controls::ColourTemperature, context_.frameContext.awb.temperatureK);
>> +	ctrls.set(controls::ColourTemperature, context_.activeState.awb.temperatureK);
>>
>>   	ctrls.set(controls::ExposureTime, context_.frameContext.sensor.exposure * lineDuration);
>>
>> @@ -623,8 +624,8 @@ void IPAIPU3::queueRequest([[maybe_unused]] const uint32_t frame,
>>    */
>>   void IPAIPU3::setControls(unsigned int frame)
>>   {
>> -	int32_t exposure = context_.frameContext.agc.exposure;
>> -	int32_t gain = camHelper_->gainCode(context_.frameContext.agc.gain);
>> +	int32_t exposure = context_.activeState.agc.exposure;
>> +	int32_t gain = camHelper_->gainCode(context_.activeState.agc.gain);
>>
>>   	ControlList ctrls(sensorCtrls_);
>>   	ctrls.set(V4L2_CID_EXPOSURE, exposure);
>> @@ -632,7 +633,7 @@ void IPAIPU3::setControls(unsigned int frame)
>>
>>   	ControlList lensCtrls(lensCtrls_);
>>   	lensCtrls.set(V4L2_CID_FOCUS_ABSOLUTE,
>> -		      static_cast<int32_t>(context_.frameContext.af.focus));
>> +		      static_cast<int32_t>(context_.activeState.af.focus));
>>
>>   	setSensorControls.emit(frame, ctrls, lensCtrls);
>>   }
>> --
>> 2.31.0
>>

