[libcamera-devel] [PATCH v2 1/4] pipeline: ipa: raspberrypi: Pass exposure/gain values to IPA through controls

Naushir Patuck naush at raspberrypi.com
Wed Feb 17 13:50:05 CET 2021


When running with sensors that do not provide embedded data, the pipeline
handler would fill a dummy embedded data buffer with the gain/exposure
values and pass this buffer to the IPA together with the bayer buffer.
The IPA would then extract these values for use in the controller
algorithms.

Rework this logic entirely by replacing the existing bayer queue with a
new RPiCameraData::BayerFrame queue. In addition to the FrameBuffer
pointer, each entry in this queue also stores, in a ControlList, all the
controls tracked by DelayedControls for that frame, including the
exposure and gain values. When signalling the
RPi::IPA_EVENT_SIGNAL_ISP_PREPARE IPA event, the pipeline handler now
passes this ControlList from the RPiCameraData::BayerFrame queue to the
IPA.
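
In outline, the pipeline handler side of this change looks as follows
(abridged from the hunks below):

	struct BayerFrame {
		FrameBuffer *buffer;
		ControlList controls;
	};

	std::queue<BayerFrame> bayerQueue_;

	/* On dequeuing an image buffer from Unicam, store it together with
	 * the sensor controls reported for that frame sequence. */
	ControlList ctrl = delayedCtrls_->get(buffer->metadata().sequence);
	bayerQueue_.push({ buffer, std::move(ctrl) });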

The IPA now extracts the gain and exposure values from this ControlList
instead of using RPiController::MdParserRPi to parse the dummy embedded
data buffer.
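
On the IPA side, when no embedded data buffer is present (or parsing it
fails), the exposure and gain are recovered from the ControlList roughly
like this (abridged from the hunks below):

	int32_t exposureLines = data.controls.get(V4L2_CID_EXPOSURE).get<int32_t>();
	int32_t gainCode = data.controls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
	fillDeviceStatus(exposureLines, gainCode, deviceStatus);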

Signed-off-by: Naushir Patuck <naush at raspberrypi.com>
---
 include/libcamera/ipa/raspberrypi.mojom       |   2 +
 src/ipa/raspberrypi/raspberrypi.cpp           | 132 +++++++++++-------
 .../pipeline/raspberrypi/raspberrypi.cpp      |  58 ++++----
 3 files changed, 108 insertions(+), 84 deletions(-)

diff --git a/include/libcamera/ipa/raspberrypi.mojom b/include/libcamera/ipa/raspberrypi.mojom
index 5a27b1e4fc2d..f733a2cd2a40 100644
--- a/include/libcamera/ipa/raspberrypi.mojom
+++ b/include/libcamera/ipa/raspberrypi.mojom
@@ -29,6 +29,8 @@ struct SensorConfig {
 struct ISPConfig {
 	uint32 embeddedBufferId;
 	uint32 bayerBufferId;
+	bool embeddedBufferPresent;
+	ControlList controls;
 };
 
 struct ConfigInput {
diff --git a/src/ipa/raspberrypi/raspberrypi.cpp b/src/ipa/raspberrypi/raspberrypi.cpp
index 1226ea514521..f9ab6417866e 100644
--- a/src/ipa/raspberrypi/raspberrypi.cpp
+++ b/src/ipa/raspberrypi/raspberrypi.cpp
@@ -1,6 +1,6 @@
 /* SPDX-License-Identifier: BSD-2-Clause */
 /*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Ltd.
  *
  * rpi.cpp - Raspberry Pi Image Processing Algorithms
  */
@@ -101,9 +101,11 @@ private:
 	bool validateIspControls();
 	void queueRequest(const ControlList &controls);
 	void returnEmbeddedBuffer(unsigned int bufferId);
-	void prepareISP(unsigned int bufferId);
+	void prepareISP(const ipa::RPi::ISPConfig &data);
 	void reportMetadata();
 	bool parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &deviceStatus);
+	void fillDeviceStatus(uint32_t exposureLines, uint32_t gainCode,
+			      struct DeviceStatus &deviceStatus);
 	void processStats(unsigned int bufferId);
 	void applyFrameDurations(double minFrameDuration, double maxFrameDuration);
 	void applyAGC(const struct AgcStatus *agcStatus, ControlList &ctrls);
@@ -447,7 +449,7 @@ void IPARPi::signalIspPrepare(const ipa::RPi::ISPConfig &data)
 	 * avoid running the control algos for a few frames in case
 	 * they are "unreliable".
 	 */
-	prepareISP(data.embeddedBufferId);
+	prepareISP(data);
 	frameCount_++;
 
 	/* Ready to push the input buffer into the ISP. */
@@ -909,67 +911,84 @@ void IPARPi::returnEmbeddedBuffer(unsigned int bufferId)
 	embeddedComplete.emit(bufferId & ipa::RPi::MaskID);
 }
 
-void IPARPi::prepareISP(unsigned int bufferId)
+void IPARPi::prepareISP(const ipa::RPi::ISPConfig &data)
 {
 	struct DeviceStatus deviceStatus = {};
-	bool success = parseEmbeddedData(bufferId, deviceStatus);
+	bool success = false;
 
-	/* Done with embedded data now, return to pipeline handler asap. */
-	returnEmbeddedBuffer(bufferId);
+	if (data.embeddedBufferPresent) {
+		/*
+		 * Pipeline handler has supplied us with an embedded data buffer,
+		 * so parse it and extract the exposure and gain.
+		 */
+		success = parseEmbeddedData(data.embeddedBufferId, deviceStatus);
 
-	if (success) {
-		ControlList ctrls(ispCtrls_);
+		/* Done with embedded data now, return to pipeline handler asap. */
+		returnEmbeddedBuffer(data.embeddedBufferId);
+	}
 
-		rpiMetadata_.Clear();
-		rpiMetadata_.Set("device.status", deviceStatus);
-		controller_.Prepare(&rpiMetadata_);
+	if (!success) {
+		/*
+		 * Pipeline handler has not supplied an embedded data buffer,
+		 * or embedded data buffer parsing has failed for some reason,
+		 * so pull the exposure and gain values from the control list.
+		 */
+		int32_t exposureLines = data.controls.get(V4L2_CID_EXPOSURE).get<int32_t>();
+		int32_t gainCode = data.controls.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
+		fillDeviceStatus(exposureLines, gainCode, deviceStatus);
+	}
 
-		/* Lock the metadata buffer to avoid constant locks/unlocks. */
-		std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
+	ControlList ctrls(ispCtrls_);
 
-		AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
-		if (awbStatus)
-			applyAWB(awbStatus, ctrls);
+	rpiMetadata_.Clear();
+	rpiMetadata_.Set("device.status", deviceStatus);
+	controller_.Prepare(&rpiMetadata_);
 
-		CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
-		if (ccmStatus)
-			applyCCM(ccmStatus, ctrls);
+	/* Lock the metadata buffer to avoid constant locks/unlocks. */
+	std::unique_lock<RPiController::Metadata> lock(rpiMetadata_);
 
-		AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
-		if (dgStatus)
-			applyDG(dgStatus, ctrls);
+	AwbStatus *awbStatus = rpiMetadata_.GetLocked<AwbStatus>("awb.status");
+	if (awbStatus)
+		applyAWB(awbStatus, ctrls);
 
-		AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
-		if (lsStatus)
-			applyLS(lsStatus, ctrls);
+	CcmStatus *ccmStatus = rpiMetadata_.GetLocked<CcmStatus>("ccm.status");
+	if (ccmStatus)
+		applyCCM(ccmStatus, ctrls);
 
-		ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
-		if (contrastStatus)
-			applyGamma(contrastStatus, ctrls);
+	AgcStatus *dgStatus = rpiMetadata_.GetLocked<AgcStatus>("agc.status");
+	if (dgStatus)
+		applyDG(dgStatus, ctrls);
 
-		BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
-		if (blackLevelStatus)
-			applyBlackLevel(blackLevelStatus, ctrls);
+	AlscStatus *lsStatus = rpiMetadata_.GetLocked<AlscStatus>("alsc.status");
+	if (lsStatus)
+		applyLS(lsStatus, ctrls);
 
-		GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
-		if (geqStatus)
-			applyGEQ(geqStatus, ctrls);
+	ContrastStatus *contrastStatus = rpiMetadata_.GetLocked<ContrastStatus>("contrast.status");
+	if (contrastStatus)
+		applyGamma(contrastStatus, ctrls);
 
-		DenoiseStatus *denoiseStatus = rpiMetadata_.GetLocked<DenoiseStatus>("denoise.status");
-		if (denoiseStatus)
-			applyDenoise(denoiseStatus, ctrls);
+	BlackLevelStatus *blackLevelStatus = rpiMetadata_.GetLocked<BlackLevelStatus>("black_level.status");
+	if (blackLevelStatus)
+		applyBlackLevel(blackLevelStatus, ctrls);
 
-		SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
-		if (sharpenStatus)
-			applySharpen(sharpenStatus, ctrls);
+	GeqStatus *geqStatus = rpiMetadata_.GetLocked<GeqStatus>("geq.status");
+	if (geqStatus)
+		applyGEQ(geqStatus, ctrls);
 
-		DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
-		if (dpcStatus)
-			applyDPC(dpcStatus, ctrls);
+	DenoiseStatus *denoiseStatus = rpiMetadata_.GetLocked<DenoiseStatus>("denoise.status");
+	if (denoiseStatus)
+		applyDenoise(denoiseStatus, ctrls);
 
-		if (!ctrls.empty())
-			setIspControls.emit(ctrls);
-	}
+	SharpenStatus *sharpenStatus = rpiMetadata_.GetLocked<SharpenStatus>("sharpen.status");
+	if (sharpenStatus)
+		applySharpen(sharpenStatus, ctrls);
+
+	DpcStatus *dpcStatus = rpiMetadata_.GetLocked<DpcStatus>("dpc.status");
+	if (dpcStatus)
+		applyDPC(dpcStatus, ctrls);
+
+	if (!ctrls.empty())
+		setIspControls.emit(ctrls);
 }
 
 bool IPARPi::parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &deviceStatus)
@@ -985,6 +1004,7 @@ bool IPARPi::parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &devic
 	RPiController::MdParser::Status status = helper_->Parser().Parse(mem.data());
 	if (status != RPiController::MdParser::Status::OK) {
 		LOG(IPARPI, Error) << "Embedded Buffer parsing failed, error " << status;
+		return false;
 	} else {
 		uint32_t exposureLines, gainCode;
 		if (helper_->Parser().GetExposureLines(exposureLines) != RPiController::MdParser::Status::OK) {
@@ -992,21 +1012,29 @@ bool IPARPi::parseEmbeddedData(unsigned int bufferId, struct DeviceStatus &devic
 			return false;
 		}
 
-		deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
 		if (helper_->Parser().GetGainCode(gainCode) != RPiController::MdParser::Status::OK) {
 			LOG(IPARPI, Error) << "Gain failed";
 			return false;
 		}
 
-		deviceStatus.analogue_gain = helper_->Gain(gainCode);
-		LOG(IPARPI, Debug) << "Metadata - Exposure : "
-				   << deviceStatus.shutter_speed << " Gain : "
-				   << deviceStatus.analogue_gain;
+		fillDeviceStatus(exposureLines, gainCode, deviceStatus);
 	}
 
 	return true;
 }
 
+void IPARPi::fillDeviceStatus(uint32_t exposureLines, uint32_t gainCode,
+			      struct DeviceStatus &deviceStatus)
+{
+	deviceStatus.shutter_speed = helper_->Exposure(exposureLines);
+	deviceStatus.analogue_gain = helper_->Gain(gainCode);
+
+	LOG(IPARPI, Debug) << "Metadata - Exposure : "
+			   << deviceStatus.shutter_speed
+			   << " Gain : "
+			   << deviceStatus.analogue_gain;
+}
+
 void IPARPi::processStats(unsigned int bufferId)
 {
 	auto it = buffers_.find(bufferId);
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index ba74ace183db..b43d86166c63 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -1,6 +1,6 @@
 /* SPDX-License-Identifier: LGPL-2.1-or-later */
 /*
- * Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
+ * Copyright (C) 2019-2021, Raspberry Pi (Trading) Ltd.
  *
  * raspberrypi.cpp - Pipeline handler for Raspberry Pi devices
  */
@@ -197,7 +197,13 @@ public:
 	 */
 	enum class State { Stopped, Idle, Busy, IpaComplete };
 	State state_;
-	std::queue<FrameBuffer *> bayerQueue_;
+
+	struct BayerFrame {
+		FrameBuffer *buffer;
+		ControlList controls;
+	};
+
+	std::queue<BayerFrame> bayerQueue_;
 	std::queue<FrameBuffer *> embeddedQueue_;
 	std::deque<Request *> requestQueue_;
 
@@ -222,7 +228,7 @@ public:
 private:
 	void checkRequestCompleted();
 	void tryRunPipeline();
-	bool findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *&embeddedBuffer);
+	bool findMatchingBuffers(BayerFrame &bayerFrame, FrameBuffer *&embeddedBuffer);
 
 	unsigned int ispOutputCount_;
 };
@@ -1383,29 +1389,14 @@ void RPiCameraData::unicamBufferDequeue(FrameBuffer *buffer)
 			<< ", timestamp: " << buffer->metadata().timestamp;
 
 	if (stream == &unicam_[Unicam::Image]) {
-		bayerQueue_.push(buffer);
-	} else {
-		embeddedQueue_.push(buffer);
-
-		ControlList ctrl = delayedCtrls_->get(buffer->metadata().sequence);
-
 		/*
-		 * Sensor metadata is unavailable, so put the expected ctrl
-		 * values (accounting for the staggered delays) into the empty
-		 * metadata buffer.
+		 * Look up the sensor controls used for this frame sequence from
+		 * DelayedControls and queue them along with the frame buffer.
 		 */
-		if (!sensorMetadata_) {
-			unsigned int bufferId = unicam_[Unicam::Embedded].getBufferId(buffer);
-			auto it = mappedEmbeddedBuffers_.find(bufferId);
-			if (it != mappedEmbeddedBuffers_.end()) {
-				uint32_t *mem = reinterpret_cast<uint32_t *>(it->second.maps()[0].data());
-				mem[0] = ctrl.get(V4L2_CID_EXPOSURE).get<int32_t>();
-				mem[1] = ctrl.get(V4L2_CID_ANALOGUE_GAIN).get<int32_t>();
-			} else {
-				LOG(RPI, Warning) << "Failed to find embedded buffer "
-						  << bufferId;
-			}
-		}
+		ControlList ctrl = delayedCtrls_->get(buffer->metadata().sequence);
+		bayerQueue_.push({ buffer, std::move(ctrl) });
+	} else {
+		embeddedQueue_.push(buffer);
 	}
 
 	handleState();
@@ -1656,14 +1647,15 @@ void RPiCameraData::applyScalerCrop(const ControlList &controls)
 
 void RPiCameraData::tryRunPipeline()
 {
-	FrameBuffer *bayerBuffer, *embeddedBuffer;
+	FrameBuffer *embeddedBuffer;
+	BayerFrame bayerFrame;
 
 	/* If any of our request or buffer queues are empty, we cannot proceed. */
 	if (state_ != State::Idle || requestQueue_.empty() ||
 	    bayerQueue_.empty() || embeddedQueue_.empty())
 		return;
 
-	if (!findMatchingBuffers(bayerBuffer, embeddedBuffer))
+	if (!findMatchingBuffers(bayerFrame, embeddedBuffer))
 		return;
 
 	/* Take the first request from the queue and action the IPA. */
@@ -1682,7 +1674,7 @@ void RPiCameraData::tryRunPipeline()
 	/* Set our state to say the pipeline is active. */
 	state_ = State::Busy;
 
-	unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerBuffer);
+	unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerFrame.buffer);
 	unsigned int embeddedId = unicam_[Unicam::Embedded].getBufferId(embeddedBuffer);
 
 	LOG(RPI, Debug) << "Signalling signalIspPrepare:"
@@ -1692,24 +1684,26 @@ void RPiCameraData::tryRunPipeline()
 	ipa::RPi::ISPConfig ispPrepare;
 	ispPrepare.embeddedBufferId = ipa::RPi::MaskEmbeddedData | embeddedId;
 	ispPrepare.bayerBufferId = ipa::RPi::MaskBayerData | bayerId;
+	ispPrepare.embeddedBufferPresent = true;
+	ispPrepare.controls = std::move(bayerFrame.controls);
 	ipa_->signalIspPrepare(ispPrepare);
 }
 
-bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *&embeddedBuffer)
+bool RPiCameraData::findMatchingBuffers(BayerFrame &bayerFrame, FrameBuffer *&embeddedBuffer)
 {
 	unsigned int embeddedRequeueCount = 0, bayerRequeueCount = 0;
 
 	/* Loop until we find a matching bayer and embedded data buffer. */
 	while (!bayerQueue_.empty()) {
 		/* Start with the front of the bayer queue. */
-		bayerBuffer = bayerQueue_.front();
+		bayerFrame = bayerQueue_.front();
 
 		/*
 		 * Find the embedded data buffer with a matching timestamp to pass to
 		 * the IPA. Any embedded buffers with a timestamp lower than the
 		 * current bayer buffer will be removed and re-queued to the driver.
 		 */
-		uint64_t ts = bayerBuffer->metadata().timestamp;
+		uint64_t ts = bayerFrame.buffer->metadata().timestamp;
 		embeddedBuffer = nullptr;
 		while (!embeddedQueue_.empty()) {
 			FrameBuffer *b = embeddedQueue_.front();
@@ -1739,7 +1733,7 @@ bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *
 				 * the front of the queue. This buffer is now orphaned, so requeue
 				 * it back to the device.
 				 */
-				unicam_[Unicam::Image].queueBuffer(bayerQueue_.front());
+				unicam_[Unicam::Image].queueBuffer(bayerQueue_.front().buffer);
 				bayerQueue_.pop();
 				bayerRequeueCount++;
 				LOG(RPI, Warning) << "Dropping unmatched input frame in stream "
@@ -1757,7 +1751,7 @@ bool RPiCameraData::findMatchingBuffers(FrameBuffer *&bayerBuffer, FrameBuffer *
 
 				LOG(RPI, Warning) << "Flushing bayer stream!";
 				while (!bayerQueue_.empty()) {
-					unicam_[Unicam::Image].queueBuffer(bayerQueue_.front());
+					unicam_[Unicam::Image].queueBuffer(bayerQueue_.front().buffer);
 					bayerQueue_.pop();
 				}
 				flushedBuffers = true;
-- 
2.25.1
