[libcamera-devel] [PATCH 3/6] libcamera: pipeline: Raspberry Pi pipeline handler

Laurent Pinchart laurent.pinchart at ideasonboard.com
Mon May 4 11:28:26 CEST 2020


From: Naushir Patuck <naush at raspberrypi.com>

Initial implementation of the Raspberry Pi (BCM2835) ISP pipeline
handler.

All code is licensed under the BSD-2-Clause terms.
Copyright (c) 2019-2020 Raspberry Pi Trading Ltd.

Signed-off-by: Naushir Patuck <naush at raspberrypi.com>
Signed-off-by: Laurent Pinchart <laurent.pinchart at ideasonboard.com>
---
 include/ipa/raspberrypi.h                     |   58 +
 .../pipeline/raspberrypi/meson.build          |    3 +
 .../pipeline/raspberrypi/raspberrypi.cpp      | 1598 +++++++++++++++++
 .../pipeline/raspberrypi/staggered_ctrl.h     |  236 +++
 src/libcamera/pipeline/raspberrypi/vcsm.h     |  144 ++
 5 files changed, 2039 insertions(+)
 create mode 100644 include/ipa/raspberrypi.h
 create mode 100644 src/libcamera/pipeline/raspberrypi/meson.build
 create mode 100644 src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
 create mode 100644 src/libcamera/pipeline/raspberrypi/staggered_ctrl.h
 create mode 100644 src/libcamera/pipeline/raspberrypi/vcsm.h

diff --git a/include/ipa/raspberrypi.h b/include/ipa/raspberrypi.h
new file mode 100644
index 000000000000..3df56e8a1306
--- /dev/null
+++ b/include/ipa/raspberrypi.h
@@ -0,0 +1,58 @@
+/* SPDX-License-Identifier: LGPL-2.1-or-later */
+/*
+ * Copyright (C) 2019-2020, Raspberry Pi Ltd.
+ *
+ * raspberrypi.h - Image Processing Algorithm interface for Raspberry Pi
+ */
+#ifndef __LIBCAMERA_IPA_INTERFACE_RASPBERRYPI_H__
+#define __LIBCAMERA_IPA_INTERFACE_RASPBERRYPI_H__
+
+#include <libcamera/control_ids.h>
+#include <libcamera/controls.h>
+
+enum RPiOperations {
+	RPI_IPA_ACTION_V4L2_SET_STAGGERED = 1,
+	RPI_IPA_ACTION_V4L2_SET_ISP,
+	RPI_IPA_ACTION_STATS_METADATA_COMPLETE,
+	RPI_IPA_ACTION_RUN_ISP,
+	RPI_IPA_ACTION_RUN_ISP_AND_DROP_FRAME,
+	RPI_IPA_ACTION_SET_SENSOR_CONFIG,
+	RPI_IPA_ACTION_EMBEDDED_COMPLETE,
+	RPI_IPA_EVENT_SIGNAL_STAT_READY,
+	RPI_IPA_EVENT_SIGNAL_ISP_PREPARE,
+	RPI_IPA_EVENT_QUEUE_REQUEST,
+	RPI_IPA_EVENT_LS_TABLE_ALLOCATION,
+};
+
+enum RPiIpaMask {
+	ID = 0x0ffff,
+	STATS = 0x10000,
+	EMBEDDED_DATA = 0x20000,
+	BAYER_DATA = 0x40000
+};
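+
+/*
+ * Buffer IDs exchanged with the IPA are formed by OR-ing one of the mask
+ * bits above with the buffer cookie, e.g. (illustrative sketch, mirroring
+ * the usage in the pipeline handler below):
+ *
+ *   unsigned int id = RPiIpaMask::STATS | buffer->cookie();
+ *   unsigned int cookie = id & RPiIpaMask::ID;
+ */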
+
+/* Size of the LS grid allocation (32 KiB). */
+#define MAX_LS_GRID_SIZE (32 << 10)
+
+namespace libcamera {
+
+/* List of controls handled by the Raspberry Pi IPA */
+static const ControlInfoMap RPiControls = {
+	{ &controls::AeEnable, ControlInfo(false, true) },
+	{ &controls::ExposureTime, ControlInfo(0, 999999) },
+	{ &controls::AnalogueGain, ControlInfo(1.0f, 32.0f) },
+	{ &controls::AeMeteringMode, ControlInfo(0, static_cast<int32_t>(controls::MeteringModeMax)) },
+	{ &controls::AeConstraintMode, ControlInfo(0, static_cast<int32_t>(controls::ConstraintModeMax)) },
+	{ &controls::AeExposureMode, ControlInfo(0, static_cast<int32_t>(controls::ExposureModeMax)) },
+	{ &controls::ExposureValue, ControlInfo(0.0f, 16.0f) },
+	{ &controls::AwbEnable, ControlInfo(false, true) },
+	{ &controls::ColourGains, ControlInfo(0.0f, 32.0f) },
+	{ &controls::AwbMode, ControlInfo(0, static_cast<int32_t>(controls::AwbModeMax)) },
+	{ &controls::Brightness, ControlInfo(-1.0f, 1.0f) },
+	{ &controls::Contrast, ControlInfo(0.0f, 32.0f) },
+	{ &controls::Saturation, ControlInfo(0.0f, 32.0f) },
+};
+
+} /* namespace libcamera */
+
+#endif /* __LIBCAMERA_IPA_INTERFACE_RASPBERRYPI_H__ */
diff --git a/src/libcamera/pipeline/raspberrypi/meson.build b/src/libcamera/pipeline/raspberrypi/meson.build
new file mode 100644
index 000000000000..737857977831
--- /dev/null
+++ b/src/libcamera/pipeline/raspberrypi/meson.build
@@ -0,0 +1,3 @@
+libcamera_sources += files([
+    'raspberrypi.cpp'
+])
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
new file mode 100644
index 000000000000..1685081997e5
--- /dev/null
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -0,0 +1,1598 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2019-2020, Raspberry Pi (Trading) Ltd.
+ *
+ * raspberrypi.cpp - Pipeline handler for Raspberry Pi devices
+ */
+#include <algorithm>
+#include <array>
+#include <assert.h>
+#include <deque>
+#include <fcntl.h>
+#include <mutex>
+#include <queue>
+#include <set>
+#include <sys/mman.h>
+#include <unordered_map>
+
+#include <ipa/raspberrypi.h>
+#include <libcamera/camera.h>
+#include <libcamera/control_ids.h>
+#include <libcamera/logging.h>
+#include <libcamera/request.h>
+#include <libcamera/stream.h>
+
+#include <linux/drm_fourcc.h>
+#include <linux/videodev2.h>
+
+#include "camera_sensor.h"
+#include "device_enumerator.h"
+#include "ipa_manager.h"
+#include "media_device.h"
+#include "pipeline_handler.h"
+#include "staggered_ctrl.h"
+#include "utils.h"
+#include "v4l2_controls.h"
+#include "v4l2_videodevice.h"
+#include "vcsm.h"
+
+namespace libcamera {
+
+LOG_DEFINE_CATEGORY(RPI)
+
+using V4L2PixFmtMap = std::map<V4L2PixelFormat, std::vector<SizeRange>>;
+
+namespace {
+
+bool isRaw(const PixelFormat &pixFmt)
+{
+	/*
+	 * The isRaw test might be redundant right now, as the pipeline handler
+	 * only supports RAW sensors. Leave it in for now, just as a sanity
+	 * check.
+	 */
+	const PixelFormatInfo &info = PixelFormatInfo::info(pixFmt);
+	if (!info.isValid())
+		return false;
+
+	return info.colourEncoding == PixelFormatInfo::ColourEncodingRAW;
+}
+
+double scoreFormat(double desired, double actual)
+{
+	double score = desired - actual;
+	/* A mode larger than requested is penalised less than a smaller one. */
+	if (score < 0.0)
+		score = (-score) / 8;
+	/* Penalise non-exact matches. */
+	if (actual != desired)
+		score *= 2;
+
+	return score;
+}
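+
+/*
+ * Worked example: for a desired width of 1920, an actual width of 1280
+ * scores (1920 - 1280) * 2 = 1280, while an actual width of 2560 scores
+ * only (640 / 8) * 2 = 160, so modes larger than requested win out.
+ */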
+
+V4L2DeviceFormat findBestMode(V4L2PixFmtMap &formatsMap, const Size &req)
+{
+	double bestScore = 9e9, score;
+	V4L2DeviceFormat bestMode = {};
+
+#define PENALTY_AR		1500.0
+#define PENALTY_8BIT		2000.0
+#define PENALTY_10BIT		1000.0
+#define PENALTY_12BIT		   0.0
+#define PENALTY_UNPACKED	 500.0
+
+	/* Calculate the closest/best mode from the user requested size. */
+	for (const auto &iter : formatsMap) {
+		V4L2PixelFormat v4l2Format = iter.first;
+		PixelFormat pixelFormat = v4l2Format.toPixelFormat();
+		const PixelFormatInfo &info = PixelFormatInfo::info(pixelFormat);
+
+		for (const SizeRange &sz : iter.second) {
+			double modeWidth = sz.contains(req) ? req.width : sz.max.width;
+			double modeHeight = sz.contains(req) ? req.height : sz.max.height;
+			double reqAr = static_cast<double>(req.width) / req.height;
+			double modeAr = modeWidth / modeHeight;
+
+			/* Score the dimensions for closeness. */
+			score = scoreFormat(req.width, modeWidth);
+			score += scoreFormat(req.height, modeHeight);
+			score += PENALTY_AR * scoreFormat(reqAr, modeAr);
+
+			/* Add any penalties... this is not an exact science! */
+			if (!info.packed)
+				score += PENALTY_UNPACKED;
+
+			if (info.bitsPerPixel == 12)
+				score += PENALTY_12BIT;
+			else if (info.bitsPerPixel == 10)
+				score += PENALTY_10BIT;
+			else if (info.bitsPerPixel == 8)
+				score += PENALTY_8BIT;
+
+			if (score <= bestScore) {
+				bestScore = score;
+				bestMode.fourcc = v4l2Format;
+				bestMode.size = Size(modeWidth, modeHeight);
+			}
+
+			LOG(RPI, Info) << "Mode: " << modeWidth << "x" << modeHeight
+				       << " fmt " << v4l2Format.toString()
+				       << " Score: " << score
+				       << " (best " << bestScore << ")";
+		}
+	}
+
+	return bestMode;
+}
+
+} /* namespace */
+
+/*
+ * Device stream abstraction for either an internal or external stream.
+ * Used for both Unicam and the ISP.
+ */
+class RPiStream : public Stream
+{
+public:
+	RPiStream()
+		: external_(false), importOnly_(false), externalBuffers_(nullptr)
+	{
+	}
+
+	RPiStream(const char *name, MediaEntity *dev, bool importOnly = false)
+		: external_(false), importOnly_(importOnly), name_(name),
+		  dev_(std::make_unique<V4L2VideoDevice>(dev)),
+		  externalBuffers_(nullptr)
+	{
+	}
+
+	V4L2VideoDevice *dev() const
+	{
+		return dev_.get();
+	}
+
+	void setExternal(bool external)
+	{
+		external_ = external;
+	}
+
+	bool isExternal() const
+	{
+		/*
+		 * Import streams cannot be external.
+		 *
+		 * RAW capture is a special case where we simply copy the RAW
+		 * buffer out of the request.  All other buffer handling happens
+		 * as if the stream is internal.
+		 */
+		return external_ && !importOnly_;
+	}
+
+	bool isImporter() const
+	{
+		return importOnly_;
+	}
+
+	void reset()
+	{
+		external_ = false;
+		internalBuffers_.clear();
+	}
+
+	std::string name() const
+	{
+		return name_;
+	}
+
+	void setExternalBuffers(std::vector<std::unique_ptr<FrameBuffer>> *buffers)
+	{
+		externalBuffers_ = buffers;
+	}
+
+	const std::vector<std::unique_ptr<FrameBuffer>> *getBuffers() const
+	{
+		return external_ ? externalBuffers_ : &internalBuffers_;
+	}
+
+	void releaseBuffers()
+	{
+		dev_->releaseBuffers();
+		if (!external_ && !importOnly_)
+			internalBuffers_.clear();
+	}
+
+	int importBuffers(unsigned int count)
+	{
+		return dev_->importBuffers(count);
+	}
+
+	int allocateBuffers(unsigned int count)
+	{
+		return dev_->allocateBuffers(count, &internalBuffers_);
+	}
+
+	int queueBuffers()
+	{
+		if (external_)
+			return 0;
+
+		for (auto &b : internalBuffers_) {
+			int ret = dev_->queueBuffer(b.get());
+			if (ret) {
+				LOG(RPI, Error) << "Failed to queue buffers for "
+						<< name_;
+				return ret;
+			}
+		}
+
+		return 0;
+	}
+
+	bool findFrameBuffer(FrameBuffer *buffer) const
+	{
+		/* Import-only streams have no buffers of their own to search. */
+		if (importOnly_)
+			return false;
+
+		auto start = external_ ? externalBuffers_->begin() : internalBuffers_.begin();
+		auto end = external_ ? externalBuffers_->end() : internalBuffers_.end();
+
+		return std::find_if(start, end,
+				    [buffer](std::unique_ptr<FrameBuffer> const &ref) { return ref.get() == buffer; }) != end;
+	}
+
+private:
+	/*
+	 * Indicates that this stream is active externally, i.e. the buffers
+	 * are provided by the application.
+	 */
+	bool external_;
+	/* Indicates that this stream only imports buffers, e.g. ISP input. */
+	bool importOnly_;
+	/* Stream name identifier. */
+	std::string name_;
+	/* The actual device stream. */
+	std::unique_ptr<V4L2VideoDevice> dev_;
+	/* Internally allocated framebuffers associated with this device stream. */
+	std::vector<std::unique_ptr<FrameBuffer>> internalBuffers_;
+	/* Externally allocated framebuffers associated with this device stream. */
+	std::vector<std::unique_ptr<FrameBuffer>> *externalBuffers_;
+};
+
+/*
+ * The following class is just a convenient (and typesafe) array of device
+ * streams indexed with an enum class.
+ */
+enum class Unicam : unsigned int { Image, Embedded };
+enum class Isp : unsigned int { Input, Output0, Output1, Stats };
+
+template<typename E, std::size_t N>
+class RPiDevice : public std::array<class RPiStream, N>
+{
+private:
+	constexpr auto index(E e) const noexcept
+	{
+		return static_cast<std::underlying_type_t<E>>(e);
+	}
+public:
+	RPiStream &operator[](E e)
+	{
+		return std::array<class RPiStream, N>::operator[](index(e));
+	}
+	const RPiStream &operator[](E e) const
+	{
+		return std::array<class RPiStream, N>::operator[](index(e));
+	}
+};
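+
+/*
+ * This lets device streams be indexed by name rather than by a bare
+ * integer, e.g.:
+ *
+ *   RPiDevice<Unicam, 2> unicam;
+ *   unicam[Unicam::Image].dev()->streamOn();
+ */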
+
+class RPiCameraData : public CameraData
+{
+public:
+	RPiCameraData(PipelineHandler *pipe)
+		: CameraData(pipe), sensor_(nullptr), lsTable_(nullptr),
+		  state_(State::Stopped), dropFrame_(false), ispOutputCount_(0)
+	{
+	}
+
+	~RPiCameraData()
+	{
+		/*
+		 * Free the LS table if we have allocated one. Another
+		 * allocation will occur in applyLS() with the appropriate
+		 * size.
+		 */
+		if (lsTable_) {
+			vcsm_.free(lsTable_);
+			lsTable_ = nullptr;
+		}
+
+		/* Stop the IPA proxy thread. */
+		ipa_->stop();
+	}
+
+	void frameStarted(uint32_t sequence);
+
+	int loadIPA();
+	void queueFrameAction(unsigned int frame, const IPAOperationData &action);
+
+	/* bufferComplete signal handlers. */
+	void unicamBufferDequeue(FrameBuffer *buffer);
+	void ispInputDequeue(FrameBuffer *buffer);
+	void ispOutputDequeue(FrameBuffer *buffer);
+
+	void clearIncompleteRequests();
+	void handleStreamBuffer(FrameBuffer *buffer, const RPiStream *stream);
+	void handleState();
+
+	CameraSensor *sensor_;
+	/* Array of Unicam and ISP device streams and associated buffers/streams. */
+	RPiDevice<Unicam, 2> unicam_;
+	RPiDevice<Isp, 4> isp_;
+	/* The vector below is just for convenience when iterating over all streams. */
+	std::vector<RPiStream *> streams_;
+	/* Buffers passed to the IPA. */
+	std::vector<IPABuffer> ipaBuffers_;
+
+	/* VCSM allocation helper. */
+	RPi::Vcsm vcsm_;
+	void *lsTable_;
+
+	RPi::StaggeredCtrl staggeredCtrl_;
+	uint32_t expectedSequence_;
+	bool sensorMetadata_;
+
+	/*
+	 * All the functions in this class are called from a single calling
+	 * thread. So, we do not need to have any mutex to protect access to any
+	 * of the variables below.
+	 */
+	enum class State { Stopped, Idle, Busy, IpaComplete };
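+	/*
+	 * In outline, the transitions are: Stopped -> Idle in start(),
+	 * Idle -> Busy when tryRunPipeline() hands a frame set to the IPA,
+	 * Busy -> IpaComplete when the IPA signals that the request metadata
+	 * is ready, and IpaComplete -> Idle once checkRequestCompleted()
+	 * completes the request.
+	 */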
+	State state_;
+	std::queue<FrameBuffer *> bayerQueue_;
+	std::queue<FrameBuffer *> embeddedQueue_;
+	std::deque<Request *> requestQueue_;
+
+private:
+	void checkRequestCompleted();
+	void tryRunPipeline();
+	void tryFlushQueues();
+	FrameBuffer *updateQueue(std::queue<FrameBuffer *> &q, uint64_t timestamp, V4L2VideoDevice *dev);
+
+	bool dropFrame_;
+	int ispOutputCount_;
+};
+
+class RPiCameraConfiguration : public CameraConfiguration
+{
+public:
+	RPiCameraConfiguration(const RPiCameraData *data);
+
+	Status validate() override;
+
+private:
+	const RPiCameraData *data_;
+};
+
+class PipelineHandlerRPi : public PipelineHandler
+{
+public:
+	PipelineHandlerRPi(CameraManager *manager);
+	~PipelineHandlerRPi();
+
+	CameraConfiguration *generateConfiguration(Camera *camera, const StreamRoles &roles) override;
+	int configure(Camera *camera, CameraConfiguration *config) override;
+
+	int exportFrameBuffers(Camera *camera, Stream *stream,
+			       std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;
+
+	int start(Camera *camera) override;
+	void stop(Camera *camera) override;
+
+	int queueRequestDevice(Camera *camera, Request *request) override;
+
+	bool match(DeviceEnumerator *enumerator) override;
+
+private:
+	RPiCameraData *cameraData(const Camera *camera)
+	{
+		return static_cast<RPiCameraData *>(PipelineHandler::cameraData(camera));
+	}
+
+	int configureIPA(Camera *camera);
+
+	int queueAllBuffers(Camera *camera);
+	int prepareBuffers(Camera *camera);
+	void freeBuffers(Camera *camera);
+
+	std::shared_ptr<MediaDevice> unicam_;
+	std::shared_ptr<MediaDevice> isp_;
+};
+
+RPiCameraConfiguration::RPiCameraConfiguration(const RPiCameraData *data)
+	: CameraConfiguration(), data_(data)
+{
+}
+
+CameraConfiguration::Status RPiCameraConfiguration::validate()
+{
+	Status status = Valid;
+
+	if (config_.empty())
+		return Invalid;
+
+	unsigned int rawCount = 0, outCount = 0, count = 0, maxIndex = 0;
+	std::pair<int, Size> outSize[2];
+	Size maxSize = {};
+	for (StreamConfiguration &cfg : config_) {
+		if (isRaw(cfg.pixelFormat)) {
+			/*
+			 * Calculate the best sensor mode we can use based on
+			 * the user request.
+			 */
+			V4L2PixFmtMap fmts = data_->unicam_[Unicam::Image].dev()->formats();
+			V4L2DeviceFormat sensorFormat = findBestMode(fmts, cfg.size);
+			PixelFormat sensorPixFormat = sensorFormat.fourcc.toPixelFormat();
+			if (cfg.size != sensorFormat.size ||
+			    cfg.pixelFormat != sensorPixFormat) {
+				cfg.size = sensorFormat.size;
+				cfg.pixelFormat = sensorPixFormat;
+				status = Adjusted;
+			}
+			rawCount++;
+		} else {
+			/* Can only output 2 YUV/RGB streams. */
+			if (outCount == 2) {
+				LOG(RPI, Error) << "Invalid number of streams requested";
+				return Invalid;
+			}
+
+			outSize[outCount] = std::make_pair(count, cfg.size);
+			/* Record the largest resolution for fixups later. */
+			if (maxSize < cfg.size) {
+				maxSize = cfg.size;
+				maxIndex = outCount;
+			}
+			outCount++;
+		}
+
+		count++;
+
+		/* Can only output 1 RAW stream. */
+		if (rawCount > 1) {
+			LOG(RPI, Error) << "Invalid number of streams requested";
+			return Invalid;
+		}
+	}
+
+	/*
+	 * Now do any fixups needed. For the two ISP outputs, one stream must be
+	 * equal or smaller than the other in all dimensions.
+	 */
+	for (unsigned int i = 0; i < outCount; i++) {
+		outSize[i].second.width = std::min(outSize[i].second.width,
+						   maxSize.width);
+		outSize[i].second.height = std::min(outSize[i].second.height,
+						    maxSize.height);
+
+		if (config_.at(outSize[i].first).size != outSize[i].second) {
+			config_.at(outSize[i].first).size = outSize[i].second;
+			status = Adjusted;
+		}
+
+		/*
+		 * Also validate the pixel formats here. Note that Output0 and
+		 * Output1 support different sets of formats.
+		 *
+		 * Output0 must take the largest resolution; that has been
+		 * fixed up in the code above.
+		 */
+		PixelFormat &cfgPixFmt = config_.at(outSize[i].first).pixelFormat;
+		V4L2PixFmtMap fmts;
+
+		if (i == maxIndex)
+			fmts = data_->isp_[Isp::Output0].dev()->formats();
+		else
+			fmts = data_->isp_[Isp::Output1].dev()->formats();
+
+		if (fmts.find(V4L2PixelFormat::fromPixelFormat(cfgPixFmt, false)) == fmts.end()) {
+			/* If we cannot find a native format, use a default one. */
+			cfgPixFmt = PixelFormat(DRM_FORMAT_NV12);
+			status = Adjusted;
+		}
+	}
+
+	return status;
+}
+
+PipelineHandlerRPi::PipelineHandlerRPi(CameraManager *manager)
+	: PipelineHandler(manager), unicam_(nullptr), isp_(nullptr)
+{
+}
+
+PipelineHandlerRPi::~PipelineHandlerRPi()
+{
+	if (unicam_)
+		unicam_->release();
+
+	if (isp_)
+		isp_->release();
+}
+
+CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
+							       const StreamRoles &roles)
+{
+	RPiCameraData *data = cameraData(camera);
+	CameraConfiguration *config = new RPiCameraConfiguration(data);
+	V4L2DeviceFormat sensorFormat;
+	V4L2PixFmtMap fmts;
+
+	if (roles.empty())
+		return config;
+
+	for (const StreamRole role : roles) {
+		StreamConfiguration cfg{};
+
+		switch (role) {
+		case StreamRole::StillCaptureRaw:
+			cfg.size = data->sensor_->resolution();
+			fmts = data->unicam_[Unicam::Image].dev()->formats();
+			sensorFormat = findBestMode(fmts, cfg.size);
+			cfg.pixelFormat = sensorFormat.fourcc.toPixelFormat();
+			ASSERT(cfg.pixelFormat.isValid());
+			cfg.bufferCount = 1;
+			break;
+
+		case StreamRole::StillCapture:
+			cfg.pixelFormat = PixelFormat(DRM_FORMAT_NV12);
+			/* Return the largest sensor resolution. */
+			cfg.size = data->sensor_->resolution();
+			cfg.bufferCount = 1;
+			break;
+
+		case StreamRole::VideoRecording:
+			cfg.pixelFormat = PixelFormat(DRM_FORMAT_NV12);
+			cfg.size = { 1920, 1080 };
+			cfg.bufferCount = 4;
+			break;
+
+		case StreamRole::Viewfinder:
+			cfg.pixelFormat = PixelFormat(DRM_FORMAT_ARGB8888);
+			cfg.size = { 800, 600 };
+			cfg.bufferCount = 4;
+			break;
+
+		default:
+			LOG(RPI, Error) << "Requested stream role not supported: "
+					<< role;
+			break;
+		}
+
+		config->addConfiguration(cfg);
+	}
+
+	config->validate();
+
+	return config;
+}
+
+int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
+{
+	RPiCameraData *data = cameraData(camera);
+	int ret;
+
+	/* Start by resetting the Unicam and ISP stream states. */
+	for (auto const stream : data->streams_)
+		stream->reset();
+
+	Size maxSize = {}, sensorSize = {};
+	unsigned int maxIndex = 0;
+	bool rawStream = false;
+
+	/*
+	 * Look for the RAW stream (if given) size as well as the largest
+	 * ISP output size.
+	 */
+	for (unsigned i = 0; i < config->size(); i++) {
+		StreamConfiguration &cfg = config->at(i);
+
+		if (isRaw(cfg.pixelFormat)) {
+			/*
+			 * If we have been given a RAW stream, use that size
+			 * for setting up the sensor.
+			 */
+			sensorSize = cfg.size;
+			rawStream = true;
+		} else {
+			if (cfg.size > maxSize) {
+				maxSize = config->at(i).size;
+				maxIndex = i;
+			}
+		}
+	}
+
+	/* First calculate the best sensor mode we can use based on the user request. */
+	V4L2PixFmtMap fmts = data->unicam_[Unicam::Image].dev()->formats();
+	V4L2DeviceFormat sensorFormat = findBestMode(fmts, rawStream ? sensorSize : maxSize);
+
+	/*
+	 * Unicam image output format. The ISP input format gets set at
+	 * start(), in case the Bayer order has been swapped by sensor flips.
+	 */
+	ret = data->unicam_[Unicam::Image].dev()->setFormat(&sensorFormat);
+	if (ret)
+		return ret;
+
+	LOG(RPI, Info) << "Sensor: " << camera->name()
+		       << " - Selected mode: " << sensorFormat.toString();
+
+	/*
+	 * This format may be reset on start() if the bayer order has changed
+	 * because of flips in the sensor.
+	 */
+	ret = data->isp_[Isp::Input].dev()->setFormat(&sensorFormat);
+	if (ret)
+		return ret;
+
+	/*
+	 * See which streams are requested, and route the user
+	 * StreamConfiguration appropriately.
+	 */
+	V4L2DeviceFormat format = {};
+	for (unsigned i = 0; i < config->size(); i++) {
+		StreamConfiguration &cfg = config->at(i);
+
+		if (isRaw(cfg.pixelFormat)) {
+			cfg.setStream(&data->isp_[Isp::Input]);
+			cfg.stride = sensorFormat.planes[0].bpl;
+			data->isp_[Isp::Input].setExternal(true);
+			continue;
+		}
+
+		if (i == maxIndex) {
+			/* ISP main output format. */
+			V4L2VideoDevice *dev = data->isp_[Isp::Output0].dev();
+			V4L2PixelFormat fourcc = dev->toV4L2PixelFormat(cfg.pixelFormat);
+			format.size = cfg.size;
+			format.fourcc = fourcc;
+
+			ret = dev->setFormat(&format);
+			if (ret)
+				return ret;
+
+			if (format.size != cfg.size || format.fourcc != fourcc) {
+				LOG(RPI, Error)
+					<< "Failed to set format on ISP capture0 device: "
+					<< format.toString();
+				return -EINVAL;
+			}
+
+			cfg.setStream(&data->isp_[Isp::Output0]);
+			cfg.stride = format.planes[0].bpl;
+			data->isp_[Isp::Output0].setExternal(true);
+		}
+
+		/*
+		 * ISP second output format. This fallthrough means that if a
+		 * second output stream has not been configured, we simply use
+		 * the Output0 configuration.
+		 */
+		V4L2VideoDevice *dev = data->isp_[Isp::Output1].dev();
+		format.fourcc = dev->toV4L2PixelFormat(cfg.pixelFormat);
+		format.size = cfg.size;
+
+		ret = dev->setFormat(&format);
+		if (ret) {
+			LOG(RPI, Error)
+				<< "Failed to set format on ISP capture1 device: "
+				<< format.toString();
+			return ret;
+		}
+		/*
+		 * If we have not yet provided a stream for this config, it
+		 * means this is to be routed from Output1.
+		 */
+		if (!cfg.stream()) {
+			cfg.setStream(&data->isp_[Isp::Output1]);
+			cfg.stride = format.planes[0].bpl;
+			data->isp_[Isp::Output1].setExternal(true);
+		}
+	}
+
+	/* ISP statistics output format. */
+	format = {};
+	format.fourcc = V4L2PixelFormat(V4L2_META_FMT_BCM2835_ISP_STATS);
+	ret = data->isp_[Isp::Stats].dev()->setFormat(&format);
+	if (ret) {
+		LOG(RPI, Error) << "Failed to set format on ISP stats stream: "
+				<< format.toString();
+		return ret;
+	}
+
+	/* Unicam embedded data output format. */
+	format = {};
+	format.fourcc = V4L2PixelFormat(V4L2_META_FMT_SENSOR_DATA);
+	LOG(RPI, Debug) << "Setting embedded data format.";
+	ret = data->unicam_[Unicam::Embedded].dev()->setFormat(&format);
+	if (ret) {
+		LOG(RPI, Error) << "Failed to set format on Unicam embedded: "
+				<< format.toString();
+		return ret;
+	}
+
+	/* Adjust aspect ratio by providing crops on the input image. */
+	Rectangle crop = {
+		.x = 0,
+		.y = 0,
+		.width = sensorFormat.size.width,
+		.height = sensorFormat.size.height
+	};
+
+	int64_t ar = static_cast<int64_t>(maxSize.height) * sensorFormat.size.width -
+		     static_cast<int64_t>(maxSize.width) * sensorFormat.size.height;
+	if (ar > 0)
+		crop.width = maxSize.width * sensorFormat.size.height / maxSize.height;
+	else if (ar < 0)
+		crop.height = maxSize.height * sensorFormat.size.width / maxSize.width;
+
+	crop.width &= ~1;
+	crop.height &= ~1;
+
+	crop.x = (sensorFormat.size.width - crop.width) >> 1;
+	crop.y = (sensorFormat.size.height - crop.height) >> 1;
+	data->isp_[Isp::Input].dev()->setSelection(V4L2_SEL_TGT_CROP, &crop);
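+
+	/*
+	 * Worked example with illustrative numbers: a 3280x2464 sensor mode
+	 * cropped for a 1920x1080 output gives ar < 0, so crop.height becomes
+	 * 1080 * 3280 / 1920 = 1845, rounded down to 1844 to keep it even,
+	 * and crop.y = (2464 - 1844) / 2 = 310 centres the crop vertically.
+	 */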
+
+	ret = configureIPA(camera);
+	if (ret)
+		LOG(RPI, Error) << "Failed to configure the IPA: " << ret;
+
+	return ret;
+}
+
+int PipelineHandlerRPi::exportFrameBuffers(Camera *camera, Stream *stream,
+					   std::vector<std::unique_ptr<FrameBuffer>> *buffers)
+{
+	RPiStream *s = static_cast<RPiStream *>(stream);
+	unsigned int count = stream->configuration().bufferCount;
+	int ret = s->dev()->exportBuffers(count, buffers);
+
+	s->setExternalBuffers(buffers);
+
+	return ret;
+}
+
+int PipelineHandlerRPi::start(Camera *camera)
+{
+	RPiCameraData *data = cameraData(camera);
+	int ret;
+
+	/* Allocate buffers for internal pipeline usage. */
+	ret = prepareBuffers(camera);
+	if (ret) {
+		LOG(RPI, Error) << "Failed to allocate buffers";
+		return ret;
+	}
+
+	ret = queueAllBuffers(camera);
+	if (ret) {
+		LOG(RPI, Error) << "Failed to queue buffers";
+		return ret;
+	}
+
+	/*
+	 * IPA configure may have changed the sensor flips - hence the bayer
+	 * order. Get the sensor format and set the ISP input now.
+	 */
+	V4L2DeviceFormat sensorFormat;
+	data->unicam_[Unicam::Image].dev()->getFormat(&sensorFormat);
+	ret = data->isp_[Isp::Input].dev()->setFormat(&sensorFormat);
+	if (ret)
+		return ret;
+
+	/* Enable SOF event generation. */
+	data->unicam_[Unicam::Image].dev()->setFrameStartEnabled(true);
+
+	/*
+	 * Write the last set of gain and exposure values to the camera before
+	 * starting.  First check that the staggered ctrl has been initialised
+	 * by the IPA action.
+	 */
+	ASSERT(data->staggeredCtrl_);
+	data->staggeredCtrl_.reset();
+	data->staggeredCtrl_.write();
+	data->expectedSequence_ = 0;
+
+	data->state_ = RPiCameraData::State::Idle;
+
+	/* Start all streams. */
+	for (auto const stream : data->streams_) {
+		ret = stream->dev()->streamOn();
+		if (ret) {
+			stop(camera);
+			return ret;
+		}
+	}
+
+	return 0;
+}
+
+void PipelineHandlerRPi::stop(Camera *camera)
+{
+	RPiCameraData *data = cameraData(camera);
+
+	data->state_ = RPiCameraData::State::Stopped;
+
+	/* Disable SOF event generation. */
+	data->unicam_[Unicam::Image].dev()->setFrameStartEnabled(false);
+
+	/* This also stops the streams. */
+	data->clearIncompleteRequests();
+	/* The default std::queue constructor is explicit with gcc 5 and 6. */
+	data->bayerQueue_ = std::queue<FrameBuffer *>{};
+	data->embeddedQueue_ = std::queue<FrameBuffer *>{};
+
+	freeBuffers(camera);
+}
+
+int PipelineHandlerRPi::queueRequestDevice(Camera *camera, Request *request)
+{
+	RPiCameraData *data = cameraData(camera);
+
+	if (data->state_ == RPiCameraData::State::Stopped)
+		return -EINVAL;
+
+	/* Ensure all external streams have associated buffers! */
+	for (auto &stream : data->isp_) {
+		if (!stream.isExternal())
+			continue;
+
+		if (!request->findBuffer(&stream)) {
+			LOG(RPI, Error) << "Attempt to queue request with invalid stream.";
+			return -ENOENT;
+		}
+	}
+
+	/* Push the request to the back of the queue. */
+	data->requestQueue_.push_back(request);
+	data->handleState();
+
+	return 0;
+}
+
+bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
+{
+	DeviceMatch unicam("unicam");
+	DeviceMatch isp("bcm2835-isp");
+
+	unicam.add("unicam-embedded");
+	unicam.add("unicam-image");
+
+	isp.add("bcm2835-isp0-output0"); /* Input */
+	isp.add("bcm2835-isp0-capture1"); /* Output 0 */
+	isp.add("bcm2835-isp0-capture2"); /* Output 1 */
+	isp.add("bcm2835-isp0-capture3"); /* Stats */
+
+	unicam_ = enumerator->search(unicam);
+	if (!unicam_)
+		return false;
+
+	isp_ = enumerator->search(isp);
+	if (!isp_)
+		return false;
+
+	unicam_->acquire();
+	isp_->acquire();
+
+	std::unique_ptr<RPiCameraData> data = std::make_unique<RPiCameraData>(this);
+
+	/* Locate and open the unicam video streams. */
+	data->unicam_[Unicam::Embedded] = RPiStream("Unicam Embedded", unicam_->getEntityByName("unicam-embedded"));
+	data->unicam_[Unicam::Image] = RPiStream("Unicam Image", unicam_->getEntityByName("unicam-image"));
+
+	/* Tag the ISP input stream as an import stream. */
+	data->isp_[Isp::Input] = RPiStream("ISP Input", isp_->getEntityByName("bcm2835-isp0-output0"), true);
+	data->isp_[Isp::Output0] = RPiStream("ISP Output0", isp_->getEntityByName("bcm2835-isp0-capture1"));
+	data->isp_[Isp::Output1] = RPiStream("ISP Output1", isp_->getEntityByName("bcm2835-isp0-capture2"));
+	data->isp_[Isp::Stats] = RPiStream("ISP Stats", isp_->getEntityByName("bcm2835-isp0-capture3"));
+
+	/* This is just for convenience so that we can easily iterate over all streams. */
+	for (auto &stream : data->unicam_)
+		data->streams_.push_back(&stream);
+	for (auto &stream : data->isp_)
+		data->streams_.push_back(&stream);
+
+	/* Open all Unicam and ISP streams. */
+	for (auto const stream : data->streams_) {
+		if (stream->dev()->open())
+			return false;
+	}
+
+	/* Wire up all the buffer connections. */
+	data->unicam_[Unicam::Image].dev()->frameStart.connect(data.get(), &RPiCameraData::frameStarted);
+	data->unicam_[Unicam::Image].dev()->bufferReady.connect(data.get(), &RPiCameraData::unicamBufferDequeue);
+	data->unicam_[Unicam::Embedded].dev()->bufferReady.connect(data.get(), &RPiCameraData::unicamBufferDequeue);
+	data->isp_[Isp::Input].dev()->bufferReady.connect(data.get(), &RPiCameraData::ispInputDequeue);
+	data->isp_[Isp::Output0].dev()->bufferReady.connect(data.get(), &RPiCameraData::ispOutputDequeue);
+	data->isp_[Isp::Output1].dev()->bufferReady.connect(data.get(), &RPiCameraData::ispOutputDequeue);
+	data->isp_[Isp::Stats].dev()->bufferReady.connect(data.get(), &RPiCameraData::ispOutputDequeue);
+
+	/* Identify the sensor. */
+	for (MediaEntity *entity : unicam_->entities()) {
+		if (entity->function() == MEDIA_ENT_F_CAM_SENSOR) {
+			data->sensor_ = new CameraSensor(entity);
+			break;
+		}
+	}
+
+	if (!data->sensor_)
+		return false;
+
+	if (data->sensor_->init())
+		return false;
+
+	if (data->loadIPA()) {
+		LOG(RPI, Error) << "Failed to load a suitable IPA library";
+		return false;
+	}
+
+	/* Register the controls that the Raspberry Pi IPA can handle. */
+	data->controlInfo_ = RPiControls;
+	/* Initialize the camera properties. */
+	data->properties_ = data->sensor_->properties();
+
+	/*
+	 * List the available output streams. The Unicam streams cannot
+	 * currently be exposed to applications.
+	 */
+	std::set<Stream *> streams;
+	streams.insert(&data->isp_[Isp::Input]);
+	streams.insert(&data->isp_[Isp::Output0]);
+	streams.insert(&data->isp_[Isp::Output1]);
+	streams.insert(&data->isp_[Isp::Stats]);
+
+	/* Create and register the camera. */
+	std::shared_ptr<Camera> camera = Camera::create(this, data->sensor_->model(), streams);
+	registerCamera(std::move(camera), std::move(data));
+
+	return true;
+}
+
+int PipelineHandlerRPi::configureIPA(Camera *camera)
+{
+	std::map<unsigned int, IPAStream> streamConfig;
+	std::map<unsigned int, const ControlInfoMap &> entityControls;
+	RPiCameraData *data = cameraData(camera);
+
+	/* Get the device format to pass to the IPA. */
+	V4L2DeviceFormat sensorFormat;
+	data->unicam_[Unicam::Image].dev()->getFormat(&sensorFormat);
+	/* Inform IPA of stream configuration and sensor controls. */
+	unsigned int i = 0;
+	for (auto const &stream : data->isp_) {
+		if (stream.isExternal()) {
+			streamConfig[i++] = {
+				.pixelFormat = stream.configuration().pixelFormat,
+				.size = stream.configuration().size
+			};
+		}
+	}
+	entityControls.emplace(0, data->unicam_[Unicam::Image].dev()->controls());
+	entityControls.emplace(1, data->isp_[Isp::Input].dev()->controls());
+
+	/* Allocate the lens shading table via vcsm and pass to the IPA. */
+	if (!data->lsTable_) {
+		data->lsTable_ = data->vcsm_.alloc("ls_grid", MAX_LS_GRID_SIZE);
+		uintptr_t ptr = reinterpret_cast<uintptr_t>(data->lsTable_);
+
+		if (!data->lsTable_)
+			return -ENOMEM;
+
+		/*
+		 * The vcsm allocation will always be in the memory region
+		 * < 32-bits to allow Videocore to access the memory.
+		 */
+		IPAOperationData op;
+		op.operation = RPI_IPA_EVENT_LS_TABLE_ALLOCATION;
+		op.data = { static_cast<uint32_t>(ptr & 0xffffffff),
+			    data->vcsm_.getVCHandle(data->lsTable_) };
+		data->ipa_->processEvent(op);
+	}
+
+	CameraSensorInfo sensorInfo = {};
+	int ret = data->sensor_->sensorInfo(&sensorInfo);
+	if (ret) {
+		LOG(RPI, Error) << "Failed to retrieve camera sensor info";
+		return ret;
+	}
+
+	/* Ready the IPA - it must know about the sensor resolution. */
+	data->ipa_->configure(sensorInfo, streamConfig, entityControls);
+
+	return 0;
+}
+
+int PipelineHandlerRPi::queueAllBuffers(Camera *camera)
+{
+	RPiCameraData *data = cameraData(camera);
+	int ret;
+
+	for (auto const stream : data->streams_) {
+		ret = stream->queueBuffers();
+		if (ret < 0)
+			return ret;
+	}
+
+	return 0;
+}
+
+int PipelineHandlerRPi::prepareBuffers(Camera *camera)
+{
+	RPiCameraData *data = cameraData(camera);
+	int count, ret;
+
+	/*
+	 * Decide how many internal buffers to allocate. For now, simply look
+	 * at how many external buffers will be provided. This logic will need
+	 * to be improved.
+	 */
+	unsigned int maxBuffers = 0;
+	for (const Stream *s : camera->streams())
+		if (static_cast<const RPiStream *>(s)->isExternal())
+			maxBuffers = std::max(maxBuffers, s->configuration().bufferCount);
+
+	for (auto const stream : data->streams_) {
+		if (stream->isExternal() || stream->isImporter()) {
+			/*
+			 * If a stream is marked as external, reserve memory so
+			 * that we are prepared to import as many buffers as
+			 * are requested in the stream configuration.
+			 *
+			 * If a stream is an internal stream with importer
+			 * role, reserve as many buffers as possible.
+			 */
+			unsigned int count = stream->isExternal()
+						     ? stream->configuration().bufferCount
+						     : maxBuffers;
+			ret = stream->importBuffers(count);
+			if (ret < 0)
+				return ret;
+		} else {
+			/*
+			 * If the stream is an internal exporter allocate and
+			 * export as many buffers as possible to its internal
+			 * pool.
+			 */
+			ret = stream->allocateBuffers(maxBuffers);
+			if (ret < 0) {
+				freeBuffers(camera);
+				return ret;
+			}
+		}
+	}
+
+	/*
+	 * Add cookies to the Unicam image buffers (which get re-queued to the
+	 * ISP input) so that we can link them with the IPA through the
+	 * RPI_IPA_EVENT_SIGNAL_ISP_PREPARE event.
+	 */
+	count = 0;
+	for (auto const &b : *data->unicam_[Unicam::Image].getBuffers()) {
+		b->setCookie(count++);
+	}
+
+	/*
+	 * Add cookies to the stats and embedded data buffers and link them with
+	 * the IPA.
+	 */
+	count = 0;
+	for (auto const &b : *data->isp_[Isp::Stats].getBuffers()) {
+		b->setCookie(count++);
+		data->ipaBuffers_.push_back({ .id = RPiIpaMask::STATS | b->cookie(),
+					      .planes = b->planes() });
+	}
+
+	count = 0;
+	for (auto const &b : *data->unicam_[Unicam::Embedded].getBuffers()) {
+		b->setCookie(count++);
+		data->ipaBuffers_.push_back({ .id = RPiIpaMask::EMBEDDED_DATA | b->cookie(),
+					      .planes = b->planes() });
+	}
+
+	data->ipa_->mapBuffers(data->ipaBuffers_);
+
+	return 0;
+}
+
+void PipelineHandlerRPi::freeBuffers(Camera *camera)
+{
+	RPiCameraData *data = cameraData(camera);
+
+	std::vector<unsigned int> ids;
+	for (IPABuffer &ipabuf : data->ipaBuffers_)
+		ids.push_back(ipabuf.id);
+
+	data->ipa_->unmapBuffers(ids);
+	data->ipaBuffers_.clear();
+
+	for (auto const stream : data->streams_)
+		stream->releaseBuffers();
+}
+
+void RPiCameraData::frameStarted(uint32_t sequence)
+{
+	LOG(RPI, Debug) << "frame start " << sequence;
+
+	/* Write any controls for the next frame as soon as we can. */
+	staggeredCtrl_.write();
+}
+
+int RPiCameraData::loadIPA()
+{
+	ipa_ = IPAManager::instance()->createIPA(pipe_, 1, 1);
+	if (!ipa_)
+		return -ENOENT;
+
+	ipa_->queueFrameAction.connect(this, &RPiCameraData::queueFrameAction);
+
+	IPASettings settings{
+		.configurationFile = ipa_->configurationFile(sensor_->model() + ".json")
+	};
+
+	ipa_->init(settings);
+
+	/*
+	 * Startup the IPA thread now. Without this call, none of the IPA API
+	 * functions will run.
+	 *
+	 * It only gets stopped in the class destructor.
+	 */
+	return ipa_->start();
+}
+
+void RPiCameraData::queueFrameAction(unsigned int frame, const IPAOperationData &action)
+{
+	/*
+	 * The following actions can be handled when the pipeline handler is in
+	 * a stopped state.
+	 */
+	switch (action.operation) {
+	case RPI_IPA_ACTION_V4L2_SET_STAGGERED: {
+		ControlList controls = action.controls[0];
+		if (!staggeredCtrl_.set(controls))
+			LOG(RPI, Error) << "V4L2 staggered set failed";
+		goto done;
+	}
+
+	case RPI_IPA_ACTION_SET_SENSOR_CONFIG: {
+		/*
+		 * Setup our staggered control writer with the sensor default
+		 * gain and exposure delays.
+		 */
+		if (!staggeredCtrl_) {
+			staggeredCtrl_.init(unicam_[Unicam::Image].dev(),
+					    { { V4L2_CID_ANALOGUE_GAIN, action.data[0] },
+					      { V4L2_CID_EXPOSURE, action.data[1] } });
+			sensorMetadata_ = action.data[2];
+		}
+
+		/* Set the sensor orientation here as well. */
+		ControlList controls = action.controls[0];
+		unicam_[Unicam::Image].dev()->setControls(&controls);
+		goto done;
+	}
+
+	case RPI_IPA_ACTION_V4L2_SET_ISP: {
+		ControlList controls = action.controls[0];
+		isp_[Isp::Input].dev()->setControls(&controls);
+		goto done;
+	}
+	}
+
+	if (state_ == State::Stopped)
+		goto done;
+
+	/*
+	 * The following actions must not be handled when the pipeline handler
+	 * is in a stopped state.
+	 */
+	switch (action.operation) {
+	case RPI_IPA_ACTION_STATS_METADATA_COMPLETE: {
+		unsigned int bufferId = action.data[0];
+		FrameBuffer *buffer = isp_[Isp::Stats].getBuffers()->at(bufferId).get();
+
+		handleStreamBuffer(buffer, &isp_[Isp::Stats]);
+		/* Fill the Request metadata buffer with what the IPA has provided. */
+		requestQueue_.front()->metadata() = action.controls[0];
+		state_ = State::IpaComplete;
+		break;
+	}
+
+	case RPI_IPA_ACTION_EMBEDDED_COMPLETE: {
+		unsigned int bufferId = action.data[0];
+		FrameBuffer *buffer = unicam_[Unicam::Embedded].getBuffers()->at(bufferId).get();
+		handleStreamBuffer(buffer, &unicam_[Unicam::Embedded]);
+		break;
+	}
+
+	case RPI_IPA_ACTION_RUN_ISP_AND_DROP_FRAME:
+	case RPI_IPA_ACTION_RUN_ISP: {
+		unsigned int bufferId = action.data[0];
+		FrameBuffer *buffer = unicam_[Unicam::Image].getBuffers()->at(bufferId).get();
+
+		LOG(RPI, Debug) << "Input re-queue to ISP, buffer id " << buffer->cookie()
+				<< ", timestamp: " << buffer->metadata().timestamp;
+
+		isp_[Isp::Input].dev()->queueBuffer(buffer);
+		dropFrame_ = (action.operation == RPI_IPA_ACTION_RUN_ISP_AND_DROP_FRAME);
+		ispOutputCount_ = 0;
+		break;
+	}
+
+	default:
+		LOG(RPI, Error) << "Unknown action " << action.operation;
+		break;
+	}
+
+done:
+	handleState();
+}
+
+void RPiCameraData::unicamBufferDequeue(FrameBuffer *buffer)
+{
+	const RPiStream *stream = nullptr;
+
+	if (state_ == State::Stopped)
+		return;
+
+	for (RPiStream const &s : unicam_) {
+		if (s.findFrameBuffer(buffer)) {
+			stream = &s;
+			break;
+		}
+	}
+
+	/* The buffer must belong to one of our streams. */
+	ASSERT(stream);
+
+	LOG(RPI, Debug) << "Stream " << stream->name() << " buffer dequeue"
+			<< ", buffer id " << buffer->cookie()
+			<< ", timestamp: " << buffer->metadata().timestamp;
+
+	if (stream == &unicam_[Unicam::Image]) {
+		bayerQueue_.push(buffer);
+	} else {
+		embeddedQueue_.push(buffer);
+
+		std::unordered_map<uint32_t, int32_t> ctrl;
+		int offset = buffer->metadata().sequence - expectedSequence_;
+		staggeredCtrl_.get(ctrl, offset);
+
+		expectedSequence_ = buffer->metadata().sequence + 1;
+
+		/*
+		 * Sensor metadata is unavailable, so put the expected ctrl
+		 * values (accounting for the staggered delays) into the empty
+		 * metadata buffer.
+		 */
+		if (!sensorMetadata_) {
+			const FrameBuffer::Plane &plane = buffer->planes()[0];
+			uint32_t *mem = static_cast<uint32_t *>(::mmap(nullptr, plane.length,
+								       PROT_READ | PROT_WRITE,
+								       MAP_SHARED,
+								       plane.fd.fd(), 0));
+			if (mem != MAP_FAILED) {
+				mem[0] = ctrl[V4L2_CID_EXPOSURE];
+				mem[1] = ctrl[V4L2_CID_ANALOGUE_GAIN];
+				::munmap(mem, plane.length);
+			}
+		}
+	}
+
+	handleState();
+}
+
+void RPiCameraData::ispInputDequeue(FrameBuffer *buffer)
+{
+	if (state_ == State::Stopped)
+		return;
+
+	handleStreamBuffer(buffer, &unicam_[Unicam::Image]);
+	handleState();
+}
+
+void RPiCameraData::ispOutputDequeue(FrameBuffer *buffer)
+{
+	const RPiStream *stream = nullptr;
+
+	if (state_ == State::Stopped)
+		return;
+
+	for (RPiStream const &s : isp_) {
+		if (s.findFrameBuffer(buffer)) {
+			stream = &s;
+			break;
+		}
+	}
+
+	/* The buffer must belong to one of our ISP output streams. */
+	ASSERT(stream);
+
+	LOG(RPI, Debug) << "Stream " << stream->name() << " buffer complete"
+			<< ", buffer id " << buffer->cookie()
+			<< ", timestamp: " << buffer->metadata().timestamp;
+
+	handleStreamBuffer(buffer, stream);
+
+	/*
+	 * Increment the number of ISP outputs generated.
+	 * This is needed to track dropped frames.
+	 */
+	ispOutputCount_++;
+
+	/* If this is a stats output, hand it to the IPA now. */
+	if (stream == &isp_[Isp::Stats]) {
+		IPAOperationData op;
+		op.operation = RPI_IPA_EVENT_SIGNAL_STAT_READY;
+		op.data = { RPiIpaMask::STATS | buffer->cookie() };
+		ipa_->processEvent(op);
+	}
+
+	handleState();
+}
+
+void RPiCameraData::clearIncompleteRequests()
+{
+	/*
+	 * Queue up any buffers passed in the request.
+	 * This is needed because streamOff() will then mark the buffers as
+	 * cancelled.
+	 */
+	for (auto const request : requestQueue_) {
+		for (auto const stream : streams_) {
+			if (stream->isExternal())
+				stream->dev()->queueBuffer(request->findBuffer(stream));
+		}
+	}
+
+	/* Stop all streams. */
+	for (auto const stream : streams_)
+		stream->dev()->streamOff();
+
+	/*
+	 * All outstanding requests (and associated buffers) must be returned
+	 * back to the pipeline. The buffers would have been marked as
+	 * cancelled by the call to streamOff() earlier.
+	 */
+	while (!requestQueue_.empty()) {
+		Request *request = requestQueue_.front();
+		/*
+		 * A request could be partially complete,
+		 * i.e. we have returned some buffers, but still waiting
+		 * for others or waiting for metadata.
+		 */
+		for (auto const stream : streams_) {
+			if (!stream->isExternal())
+				continue;
+
+			FrameBuffer *buffer = request->findBuffer(stream);
+			/*
+			 * Has the buffer already been handed back to the
+			 * request? If not, do so now.
+			 */
+			if (buffer->request())
+				pipe_->completeBuffer(camera_, request, buffer);
+		}
+
+		pipe_->completeRequest(camera_, request);
+		requestQueue_.pop_front();
+	}
+}
+
+void RPiCameraData::handleStreamBuffer(FrameBuffer *buffer, const RPiStream *stream)
+{
+	if (stream->isExternal()) {
+		if (!dropFrame_) {
+			Request *request = buffer->request();
+			pipe_->completeBuffer(camera_, request, buffer);
+		}
+	} else {
+		/*
+		 * Special handling for RAW buffer Requests.
+		 *
+		 * The ISP input stream is always an import stream, but if the
+		 * current Request contains a buffer for it, simply copy into
+		 * the Request buffer and requeue the dequeued buffer back to
+		 * the device.
+		 */
+		if (stream == &unicam_[Unicam::Image] && !dropFrame_) {
+			const Stream *rawStream = static_cast<const Stream *>(&isp_[Isp::Input]);
+			Request *request = requestQueue_.front();
+			FrameBuffer *raw = request->findBuffer(const_cast<Stream *>(rawStream));
+			if (raw) {
+				raw->copyFrom(buffer);
+				pipe_->completeBuffer(camera_, request, raw);
+			}
+		}
+
+		/* Simply requeue the buffer. */
+		stream->dev()->queueBuffer(buffer);
+	}
+}
+
+void RPiCameraData::handleState()
+{
+	switch (state_) {
+	case State::Stopped:
+	case State::Busy:
+		break;
+
+	case State::IpaComplete:
+		/* If the request is completed, we will switch to Idle state. */
+		checkRequestCompleted();
+		/*
+		 * No break here, we want to try running the pipeline again.
+		 * The fallthrough clause below suppresses compiler warnings.
+		 */
+		/* Fall through */
+
+	case State::Idle:
+		tryRunPipeline();
+		tryFlushQueues();
+		break;
+	}
+}
+
+void RPiCameraData::checkRequestCompleted()
+{
+	bool requestCompleted = false;
+	/*
+	 * If we are dropping this frame, do not touch the request, simply
+	 * change the state to Idle when ready.
+	 */
+	if (!dropFrame_) {
+		Request *request = requestQueue_.front();
+		if (request->hasPendingBuffers())
+			return;
+
+		/* Must wait for metadata to be filled in before completing. */
+		if (state_ != State::IpaComplete)
+			return;
+
+		pipe_->completeRequest(camera_, request);
+		requestQueue_.pop_front();
+		requestCompleted = true;
+	}
+
+	/*
+	 * Make sure we have three outputs completed in the case of a dropped
+	 * frame.
+	 */
+	if (state_ == State::IpaComplete &&
+	    ((ispOutputCount_ == 3 && dropFrame_) || requestCompleted)) {
+		state_ = State::Idle;
+		if (dropFrame_)
+			LOG(RPI, Info) << "Dropping frame at the request of the IPA";
+	}
+}
+
+void RPiCameraData::tryRunPipeline()
+{
+	FrameBuffer *bayerBuffer, *embeddedBuffer;
+	IPAOperationData op;
+
+	/* If any of our request or buffer queues are empty, we cannot proceed. */
+	if (state_ != State::Idle || requestQueue_.empty() ||
+	    bayerQueue_.empty() || embeddedQueue_.empty())
+		return;
+
+	/* Start with the front of the bayer buffer queue. */
+	bayerBuffer = bayerQueue_.front();
+
+	/*
+	 * Find the embedded data buffer with a matching timestamp to pass to
+	 * the IPA. Any embedded buffers with a timestamp lower than the
+	 * current bayer buffer will be removed and re-queued to the driver.
+	 */
+	embeddedBuffer = updateQueue(embeddedQueue_, bayerBuffer->metadata().timestamp,
+				     unicam_[Unicam::Embedded].dev());
+
+	if (!embeddedBuffer) {
+		LOG(RPI, Debug) << "Could not find matching embedded buffer";
+
+		/*
+		 * Look the other way, try to match a bayer buffer with the
+		 * first embedded buffer in the queue. This will also do some
+		 * housekeeping on the bayer image queue - clear out any
+		 * buffers that are older than the first buffer in the embedded
+		 * queue.
+		 *
+		 * But first check if the embedded queue has emptied out.
+		 */
+		if (embeddedQueue_.empty())
+			return;
+
+		embeddedBuffer = embeddedQueue_.front();
+		bayerBuffer = updateQueue(bayerQueue_, embeddedBuffer->metadata().timestamp,
+					  unicam_[Unicam::Image].dev());
+
+		if (!bayerBuffer) {
+			LOG(RPI, Debug) << "Could not find matching bayer buffer - ending.";
+			return;
+		}
+	}
+
+	/*
+	 * Take the first request from the queue and action the IPA.
+	 * Unicam buffers for the request have already been queued as they come
+	 * in.
+	 */
+	Request *request = requestQueue_.front();
+
+	/*
+	 * Process all the user controls by the IPA.  Once this is complete, we
+	 * queue the ISP output buffer listed in the request to start the HW
+	 * pipeline.
+	 */
+	op.operation = RPI_IPA_EVENT_QUEUE_REQUEST;
+	op.controls = { request->controls() };
+	ipa_->processEvent(op);
+
+	/* Queue up any ISP buffers passed into the request. */
+	for (auto &stream : isp_) {
+		if (stream.isExternal())
+			stream.dev()->queueBuffer(request->findBuffer(&stream));
+	}
+
+	/* Ready to use the buffers, pop them off the queue. */
+	bayerQueue_.pop();
+	embeddedQueue_.pop();
+
+	/* Set our state to say the pipeline is active. */
+	state_ = State::Busy;
+
+	LOG(RPI, Debug) << "Signalling RPI_IPA_EVENT_SIGNAL_ISP_PREPARE:"
+			<< " Bayer buffer id: " << bayerBuffer->cookie()
+			<< " Embedded buffer id: " << embeddedBuffer->cookie();
+
+	op.operation = RPI_IPA_EVENT_SIGNAL_ISP_PREPARE;
+	op.data = { RPiIpaMask::EMBEDDED_DATA | embeddedBuffer->cookie(),
+		    RPiIpaMask::BAYER_DATA | bayerBuffer->cookie() };
+	ipa_->processEvent(op);
+}
+
+void RPiCameraData::tryFlushQueues()
+{
+	/*
+	 * It is possible for us to end up in a situation where all available
+	 * Unicam buffers have been dequeued but do not match.  This can happen
+	 * when the system is heavily loaded and we get out of lock-step with
+	 * the two channels.
+	 *
+	 * In such cases, the best thing to do is to re-queue all the buffers
+	 * and give the hardware a chance to return to lock-step. We do have
+	 * to drop all interim frames.
+	 */
+	if (unicam_[Unicam::Image].getBuffers()->size() == bayerQueue_.size() &&
+	    unicam_[Unicam::Embedded].getBuffers()->size() == embeddedQueue_.size()) {
+		LOG(RPI, Warning) << "Flushing all buffer queues!";
+
+		while (!bayerQueue_.empty()) {
+			unicam_[Unicam::Image].dev()->queueBuffer(bayerQueue_.front());
+			bayerQueue_.pop();
+		}
+
+		while (!embeddedQueue_.empty()) {
+			unicam_[Unicam::Embedded].dev()->queueBuffer(embeddedQueue_.front());
+			embeddedQueue_.pop();
+		}
+	}
+}
+
+FrameBuffer *RPiCameraData::updateQueue(std::queue<FrameBuffer *> &q, uint64_t timestamp,
+					V4L2VideoDevice *dev)
+{
+	while (!q.empty()) {
+		FrameBuffer *b = q.front();
+		if (b->metadata().timestamp < timestamp) {
+			q.pop();
+			dev->queueBuffer(b);
+			LOG(RPI, Error) << "Dropping input frame!";
+		} else if (b->metadata().timestamp == timestamp) {
+			/* The calling function will pop the item from the queue. */
+			return b;
+		} else {
+			break; /* Only higher timestamps from here. */
+		}
+	}
+
+	return nullptr;
+}
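+
+/*
+ * For example, with buffer timestamps {100, 200, 300} queued and a target
+ * timestamp of 200, the 100 buffer is re-queued to the driver (a dropped
+ * frame) and the 200 buffer is returned. A target of 250 re-queues 100 and
+ * 200 and returns nullptr, leaving 300 in the queue for a later match.
+ */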
+
+REGISTER_PIPELINE_HANDLER(PipelineHandlerRPi);
+
+} /* namespace libcamera */
diff --git a/src/libcamera/pipeline/raspberrypi/staggered_ctrl.h b/src/libcamera/pipeline/raspberrypi/staggered_ctrl.h
new file mode 100644
index 000000000000..0403c087c686
--- /dev/null
+++ b/src/libcamera/pipeline/raspberrypi/staggered_ctrl.h
@@ -0,0 +1,236 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2020, Raspberry Pi (Trading) Ltd.
+ *
+ * staggered_ctrl.h - Helper for writing staggered ctrls to a V4L2 device.
+ */
+#pragma once
+
+#include <algorithm>
+#include <array>
+#include <cstdint>
+#include <initializer_list>
+#include <mutex>
+#include <unordered_map>
+
+#include <libcamera/controls.h>
+#include "log.h"
+#include "utils.h"
+#include "v4l2_videodevice.h"
+
+/* For logging... */
+using libcamera::LogCategory;
+using libcamera::LogDebug;
+using libcamera::LogInfo;
+using libcamera::utils::hex;
+
+LOG_DEFINE_CATEGORY(RPI_S_W)
+
+namespace RPi {
+
+class StaggeredCtrl
+{
+public:
+	StaggeredCtrl()
+		: init_(false), setCount_(0), getCount_(0), maxDelay_(0)
+	{
+	}
+
+	~StaggeredCtrl()
+	{
+	}
+
+	operator bool() const
+	{
+		return init_;
+	}
+
+	void init(libcamera::V4L2VideoDevice *dev,
+		  std::initializer_list<std::pair<const uint32_t, uint8_t>> delayList)
+	{
+		std::lock_guard<std::mutex> lock(lock_);
+
+		dev_ = dev;
+		delay_ = delayList;
+		ctrl_.clear();
+
+		/* Find the largest delay across all controls. */
+		maxDelay_ = 0;
+		for (auto const &p : delay_) {
+			LOG(RPI_S_W, Info) << "Init ctrl "
+					   << hex(p.first) << " with delay "
+					   << static_cast<int>(p.second);
+			maxDelay_ = std::max(maxDelay_, p.second);
+		}
+
+		init_ = true;
+	}
+
+	void reset()
+	{
+		std::lock_guard<std::mutex> lock(lock_);
+
+		int lastSetCount = std::max<int>(0, setCount_ - 1);
+		std::unordered_map<uint32_t, int32_t> lastVal;
+
+		/* Reset the counters. */
+		setCount_ = getCount_ = 0;
+
+		/* Look for the last set values. */
+		for (auto const &c : ctrl_)
+			lastVal[c.first] = c.second[lastSetCount].value;
+
+		/* Apply the last set values as the next to be applied. */
+		ctrl_.clear();
+		for (auto &c : lastVal)
+			ctrl_[c.first][setCount_] = CtrlInfo(c.second);
+	}
+
+	bool set(uint32_t ctrl, int32_t value)
+	{
+		std::lock_guard<std::mutex> lock(lock_);
+
+		/* Can we find this ctrl as one that is registered? */
+		if (delay_.find(ctrl) == delay_.end())
+			return false;
+
+		ctrl_[ctrl][setCount_].value = value;
+		ctrl_[ctrl][setCount_].updated = true;
+
+		return true;
+	}
+
+	bool set(std::initializer_list<std::pair<const uint32_t, int32_t>> ctrlList)
+	{
+		std::lock_guard<std::mutex> lock(lock_);
+
+		for (auto const &p : ctrlList) {
+			/* Can we find this ctrl? */
+			if (delay_.find(p.first) == delay_.end())
+				return false;
+
+			ctrl_[p.first][setCount_] = CtrlInfo(p.second);
+		}
+
+		return true;
+	}
+
+	bool set(libcamera::ControlList &controls)
+	{
+		std::lock_guard<std::mutex> lock(lock_);
+
+		for (auto const &p : controls) {
+			/* Can we find this ctrl? */
+			if (delay_.find(p.first) == delay_.end())
+				return false;
+
+			ctrl_[p.first][setCount_] = CtrlInfo(p.second.get<int32_t>());
+			LOG(RPI_S_W, Debug) << "Setting ctrl "
+					    << hex(p.first) << " to "
+					    << ctrl_[p.first][setCount_].value
+					    << " at index "
+					    << setCount_;
+		}
+
+		return true;
+	}
+
+	int write()
+	{
+		std::lock_guard<std::mutex> lock(lock_);
+		libcamera::ControlList controls(dev_->controls());
+
+		for (auto &p : ctrl_) {
+			int delayDiff = maxDelay_ - delay_[p.first];
+			int index = std::max<int>(0, setCount_ - delayDiff);
+
+			if (p.second[index].updated) {
+				/* We need to write this value out. */
+				controls.set(p.first, p.second[index].value);
+				p.second[index].updated = false;
+				LOG(RPI_S_W, Debug) << "Writing ctrl "
+						    << hex(p.first) << " to "
+						    << p.second[index].value
+						    << " at index "
+						    << index;
+			}
+		}
+
+		nextFrame();
+		return dev_->setControls(&controls);
+	}
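+
+	/*
+	 * Worked example with illustrative delays: if exposure has a delay of
+	 * 2 frames and gain a delay of 1, maxDelay_ is 2. Values set together
+	 * in slot S are then written out over successive frames: exposure is
+	 * written when setCount_ == S and gain when setCount_ == S + 1, so
+	 * both take effect on the same output frame.
+	 */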
+
+	void get(std::unordered_map<uint32_t, int32_t> &ctrl, uint8_t offset = 0)
+	{
+		std::lock_guard<std::mutex> lock(lock_);
+
+		/* Account for the offset when advancing the get counter. */
+		getCount_ += offset + 1;
+
+		ctrl.clear();
+		for (auto &p : ctrl_) {
+			int index = std::max<int>(0, getCount_ - maxDelay_);
+			ctrl[p.first] = p.second[index].value;
+			LOG(RPI_S_W, Debug) << "Getting ctrl "
+					    << hex(p.first) << " to "
+					    << p.second[index].value
+					    << " at index "
+					    << index;
+		}
+	}
+
+private:
+	void nextFrame()
+	{
+		/* Advance the control history to the next frame. */
+		int prevCount = setCount_;
+		setCount_++;
+
+		LOG(RPI_S_W, Debug) << "Next frame, set index is " << setCount_;
+
+		for (auto &p : ctrl_) {
+			p.second[setCount_].value = p.second[prevCount].value;
+			p.second[setCount_].updated = false;
+		}
+	}
+
+	/* listSize must be a power of 2. */
+	static constexpr int listSize = (1 << 4);
+	struct CtrlInfo {
+		CtrlInfo()
+			: value(0), updated(false)
+		{
+		}
+
+		CtrlInfo(int32_t value_)
+			: value(value_), updated(true)
+		{
+		}
+
+		int32_t value;
+		bool updated;
+	};
+
+	class CircularArray : public std::array<CtrlInfo, listSize>
+	{
+	public:
+		CtrlInfo &operator[](int index)
+		{
+			return std::array<CtrlInfo, listSize>::operator[](index & (listSize - 1));
+		}
+
+		const CtrlInfo &operator[](int index) const
+		{
+			return std::array<CtrlInfo, listSize>::operator[](index & (listSize - 1));
+		}
+	};
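+
+	/*
+	 * Because listSize is a power of 2, "index & (listSize - 1)" wraps
+	 * the index cheaply: with listSize = 16, index 17 maps to slot 1.
+	 */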
+
+	bool init_;
+	uint32_t setCount_;
+	uint32_t getCount_;
+	uint8_t maxDelay_;
+	libcamera::V4L2VideoDevice *dev_;
+	std::unordered_map<uint32_t, uint8_t> delay_;
+	std::unordered_map<uint32_t, CircularArray> ctrl_;
+	std::mutex lock_;
+};
+
+} /* namespace RPi */
diff --git a/src/libcamera/pipeline/raspberrypi/vcsm.h b/src/libcamera/pipeline/raspberrypi/vcsm.h
new file mode 100644
index 000000000000..fdce0050c26b
--- /dev/null
+++ b/src/libcamera/pipeline/raspberrypi/vcsm.h
@@ -0,0 +1,144 @@
+/* SPDX-License-Identifier: BSD-2-Clause */
+/*
+ * Copyright (C) 2019, Raspberry Pi (Trading) Limited
+ *
+ * vcsm.h - Helper class for vcsm allocations.
+ */
+#pragma once
+
+#include <cstdint>
+#include <cstring>
+#include <iostream>
+#include <map>
+#include <mutex>
+
+#include <fcntl.h>
+#include <linux/vc_sm_cma_ioctl.h>
+#include <sys/ioctl.h>
+#include <sys/mman.h>
+#include <unistd.h>
+
+namespace RPi {
+
+#define VCSM_CMA_DEVICE_NAME "/dev/vcsm-cma"
+
+class Vcsm
+{
+public:
+	Vcsm()
+	{
+		vcsmHandle_ = ::open(VCSM_CMA_DEVICE_NAME, O_RDWR, 0);
+		if (vcsmHandle_ == -1) {
+			std::cerr << "Could not open vcsm device: "
+				  << VCSM_CMA_DEVICE_NAME << std::endl;
+		}
+	}
+
+	~Vcsm()
+	{
+		/* Free all existing allocations. */
+		auto it = allocMap_.begin();
+		while (it != allocMap_.end())
+			it = remove(it->first);
+
+		if (vcsmHandle_ >= 0)
+			::close(vcsmHandle_);
+	}
+
+	void *alloc(const char *name, unsigned int size,
+		    vc_sm_cma_cache_e cache = VC_SM_CMA_CACHE_NONE)
+	{
+		unsigned int pageSize = getpagesize();
+		void *user_ptr;
+		int ret;
+
+		/*
+		 * Round the allocation up to a whole number of pages, e.g. a
+		 * 100 byte request becomes 4096 with a 4 KiB page size.
+		 */
+		size = (size + pageSize - 1) & ~(pageSize - 1);
+
+		struct vc_sm_cma_ioctl_alloc alloc;
+		memset(&alloc, 0, sizeof(alloc));
+		alloc.size = size;
+		alloc.num = 1;
+		alloc.cached = cache;
+		alloc.handle = 0;
+		/*
+		 * Copy at most sizeof(alloc.name) - 1 characters; the
+		 * terminating NUL is guaranteed by the memset() above. A
+		 * fixed 32-byte memcpy() would read past the end of short
+		 * names.
+		 */
+		if (name != nullptr)
+			strncpy(alloc.name, name, sizeof(alloc.name) - 1);
+
+		ret = ::ioctl(vcsmHandle_, VC_SM_CMA_IOCTL_MEM_ALLOC, &alloc);
+
+		if (ret < 0 || alloc.handle < 0) {
+			std::cerr << "vcsm allocation failure for "
+				  << name << std::endl;
+			return nullptr;
+		}
+
+		/* Map the buffer into user space. */
+		user_ptr = ::mmap(0, alloc.size, PROT_READ | PROT_WRITE,
+				  MAP_SHARED, alloc.handle, 0);
+
+		if (user_ptr == MAP_FAILED) {
+			std::cerr << "vcsm mmap failure for " << name << std::endl;
+			::close(alloc.handle);
+			return nullptr;
+		}
+
+		std::lock_guard<std::mutex> lock(lock_);
+		allocMap_.emplace(user_ptr, AllocInfo(alloc.handle,
+						      alloc.size, alloc.vc_handle));
+
+		return user_ptr;
+	}
+
+	void free(void *user_ptr)
+	{
+		std::lock_guard<std::mutex> lock(lock_);
+		remove(user_ptr);
+	}
+
+	unsigned int getVCHandle(void *user_ptr)
+	{
+		std::lock_guard<std::mutex> lock(lock_);
+		auto it = allocMap_.find(user_ptr);
+		if (it != allocMap_.end())
+			return it->second.vcHandle;
+
+		return 0;
+	}
+
+private:
+	struct AllocInfo {
+		AllocInfo(int handle_, int size_, int vcHandle_)
+			: handle(handle_), size(size_), vcHandle(vcHandle_)
+		{
+		}
+
+		int handle;
+		int size;
+		uint32_t vcHandle;
+	};
+
+	/* Map of all allocations that have been requested. */
+	using AllocMap = std::map<void *, AllocInfo>;
+
+	AllocMap::iterator remove(void *user_ptr)
+	{
+		auto it = allocMap_.find(user_ptr);
+		if (it != allocMap_.end()) {
+			int handle = it->second.handle;
+			int size = it->second.size;
+			::munmap(user_ptr, size);
+			::close(handle);
+			/*
+			 * Remove the allocation from the map. This returns
+			 * an iterator to the next element.
+			 */
+			it = allocMap_.erase(it);
+		}
+
+		return it;
+	}
+
+	AllocMap allocMap_;
+	int vcsmHandle_;
+	std::mutex lock_;
+};
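+
+/*
+ * Usage sketch (mirroring configureIPA() in raspberrypi.cpp):
+ *
+ *   RPi::Vcsm vcsm;
+ *   void *lsTable = vcsm.alloc("ls_grid", MAX_LS_GRID_SIZE);
+ *   unsigned int vcHandle = vcsm.getVCHandle(lsTable);
+ *   ...
+ *   vcsm.free(lsTable);
+ */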
+
+} /* namespace RPi */
-- 
Regards,

Laurent Pinchart


