[libcamera-devel] [PATCH 6/6] [PoC/RFC] libcamera: pipeline: Add RaspberryPi handler

Laurent Pinchart laurent.pinchart at ideasonboard.com
Thu Aug 8 23:51:14 CEST 2019


Hi Kieran,

Thank you for the patch.

On Thu, Aug 08, 2019 at 04:12:21PM +0100, Kieran Bingham wrote:
> Utilise the CameraSensor class and construct a pipeline for a single
> sensor on the Unicam, routed through the V4L2 Codec ISP interface.
> 
> Signed-off-by: Kieran Bingham <kieran.bingham at ideasonboard.com>

A pipeline handler for the RPi, very nice :-)

> ---
>  src/libcamera/pipeline/meson.build     |   1 +
>  src/libcamera/pipeline/raspberrypi.cpp | 425 +++++++++++++++++++++++++
>  2 files changed, 426 insertions(+)
>  create mode 100644 src/libcamera/pipeline/raspberrypi.cpp
> 
> diff --git a/src/libcamera/pipeline/meson.build b/src/libcamera/pipeline/meson.build
> index 0d466225a72e..7ed7b26f3033 100644
> --- a/src/libcamera/pipeline/meson.build
> +++ b/src/libcamera/pipeline/meson.build
> @@ -1,4 +1,5 @@
>  libcamera_sources += files([
> +    'raspberrypi.cpp',

I would expect this handler to eventually reach the same level of
complexity as the IPU3 and RkISP1 pipeline handlers; should it get its
own directory ?

>      'uvcvideo.cpp',
>      'vimc.cpp',
>  ])
> diff --git a/src/libcamera/pipeline/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi.cpp
> new file mode 100644
> index 000000000000..4c4c3dea0113
> --- /dev/null
> +++ b/src/libcamera/pipeline/raspberrypi.cpp
> @@ -0,0 +1,425 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2019, Google Inc.
> + *
> + * raspberrypi.cpp - Pipeline handler for raspberrypi devices

s/raspberrypi devices/Raspberry Pi devices/ ?

Should we name it after the Raspberry Pi, or after the SoC ? The handler
could be used on other systems based on the same SoC (or family of
SoCs).

> + */
> +
> +#include <libcamera/camera.h>
> +#include <libcamera/request.h>
> +#include <libcamera/stream.h>
> +
> +#include "camera_sensor.h"
> +#include "device_enumerator.h"
> +#include "log.h"
> +#include "media_device.h"
> +#include "pipeline_handler.h"
> +#include "utils.h"
> +#include "v4l2_controls.h"
> +#include "v4l2_videodevice.h"
> +
> +namespace libcamera {
> +
> +LOG_DEFINE_CATEGORY(RPI)
> +
> +class RPiCameraData : public CameraData
> +{
> +public:
> +	RPiCameraData(PipelineHandler *pipe)
> +		: CameraData(pipe), sensor_(nullptr), unicam_(nullptr),
> +		  isp_(nullptr)
> +	{
> +	}
> +
> +	~RPiCameraData()
> +	{
> +		bayerBuffers_.destroyBuffers();

Shouldn't you also delete sensor_ ?
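
Something along these lines (assuming sensor_ is owned by RPiCameraData,
as match() suggests):

	~RPiCameraData()
	{
		bayerBuffers_.destroyBuffers();

		delete sensor_;
		delete unicam_;
		delete isp_;
	}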

> +		delete unicam_;
> +		delete isp_;
> +	}
> +
> +	void sensorReady(Buffer *buffer);
> +	void ispOutputReady(Buffer *buffer);
> +	void ispCaptureReady(Buffer *buffer);
> +
> +	CameraSensor *sensor_;
> +	V4L2VideoDevice *unicam_;
> +	V4L2M2MDevice *isp_;
> +	Stream stream_;
> +
> +	BufferPool bayerBuffers_;
> +	std::vector<std::unique_ptr<Buffer>> rawBuffers_;
> +};
> +
> +class RPiCameraConfiguration : public CameraConfiguration
> +{
> +public:
> +	RPiCameraConfiguration();
> +
> +	Status validate() override;
> +};
> +
> +class PipelineHandlerRPi : public PipelineHandler
> +{
> +public:
> +	PipelineHandlerRPi(CameraManager *manager);
> +	~PipelineHandlerRPi();
> +
> +	CameraConfiguration *
> +	generateConfiguration(Camera *camera,
> +			      const StreamRoles &roles) override;
> +	int configure(Camera *camera,
> +		      CameraConfiguration *config) override;
> +
> +	int allocateBuffers(Camera *camera,
> +			    const std::set<Stream *> &streams) override;
> +	int freeBuffers(Camera *camera,
> +			const std::set<Stream *> &streams) override;
> +
> +	int start(Camera *camera) override;
> +	void stop(Camera *camera) override;
> +
> +	int queueRequest(Camera *camera, Request *request) override;
> +
> +	bool match(DeviceEnumerator *enumerator) override;
> +
> +private:
> +	RPiCameraData *cameraData(const Camera *camera)
> +	{
> +		return static_cast<RPiCameraData *>(
> +			PipelineHandler::cameraData(camera));
> +	}
> +
> +	std::shared_ptr<MediaDevice> unicam_;
> +	std::shared_ptr<MediaDevice> codec_;
> +};
> +
> +RPiCameraConfiguration::RPiCameraConfiguration()
> +	: CameraConfiguration()
> +{
> +}
> +
> +CameraConfiguration::Status RPiCameraConfiguration::validate()
> +{
> +	Status status = Valid;
> +
> +	if (config_.empty())
> +		return Invalid;
> +
> +	/* Todo: Experiment with increased stream support through the ISP. */

s/Todo:/\todo/

> +	if (config_.size() > 1) {
> +		config_.resize(1);
> +		status = Adjusted;
> +	}
> +
> +	StreamConfiguration &cfg = config_[0];
> +
> +	/* Todo: restrict to hardware capabilities. */

I think this one should be addressed before merging the code.
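
A rough sketch of what I have in mind, assuming validate() gets access
to the sensor (the sensor_ pointer below is hypothetical, it isn't part
of RPiCameraConfiguration in this patch):

	const Size max = sensor_->resolution();

	if (cfg.size.width > max.width) {
		cfg.size.width = max.width;
		status = Adjusted;
	}

	if (cfg.size.height > max.height) {
		cfg.size.height = max.height;
		status = Adjusted;
	}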

> +
> +	cfg.bufferCount = 4;
> +
> +	return status;
> +}
> +
> +PipelineHandlerRPi::PipelineHandlerRPi(CameraManager *manager)
> +	: PipelineHandler(manager), unicam_(nullptr), codec_(nullptr)
> +{
> +}
> +
> +PipelineHandlerRPi::~PipelineHandlerRPi()
> +{
> +	if (unicam_)
> +		unicam_->release();
> +
> +	if (codec_)
> +		codec_->release();
> +}
> +
> +CameraConfiguration *
> +PipelineHandlerRPi::generateConfiguration(Camera *camera,
> +					  const StreamRoles &roles)
> +{
> +	CameraConfiguration *config = new RPiCameraConfiguration();
> +
> +	if (roles.empty())
> +		return config;
> +
> +	StreamConfiguration cfg{};
> +	cfg.pixelFormat = V4L2_PIX_FMT_YUYV;
> +	cfg.size = { 1920, 1080 }; // data->sensor_->resolution();

Let's remove commented-out code.

What prevents you from using the sensor resolution, and how was 1080p
selected as the default resolution ?
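
If nothing prevents it, something like this (reusing the cameraData()
helper already defined in this handler) would avoid the hard-coded
value:

	RPiCameraData *data = cameraData(camera);

	StreamConfiguration cfg{};
	cfg.pixelFormat = V4L2_PIX_FMT_YUYV;
	cfg.size = data->sensor_->resolution();
	cfg.bufferCount = 4;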

> +	cfg.bufferCount = 4;
> +
> +	LOG(RPI, Debug) << "Config default as " << cfg.toString();

I think the configuration is printed in the caller already.

> +
> +	config->addConfiguration(cfg);
> +
> +	config->validate();
> +
> +	return config;
> +}
> +
> +int PipelineHandlerRPi::configure(Camera *camera,
> +				  CameraConfiguration *config)

This fits on a single line.

> +{
> +	RPiCameraData *data = cameraData(camera);
> +	StreamConfiguration &cfg = config->at(0);
> +	int ret;
> +
> +	Size sensorSize = { 1920, 1080 };
> +	Size outputSize = { 1920, 1088 };
> +
> +	V4L2DeviceFormat format = {};
> +	format.size = sensorSize;
> +	format.fourcc = V4L2_PIX_FMT_SRGGB10P;

This all seems to lack genericity :-) I think that, at the very least,
the format should support different Bayer patterns and bpp values.

Don't you need to set the format on the sensor subdev ?
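
Something along these lines (modulo the exact CameraSensor API, and with
the media bus code only as an example) would also cover the subdev:

	V4L2SubdeviceFormat sensorFormat =
		data->sensor_->getFormat({ MEDIA_BUS_FMT_SRGGB10_1X10 },
					 sensorSize);
	ret = data->sensor_->setFormat(&sensorFormat);
	if (ret)
		return ret;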

> +
> +	LOG(RPI, Debug) << "Setting format to " << format.toString();
> +
> +	ret = data->unicam_->setFormat(&format);
> +	if (ret)
> +		return ret;
> +
> +	if (format.size != sensorSize ||
> +	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
> +		LOG(RPI, Error)
> +			<< "Failed to set format on Video device: "
> +			<< format.toString();
> +		return -EINVAL;
> +	}
> +
> +	format.size = outputSize;
> +
> +	ret = data->isp_->output()->setFormat(&format);
> +
> +	if (format.size != outputSize ||
> +	    format.fourcc != V4L2_PIX_FMT_SRGGB10P) {
> +		LOG(RPI, Error) << "Failed to set format on ISP output device: "
> +				<< format.toString();
> +		return -EINVAL;
> +	}
> +
> +	/* Configure the ISP based to generate the requested size and format. */

Are you missing something after "based" ?

> +	format.size = cfg.size;
> +	format.fourcc = cfg.pixelFormat;
> +
> +	ret = data->isp_->capture()->setFormat(&format);
> +
> +	if (format.size != cfg.size ||
> +	    format.fourcc != cfg.pixelFormat) {
> +		LOG(RPI, Error)
> +			<< "Failed to set format on ISP capture device: "
> +			<< format.toString();
> +		return -EINVAL;
> +	}
> +
> +	cfg.setStream(&data->stream_);
> +
> +	return 0;
> +}
> +
> +int PipelineHandlerRPi::allocateBuffers(Camera *camera,
> +					const std::set<Stream *> &streams)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	Stream *stream = *streams.begin();
> +	const StreamConfiguration &cfg = stream->configuration();
> +	int ret;
> +
> +	LOG(RPI, Debug) << "Requesting " << cfg.bufferCount << " buffers";

I'd drop this message, or move it to Camera::allocateBuffers().

> +
> +	/*
> +	 * Buffers are allocated on the camera, and the capture pad of the ISP
> +	 *      unicam -> isp.output -> isp.capture -> Application
> +	 */
> +
> +	/* Create a new intermediate buffer pool */

s/pool/pool./ (same for all the other comments in this file that don't
end with a period)

> +	data->bayerBuffers_.createBuffers(cfg.bufferCount);
> +
> +	/* Tie the unicam video buffers to the intermediate pool */
> +	ret = data->unicam_->exportBuffers(&data->bayerBuffers_);
> +	if (ret)
> +		return ret;
> +
> +	ret = data->isp_->output()->importBuffers(&data->bayerBuffers_);
> +	if (ret)
> +		return ret;
> +
> +	/* Tie the stream buffers to the capture device of the ISP */
> +	if (stream->memoryType() == InternalMemory)
> +		ret = data->isp_->capture()->exportBuffers(&stream->bufferPool());
> +	else
> +		ret = data->isp_->capture()->importBuffers(&stream->bufferPool());
> +
> +	return ret;
> +}
> +
> +int PipelineHandlerRPi::freeBuffers(Camera *camera,
> +				    const std::set<Stream *> &streams)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	int ret;
> +
> +	ret = data->unicam_->releaseBuffers();
> +	ret = data->isp_->output()->releaseBuffers();
> +	ret = data->isp_->capture()->releaseBuffers();

You're losing the first two error values.
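
One way to keep releasing everything while still reporting a failure, as
a sketch:

	int ret = data->unicam_->releaseBuffers();
	int err;

	err = data->isp_->output()->releaseBuffers();
	if (!ret)
		ret = err;

	err = data->isp_->capture()->releaseBuffers();
	if (!ret)
		ret = err;

	data->bayerBuffers_.destroyBuffers();

	return ret;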

> +
> +	data->bayerBuffers_.destroyBuffers();
> +
> +	return ret;
> +}
> +
> +int PipelineHandlerRPi::start(Camera *camera)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	int ret;
> +
> +	data->rawBuffers_ = data->unicam_->queueAllBuffers();
> +	if (data->rawBuffers_.empty()) {
> +		LOG(RPI, Debug) << "Failed to queue unicam buffers";
> +		return -EINVAL;
> +	}
> +
> +	LOG(RPI, Warning) << "Using hard-coded exposure/gain defaults";
> +
> +	V4L2ControlList controls;
> +	controls.add(V4L2_CID_EXPOSURE, 1700);
> +	controls.add(V4L2_CID_ANALOGUE_GAIN, 180);
> +	ret = data->unicam_->setControls(&controls);
> +	if (ret) {
> +		LOG(RPI, Error) << "Failed to set controls";
> +		return ret;
> +	}
> +
> +	ret = data->isp_->output()->streamOn();
> +	if (ret)
> +		return ret;
> +
> +	ret = data->isp_->capture()->streamOn();
> +	if (ret)
> +		return ret;
> +
> +	ret = data->unicam_->streamOn();
> +	if (ret)
> +		return ret;

Shouldn't you stop streaming on the successfully started streams when an
error happens ?
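
i.e. something like, unwinding what has already been started:

	ret = data->isp_->capture()->streamOn();
	if (ret) {
		data->isp_->output()->streamOff();
		return ret;
	}

	ret = data->unicam_->streamOn();
	if (ret) {
		data->isp_->capture()->streamOff();
		data->isp_->output()->streamOff();
		return ret;
	}

	return 0;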

> +
> +	return ret;

	return 0;

> +}
> +
> +void PipelineHandlerRPi::stop(Camera *camera)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +
> +	data->isp_->capture()->streamOff();
> +	data->isp_->output()->streamOff();
> +	data->unicam_->streamOff();
> +
> +	data->rawBuffers_.clear();
> +}
> +
> +int PipelineHandlerRPi::queueRequest(Camera *camera, Request *request)
> +{
> +	RPiCameraData *data = cameraData(camera);
> +	Buffer *buffer = request->findBuffer(&data->stream_);
> +	if (!buffer) {
> +		LOG(RPI, Error)
> +			<< "Attempt to queue request with invalid stream";
> +
> +		return -ENOENT;

Can this happen ?

> +	}
> +
> +	int ret = data->isp_->capture()->queueBuffer(buffer);
> +	if (ret < 0)
> +		return ret;
> +
> +	PipelineHandler::queueRequest(camera, request);
> +
> +	return 0;
> +}
> +
> +bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
> +{
> +	DeviceMatch unicam("unicam");
> +	DeviceMatch codec("bcm2835-codec");
> +
> +	/* We explicitly need the ISP device from the MMAL codec driver. */
> +	codec.add("bcm2835-codec-isp");

Is there any subdev in the unicam device that we could match on ?

> +
> +	unicam_ = enumerator->search(unicam);
> +	if (!unicam_)
> +		return false;
> +
> +	codec_ = enumerator->search(codec);
> +	if (!codec_)
> +		return false;
> +
> +	unicam_->acquire();
> +	codec_->acquire();
> +
> +	std::unique_ptr<RPiCameraData> data = utils::make_unique<RPiCameraData>(this);
> +
> +	/* Locate and open the unicam video node. */
> +	data->unicam_ = new V4L2VideoDevice(unicam_->getEntityByName("unicam"));
> +	if (data->unicam_->open())
> +		return false;
> +
> +	/* Locate the ISP M2M node */
> +	MediaEntity *isp = codec_->getEntityByName("bcm2835-codec-isp");
> +	if (!isp)
> +		return false;
> +
> +	data->isp_ = new V4L2M2MDevice(isp->deviceNode());
> +	if (data->isp_->status()) {
> +		LOG(RPI, Error) << "Could not create the ISP device";
> +		return false;
> +	}
> +
> +	data->unicam_->bufferReady.connect(data.get(), &RPiCameraData::sensorReady);
> +	data->isp_->output()->bufferReady.connect(data.get(), &RPiCameraData::ispOutputReady);
> +	data->isp_->capture()->bufferReady.connect(data.get(), &RPiCameraData::ispCaptureReady);
> +
> +	/* Identify the sensor */
> +	for (MediaEntity *entity : unicam_->entities()) {
> +		if (entity->function() == MEDIA_ENT_F_CAM_SENSOR) {
> +			data->sensor_ = new CameraSensor(entity);
> +			break;
> +		}
> +	}
> +
> +	if (!data->sensor_)
> +		return false;
> +
> +	int ret = data->sensor_->init();
> +	if (ret)
> +		return false;

No need for an intermediate variable, you can write

	if (data->sensor_->init())
		return false;

> +
> +	/* Create and register the camera. */
> +	std::set<Stream *> streams{ &data->stream_ };
> +	std::shared_ptr<Camera> camera =
> +		Camera::create(this, data->sensor_->entity()->name(), streams);
> +	registerCamera(std::move(camera), std::move(data));
> +
> +	return true;
> +}
> +
> +void RPiCameraData::sensorReady(Buffer *buffer)
> +{
> +	/* Deliver the frame from the sensor to the ISP */

You should skip this when buffer->status() == Buffer::BufferCancelled
(see the IPU3 pipeline handler).
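
i.e.:

	if (buffer->status() == Buffer::BufferCancelled)
		return;

	isp_->output()->queueBuffer(buffer);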

> +	isp_->output()->queueBuffer(buffer);
> +}
> +
> +void RPiCameraData::ispOutputReady(Buffer *buffer)
> +{
> +	/* Return a completed buffer from the ISP back to the sensor */

Same comment here.

> +	unicam_->queueBuffer(buffer);
> +}
> +
> +void RPiCameraData::ispCaptureReady(Buffer *buffer)
> +{
> +	Request *request = buffer->request();
> +
> +	pipe_->completeBuffer(camera_, request, buffer);
> +	pipe_->completeRequest(camera_, request);
> +}
> +
> +REGISTER_PIPELINE_HANDLER(PipelineHandlerRPi);
> +
> +} /* namespace libcamera */

-- 
Regards,

Laurent Pinchart
