[libcamera-devel] [PATCH] libcamera: pipeline: Add IMX8 ISI pipeline

Jacopo Mondi jacopo at jmondi.org
Thu Nov 10 10:52:55 CET 2022


Hi Kieran

On Mon, Oct 31, 2022 at 12:26:17PM +0000, Kieran Bingham wrote:
> Quoting Jacopo Mondi via libcamera-devel (2022-10-18 17:48:52)
> > Add a pipeline handler for the ISI capture interface found on
> > several versions of the i.MX8 SoC generation.
> >
> > The pipeline handler supports capturing up to two streams in YUV, RGB or
> > RAW formats.
> >
> > Signed-off-by: Jacopo Mondi <jacopo at jmondi.org>
> > ---
> >  meson_options.txt                            |   2 +-
> >  src/libcamera/pipeline/imx8-isi/imx8-isi.cpp | 856 +++++++++++++++++++
> >  src/libcamera/pipeline/imx8-isi/meson.build  |   5 +
> >  3 files changed, 862 insertions(+), 1 deletion(-)
> >  create mode 100644 src/libcamera/pipeline/imx8-isi/imx8-isi.cpp
> >  create mode 100644 src/libcamera/pipeline/imx8-isi/meson.build
> >
> > diff --git a/meson_options.txt b/meson_options.txt
> > index f1d678089452..1ba6778ce257 100644
> > --- a/meson_options.txt
> > +++ b/meson_options.txt
> > @@ -37,7 +37,7 @@ option('lc-compliance',
> >
> >  option('pipelines',
> >          type : 'array',
> > -        choices : ['ipu3', 'raspberrypi', 'rkisp1', 'simple', 'uvcvideo', 'vimc'],
> > +        choices : ['imx8-isi', 'ipu3', 'raspberrypi', 'rkisp1', 'simple', 'uvcvideo', 'vimc'],
> >          description : 'Select which pipeline handlers to include')
> >
> >  option('qcam',
> > diff --git a/src/libcamera/pipeline/imx8-isi/imx8-isi.cpp b/src/libcamera/pipeline/imx8-isi/imx8-isi.cpp
> > new file mode 100644
> > index 000000000000..d404d00353c4
> > --- /dev/null
> > +++ b/src/libcamera/pipeline/imx8-isi/imx8-isi.cpp
> > @@ -0,0 +1,856 @@
> > +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> > +/*
> > + * Copyright (C) 2022 - Jacopo Mondi <jacopo at jmondi.org>
> > + *
> > + * imx8-isi.cpp - Pipeline handler for ISI interface found on NXP i.MX8 SoC
> > + */
> > +
> > +#include <algorithm>
> > +#include <map>
> > +#include <memory>
> > +#include <set>
> > +#include <sstream>
> > +#include <string>
> > +#include <vector>
> > +
> > +#include <libcamera/base/log.h>
> > +#include <libcamera/base/utils.h>
> > +
> > +#include <libcamera/camera_manager.h>
> > +#include <libcamera/formats.h>
> > +#include <libcamera/geometry.h>
> > +#include <libcamera/stream.h>
> > +
> > +#include "libcamera/internal/bayer_format.h"
> > +#include "libcamera/internal/camera.h"
> > +#include "libcamera/internal/camera_sensor.h"
> > +#include "libcamera/internal/device_enumerator.h"
> > +#include "libcamera/internal/media_device.h"
> > +#include "libcamera/internal/pipeline_handler.h"
> > +#include "libcamera/internal/v4l2_subdevice.h"
> > +#include "libcamera/internal/v4l2_videodevice.h"
> > +
> > +#include "linux/media-bus-format.h"
> > +
> > +namespace libcamera {
> > +
> > +LOG_DEFINE_CATEGORY(ISI)
> > +
> > +class PipelineHandlerISI;
> > +
> > +class ISICameraData : public Camera::Private
> > +{
> > +public:
> > +       ISICameraData(PipelineHandler *ph)
> > +               : Camera::Private(ph)
> > +       {
> > +       }
> > +
> > +       PipelineHandlerISI *pipe();
> > +
> > +       int init();
> > +
> > +       /*
> > +        * stream1_ maps on the first ISI channel, stream2_ on the second one.
> > +        *
> > +        * \todo Assume 2 channels only for now, as that's the number of
> > +        * available channels on i.MX8MP.
>
> Will we be able to identify the platform capabilities from the kernel?
> or do we have to keep a table here?
>

We can infer that from the media device I guess

>
> > +        */
> > +       unsigned int pipeIndex(const Stream *stream)
> > +       {
> > +               return stream == &stream1_ ? 0 : 1;
> > +       }
> > +
> > +       std::unique_ptr<CameraSensor> sensor;
> > +       std::unique_ptr<V4L2Subdevice> csis;
> > +
> > +       Stream stream1_;
> > +       Stream stream2_;
> > +
> > +       std::vector<Stream *> enabledStreams_;
> > +
> > +       unsigned int id_;
> > +};
> > +
> > +class ISICameraConfiguration : public CameraConfiguration
> > +{
> > +public:
> > +       /*
> > +        * formatsMap records the association between an output pixel format
> > +        * and the combination of V4L2 pixel format and media bus codes that have
> > +        * to be applied to the pipeline.
> > +        */
> > +       struct PipeFormat {
> > +               V4L2PixelFormat fourcc;
> > +               unsigned int isiCode;
> > +               unsigned int sensorCode;
> > +       };
> > +
> > +       using FormatMap = std::map<PixelFormat, PipeFormat>;
> > +
> > +       ISICameraConfiguration(ISICameraData *data)
> > +               : data_(data)
> > +       {
> > +       }
> > +
> > +       Status validate() override;
> > +
> > +       static const FormatMap formatsMap;
> > +
> > +       V4L2SubdeviceFormat sensorFormat_;
> > +
> > +private:
> > +       const ISICameraData *data_;
> > +};
> > +
> > +class PipelineHandlerISI : public PipelineHandler
> > +{
> > +public:
> > +       PipelineHandlerISI(CameraManager *manager);
> > +
> > +       bool match(DeviceEnumerator *enumerator) override;
> > +
> > +       CameraConfiguration *generateConfiguration(Camera *camera,
> > +                                                  const StreamRoles &roles) override;
> > +       int configure(Camera *camera, CameraConfiguration *config) override;
> > +
> > +       int exportFrameBuffers(Camera *camera, Stream *stream,
> > +                              std::vector<std::unique_ptr<FrameBuffer>> *buffers) override;
> > +
> > +       int start(Camera *camera, const ControlList *controls) override;
> > +
> > +protected:
> > +       void stopDevice(Camera *camera) override;
> > +
> > +       int queueRequestDevice(Camera *camera, Request *request) override;
> > +
> > +private:
> > +       static constexpr Size previewSize = { 1920, 1080 };
>
> Only curiously, why is this a fixed constant here ? Perhaps it's used
> multiple times ... I guess I'll see below
>
> Are the input limits of the ISI captured in here anywhere?

validate() handles that

>
> > +
> > +       struct Pipe {
> > +               std::unique_ptr<V4L2Subdevice> isi;
> > +               std::unique_ptr<V4L2VideoDevice> capture;
> > +       };
> > +
> > +       ISICameraData *cameraData(Camera *camera)
> > +       {
> > +               return static_cast<ISICameraData *>(camera->_d());
> > +       }
> > +
> > +       Pipe *pipeFromStream(const Camera *camera, const Stream *stream);
> > +
> > +       void bufferReady(FrameBuffer *buffer);
> > +
> > +       MediaDevice *isiDev_;
> > +
> > +       std::unique_ptr<V4L2Subdevice> crossbar_;
> > +       std::vector<Pipe> pipes_;
> > +};
> > +
> > +/* -----------------------------------------------------------------------------
> > + * Camera Data
> > + */
> > +
> > +PipelineHandlerISI *ISICameraData::pipe()
> > +{
> > +       return static_cast<PipelineHandlerISI *>(Camera::Private::pipe());
> > +}
> > +
> > +/* Open and initialize pipe components. */
> > +int ISICameraData::init()
> > +{
> > +       int ret = sensor->init();
> > +       if (ret)
> > +               return ret;
> > +
> > +       ret = csis->open();
> > +       if (ret)
> > +               return ret;
> > +
> > +       properties_ = sensor->properties();
> > +
> > +       return 0;
> > +}
> > +
> > +/* -----------------------------------------------------------------------------
> > + * Camera Configuration
> > + */
> > +
> > +const ISICameraConfiguration::FormatMap ISICameraConfiguration::formatsMap = {
> > +       {
> > +               formats::YUYV,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_YUYV),
> > +                 MEDIA_BUS_FMT_YUV8_1X24,
> > +                 MEDIA_BUS_FMT_UYVY8_1X16 },
> > +       },
> > +       {
> > +               formats::RGB565,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_RGB565),
> > +                 MEDIA_BUS_FMT_RGB888_1X24,
> > +                 MEDIA_BUS_FMT_RGB565_1X16 },
> > +       },
> > +       {
> > +               formats::SBGGR8,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR8),
> > +                 MEDIA_BUS_FMT_SBGGR8_1X8,
> > +                 MEDIA_BUS_FMT_SBGGR8_1X8 },
> > +       },
> > +       {
> > +               formats::SBGGR10,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR10),
> > +                 MEDIA_BUS_FMT_SBGGR10_1X10,
> > +                 MEDIA_BUS_FMT_SBGGR10_1X10 },
> > +       },
> > +       {
> > +               formats::SGBRG10,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG10),
> > +                 MEDIA_BUS_FMT_SGBRG10_1X10,
> > +                 MEDIA_BUS_FMT_SGBRG10_1X10 },
> > +       },
> > +       {
> > +               formats::SGRBG10,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG10),
> > +                 MEDIA_BUS_FMT_SGRBG10_1X10,
> > +                 MEDIA_BUS_FMT_SGRBG10_1X10 },
> > +       },
> > +       {
> > +               formats::SRGGB10,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB10),
> > +                 MEDIA_BUS_FMT_SRGGB10_1X10,
> > +                 MEDIA_BUS_FMT_SRGGB10_1X10 },
> > +       },
> > +       {
> > +               formats::SBGGR12,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_SBGGR12),
> > +                 MEDIA_BUS_FMT_SBGGR12_1X12,
> > +                 MEDIA_BUS_FMT_SBGGR12_1X12 },
> > +       },
> > +       {
> > +               formats::SGBRG12,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_SGBRG12),
> > +                 MEDIA_BUS_FMT_SGBRG12_1X12,
> > +                 MEDIA_BUS_FMT_SGBRG12_1X12 },
> > +       },
> > +       {
> > +               formats::SGRBG12,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_SGRBG12),
> > +                 MEDIA_BUS_FMT_SGRBG12_1X12,
> > +                 MEDIA_BUS_FMT_SGRBG12_1X12 },
> > +       },
> > +       {
> > +               formats::SRGGB12,
> > +               { V4L2PixelFormat(V4L2_PIX_FMT_SRGGB12),
> > +                 MEDIA_BUS_FMT_SRGGB12_1X12,
> > +                 MEDIA_BUS_FMT_SRGGB12_1X12 },
> > +       },
> > +};
>
>
> It pains me to see so much format mapping on a pipeline ...
>
> I don't yet understand what the platform specific parts here, or why
> this isn't possible to be handled by the

the ... ? :)

>
> What happens with 14 bit bayer sensors or 16 bit bayer sensors ?
>

For RAW formats that concerns me less. A new entry should be added
here.

I'm more concerned about how to handle colorspace conversion
(YUV<->RGB) as it is possible to generate YUV from RGB and vice versa,
while we here map YUV on YUYV and RGB on RGB565 only.

>
> > +
> > +CameraConfiguration::Status ISICameraConfiguration::validate()
> > +{
> > +       Status status = Valid;
> > +
> > +       std::set<Stream *> availableStreams = {
> > +               const_cast<Stream *>(&data_->stream1_),
> > +               const_cast<Stream *>(&data_->stream2_)
> > +       };
> > +
> > +       if (config_.empty())
> > +               return Invalid;
> > +
> > +       /* Assume at most two streams available. */
> > +       if (config_.size() > 2) {
> > +               config_.resize(2);
> > +               status = Adjusted;
> > +       }
> > +
> > +       /*
> > +        * If more than a single stream is requested, the maximum allowed image
> > +        * width is 2048. Cap the maximum image size accordingly.
>
> Is this input image size, or output image size?

Input size

>
> I think I interpret it as output image size. What are the input
> limitations, and how are they handled?
>
> > +        */
> > +       CameraSensor *sensor = data_->sensor.get();
> > +       Size maxResolution = sensor->resolution();
> > +       if (config_.size() == 2)
> > +               maxResolution.boundTo({ std::min(2048U, maxResolution.width),
> > +                                       maxResolution.height });
>
> If there is a single raw stream, does the width need to be bound to
> 4096?

That's my understanding

>
> Are there any height restrictions at all which need to be considered ?
>

Not that I'm aware of

>
> > +       /* Indentify the largest RAW stream, if any. */
> > +       const ISICameraConfiguration::PipeFormat *pipeConfig = nullptr;
> > +       StreamConfiguration *rawConfig = nullptr;
> > +       Size maxRawSize;
> > +
> > +       for (StreamConfiguration &cfg : config_) {
> > +               /* Make sure format is supported and default to YUV if it's not. */
> > +               auto it = formatsMap.find(cfg.pixelFormat);
> > +               if (it == formatsMap.end()) {
> > +                       LOG(ISI, Warning) << "Unsupported format: " << cfg.pixelFormat
> > +                                         << " - adjusted to YUV";
> > +                       it = formatsMap.find(formats::YUYV);
> > +                       ASSERT(it != formatsMap.end());
> > +
> > +                       cfg.pixelFormat = it->first;
> > +                       status = Adjusted;
> > +               }
> > +
> > +               const PixelFormatInfo info = PixelFormatInfo::info(cfg.pixelFormat);
> > +               if (info.colourEncoding != PixelFormatInfo::ColourEncodingRAW)
> > +                       continue;
> > +
> > +               /* Cap the RAW stream size to the maximum resolution. */
> > +               Size configSize = cfg.size;
> > +               cfg.size.boundTo(maxResolution);
> > +               if (cfg.size != configSize) {
> > +                       LOG(ISI, Warning)
> > +                               << "RAW Stream configuration adjusted to "
> > +                               << cfg.size;
> > +                       status = Adjusted;
> > +               }
> > +
> > +               if (cfg.size > maxRawSize) {
> > +                       /* Store the stream and the pipe configurations. */
> > +                       pipeConfig = &it->second;
> > +                       maxRawSize = cfg.size;
> > +                       rawConfig = &cfg;
> > +               }
> > +
> > +               /* All the RAW streams must have the same format. */
> > +               if (rawConfig && rawConfig->pixelFormat != cfg.pixelFormat) {
> > +                       LOG(ISI, Error)
> > +                               << "All the RAW streams must have the same format.";
> > +                       return Invalid;
> > +               }
> > +
> > +               /* Assign streams in the order they are presented, with RAW first. */
> > +               auto stream = availableStreams.extract(availableStreams.begin());
> > +               cfg.setStream(stream.value());
> > +       }
> > +
> > +       /* Now re-iterate the YUV streams to adjust formats and sizes. */
> > +       Size maxYuvSize;
> > +       for (StreamConfiguration &cfg : config_) {
> > +               const PixelFormatInfo info = PixelFormatInfo::info(cfg.pixelFormat);
> > +               if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)
> > +                       continue;
> > +
> > +               if (rawConfig) {
> > +                       /*
> > +                        * The ISI can perform color conversion (RGB<->YUV) but
> > +                        * not debayering. If a RAW stream is requested all
> > +                        * other streams must have the same RAW format.
> > +                        */
> > +                       cfg.pixelFormat = rawConfig->pixelFormat;
> > +                       status = Adjusted;
> > +
> > +                       /* The RAW stream size caps the YUV stream sizes. */
> > +                       cfg.size.boundTo(rawConfig->size);
> > +
> > +                       LOG(ISI, Debug) << "Stream configuration adjusted to: "
> > +                                       << cfg.size << "/" << rawConfig->pixelFormat;
> > +                       continue;
> > +               }
> > +
> > +               /* Cap the YUV stream size to the maximum accepted resolution. */
> > +               Size configSize = cfg.size;
> > +               cfg.size.boundTo(maxResolution);
> > +               if (cfg.size != configSize) {
> > +                       LOG(ISI, Warning)
> > +                               << "Stream configuration adjusted to " << cfg.size;
> > +                       status = Adjusted;
> > +               }
> > +
> > +               /* The largest YUV stream picks the pipeConfig. */
> > +               if (cfg.size > maxYuvSize) {
> > +                       pipeConfig = &formatsMap.find(cfg.pixelFormat)->second;
> > +                       maxYuvSize = cfg.size;
> > +               }
> > +
> > +               /* Assign streams in the order they are presented. */
> > +               auto stream = availableStreams.extract(availableStreams.begin());
> > +               cfg.setStream(stream.value());
> > +       }
> > +
> > +       /*
> > +        * Sensor format configuration: if a RAW stream is requested, use its
> > +        * configuration, otherwise use the largerst YUV one.
> > +        *
> > +        * \todo The sensor format selection policies could be changed to
> > +        * prefer operating the sensor at full resolution to prioritize
> > +        * image quality and FOV in exchange of a usually slower frame rate.
> > +        * Usage of the STILL_CAPTURE role could be consider for this.
> > +        */
> > +       V4L2SubdeviceFormat sensorFormat{};
> > +       sensorFormat.mbus_code = pipeConfig->sensorCode;
> > +       sensorFormat.size = rawConfig ? rawConfig->size : maxYuvSize;
> > +
> > +       LOG(ISI, Debug) << "Computed sensor configuration: " << sensorFormat;
> > +
> > +       /*
> > +        * We can't use CameraSensor::getFormat() as it might return a
> > +        * format larger than our strict width limit, as that function
> > +        * prioritizes formats with the same FOV ratio over formats with less
> > +        * difference in size.
>
> Do we need to make updates to CameraSensor::getFormat to facilitate
> stricter restrictions?
>

Not sure, I think we already overdo it in CameraSensor::getFormat() and
I'm not sure allowing it to handle more constraints is the right way
forward

>
> > +        *
> > +        * Manually walk all the sensor supported sizes searching for
> > +        * the smallest larger format without considering the FOV ratio
> > +        * as the ISI can freely scale.
>
> Will that break the aspect ratio of the image without the user expecting
> it ?
>

It might change the FOV I guess.
The alternative is implementing here more logic to filter, on FOV
ratio, resolutions that are "big enough but that do not exceed the
input limits". As most YUV sensors are mode based, I'm not sure how
many formats in that range we'll have to choose from

> > +        */
> > +       auto sizes = sensor->sizes(sensorFormat.mbus_code);
> > +       Size bestSize;
> > +
> > +       for (const Size &s : sizes) {
> > +               /* Ignore smaller sizes. */
> > +               if (s.width < sensorFormat.size.width ||
> > +                   s.height < sensorFormat.size.height)
> > +                       continue;
> > +
> > +               /* Make sure the width stays in the limits. */
> > +               if (s.width > maxResolution.width)
> > +                       continue;
> > +
> > +               bestSize = s;
> > +               break;
> > +       }
> > +
> > +       /*
> > +        * This should happen only if the sensor can only produce formats big
> > +        * enough to accommodate all streams but that exceeds the maximum
> > +        * allowed input size.
> > +        */
> > +       if (bestSize.isNull()) {
> > +               LOG(ISI, Error) << "Unable to find a suitable sensor format";
> > +               return Invalid;
> > +       }
> > +
> > +       sensorFormat_.mbus_code = sensorFormat.mbus_code;
> > +       sensorFormat_.size = bestSize;
> > +
> > +       LOG(ISI, Debug) << "Selected sensor format: " << sensorFormat_;
> > +
> > +       return status;
> > +}
> > +
> > +/* -----------------------------------------------------------------------------
> > + * Pipeline Handler
> > + */
> > +PipelineHandlerISI::Pipe *PipelineHandlerISI::pipeFromStream(const Camera *camera,
> > +                                                            const Stream *stream)
> > +{
> > +       ISICameraData *data = cameraData(const_cast<Camera *>(camera));
> > +       unsigned int pipeIndex = data->pipeIndex(stream);
> > +
> > +       ASSERT(pipeIndex < pipes_.size());
> > +
> > +       return &pipes_[pipeIndex];
> > +}
> > +
> > +PipelineHandlerISI::PipelineHandlerISI(CameraManager *manager)
> > +       : PipelineHandler(manager)
> > +{
> > +}
> > +
> > +CameraConfiguration *
> > +PipelineHandlerISI::generateConfiguration(Camera *camera,
> > +                                         const StreamRoles &roles)
> > +{
> > +       ISICameraData *data = cameraData(camera);
> > +       CameraSensor *sensor = data->sensor.get();
> > +       CameraConfiguration *config = new ISICameraConfiguration(data);
> > +
> > +       if (roles.empty())
> > +               return config;
> > +
> > +       if (roles.size() > 2) {
> > +               LOG(ISI, Error) << "Only two streams are supported";
> > +               delete config;
> > +               return nullptr;
> > +       }
> > +
> > +       for (const auto &role : roles) {
> > +               /*
> > +                * Prefer the following formats
> > +                * - Still Capture: Full resolution RGB565
> > +                * - Preview/VideoRecording: 1080p YUYV
> > +                * - RAW: sensor's native format and resolution
> > +                */
> > +               StreamConfiguration cfg;
> > +
> > +               switch (role) {
> > +               case StillCapture:
> > +                       cfg.size = data->sensor->resolution();
> > +                       cfg.pixelFormat = formats::RGB565;
> > +                       cfg.bufferCount = 4;
>
> If bufferCount is always 4, perhaps set it before the role switch?
>

This will anyway be reworked in v2 to populate the stream configuration
formats

> > +                       break;
> > +               default:
> > +                       LOG(ISI, Warning) << "Unsupported role: " << role;
> > +                       [[fallthrough]];
> > +               case Viewfinder:
> > +               case VideoRecording:
> > +                       cfg.size = PipelineHandlerISI::previewSize;
> > +                       cfg.pixelFormat = formats::YUYV;
> > +                       cfg.bufferCount = 4;
> > +                       break;
> > +               case Raw:
> > +                       /*
> > +                        * Make sure the sensor can generate a RAW format and
> > +                        * prefer the ones with a larger bitdepth.
> > +                        */
> > +                       unsigned int maxDepth = 0;
> > +                       unsigned int rawCode = 0;
> > +
> > +                       for (unsigned int code : sensor->mbusCodes()) {
> > +                               const BayerFormat &bayerFormat = BayerFormat::fromMbusCode(code);
> > +                               if (!bayerFormat.isValid())
> > +                                       continue;
> > +
> > +                               if (bayerFormat.bitDepth > maxDepth) {
> > +                                       maxDepth = bayerFormat.bitDepth;
> > +                                       rawCode = code;
> > +                               }
> > +                       }
> > +
> > +                       if (!rawCode) {
> > +                               LOG(ISI, Error)
> > +                                       << "Cannot generate a configuration for RAW stream";
> > +                               LOG(ISI, Error)
> > +                                       << "The sensor does not support RAW";
> > +                               delete config;
> > +                               return nullptr;
> > +                       }
> > +
> > +                       /*
> > +                        * Search the PixelFormat that corresponds to the
> > +                        * selected sensor's mbus code.
> > +                        */
> > +                       const ISICameraConfiguration::PipeFormat *rawPipeFormat = nullptr;
> > +                       PixelFormat rawFormat;
> > +
> > +                       for (const auto &format : ISICameraConfiguration::formatsMap) {
> > +                               const ISICameraConfiguration::PipeFormat &pipeFormat = format.second;
> > +
> > +                               if (pipeFormat.sensorCode != rawCode)
> > +                                       continue;
> > +
> > +                               rawPipeFormat = &pipeFormat;
> > +                               rawFormat = format.first;
> > +                               break;
> > +                       }
> > +
> > +                       if (!rawPipeFormat) {
> > +                               LOG(ISI, Error)
> > +                                       << "Cannot generate a configuration for RAW stream";
> > +                               LOG(ISI, Error)
> > +                                       << "Format not supported: "
> > +                                       << BayerFormat::fromMbusCode(rawCode);
> > +                               delete config;
> > +                               return nullptr;
> > +                       }
> > +
> > +                       cfg.size = sensor->resolution();
> > +                       cfg.pixelFormat = rawFormat;
> > +                       cfg.bufferCount = 4;
> > +                       break;
> > +               }
> > +
> > +               config->addConfiguration(cfg);
> > +       }
> > +
> > +       config->validate();
> > +
> > +       return config;
> > +}
> > +
> > +int PipelineHandlerISI::configure(Camera *camera, CameraConfiguration *c)
> > +{
> > +       ISICameraConfiguration *camConfig = static_cast<ISICameraConfiguration *>(c);
> > +
> > +       ISICameraData *data = cameraData(camera);
> > +       CameraSensor *sensor = data->sensor.get();
> > +
> > +       /* All links are immutable except the sensor -> csis link. */
> > +       const MediaPad *sensorSrc = sensor->entity()->getPadByIndex(0);
> > +       sensorSrc->links()[0]->setEnabled(true);
> > +
> > +       /*
> > +        * Reset the crossbar switch routing and enable one route for each
> > +        * requested stream configuration.
> > +        *
> > +        * \todo Handle concurrent usage of multiple cameras by adjusting the
> > +        * routing table instead of resetting it.
> > +        */
> > +       V4L2Subdevice::Routing routing = {};
> > +
> > +       int ret = crossbar_->setRouting(&routing, V4L2Subdevice::ActiveFormat);
> > +       if (ret)
> > +               return ret;
> > +
> > +       routing = {};
> > +       for (const auto &[idx, config] : utils::enumerate(*c)) {
> > +               struct v4l2_subdev_route route = {
> > +                       .sink_pad = data->id_,
> > +                       .sink_stream = 0,
> > +                       .source_pad = static_cast<unsigned int>(3 + idx),
> > +                       .source_stream = 0,
> > +                       .flags = V4L2_SUBDEV_ROUTE_FL_ACTIVE,
> > +                       .reserved = {}
> > +               };
> > +
> > +               routing.push_back(route);
> > +       }
> > +
> > +       ret = crossbar_->setRouting(&routing, V4L2Subdevice::ActiveFormat);
> > +       if (ret)
> > +               return ret;
> > +
> > +       /* Apply format to the sensor and CSIS receiver. */
> > +       V4L2SubdeviceFormat sensorFmt = camConfig->sensorFormat_;
> > +       ret = sensor->setFormat(&sensorFmt);
> > +       if (ret)
> > +               return ret;
> > +
> > +       ret = data->csis->setFormat(0, &sensorFmt);
> > +       if (ret)
> > +               return ret;
> > +
> > +       ret = data->csis->setFormat(1, &sensorFmt);
> > +       if (ret)
> > +               return ret;
> > +
> > +       ret = crossbar_->setFormat(data->id_, &sensorFmt);
> > +       if (ret)
> > +               return ret;
> > +
>
> This makes me wonder about Naush's MediaWalk proposal earlier this year.
> Seems we could likely do with some helpers for all the media graph
> options in each pipeline handler.
>
> But ... that's not something that will happen for this patch.
>
>
> > +       /* Now configure the ISI and video node instances, one per stream. */
> > +       data->enabledStreams_.clear();
> > +       for (const auto &config : *c) {
> > +               Pipe *pipe = pipeFromStream(camera, config.stream());
> > +
> > +               /*
> > +                * Set the format on the ISI sink pad: it must match what is
> > +                * received by the CSIS.
> > +                */
> > +               ret = pipe->isi->setFormat(0, &sensorFmt);
> > +               if (ret)
> > +                       return ret;
> > +
> > +               /*
> > +                * Configure the ISI sink compose rectangle to downscale the
> > +                * image.
> > +                *
> > +                * \todo Additional cropping could be applied on the ISI source
> > +                * pad to further downscale the image.
>
> Cropping ? or downscaling?

There's an intermediate cropping step which, as the \todo reports, is
not activated at the moment. If the concern is about the "further
downscale" term in the comment, as it suggests scaling and not
cropping, I can indeed change it.

>
> Aside from the comments, I don't have any specific objections - and I
> expect more things can develop on top.
>
>
> Reviewed-by: Kieran Bingham <kieran.bingham at ideasonboard.com>
>

Thanks
  j

> > +                */
> > +               Rectangle isiScale = {};
> > +               isiScale.x = 0;
> > +               isiScale.y = 0;
> > +               isiScale.width = config.size.width;
> > +               isiScale.height = config.size.height;
> > +
> > +               ret = pipe->isi->setSelection(0, V4L2_SEL_TGT_COMPOSE, &isiScale);
> > +               if (ret)
> > +                       return ret;
> > +
> > +               /*
> > +                * Set the format on ISI source pad: only the media bus code
> > +                * is relevant as it configures format conversion, while the
> > +                * size is taken from the sink's COMPOSE (or source's CROP,
> > +                * if any) rectangles.
> > +                */
> > +               auto fmtMap = ISICameraConfiguration::formatsMap.find(config.pixelFormat);
> > +               ISICameraConfiguration::PipeFormat pipeFormat = fmtMap->second;
> > +
> > +               V4L2SubdeviceFormat isiFormat{};
> > +               isiFormat.mbus_code = pipeFormat.isiCode;
> > +               isiFormat.size = config.size;
> > +
> > +               ret = pipe->isi->setFormat(1, &isiFormat);
> > +               if (ret)
> > +                       return ret;
> > +
> > +               V4L2DeviceFormat captureFmt{};
> > +               captureFmt.fourcc = pipeFormat.fourcc;
> > +               captureFmt.size = config.size;
> > +
> > +               ret = pipe->capture->setFormat(&captureFmt);
> > +               if (ret)
> > +                       return ret;
> > +
> > +               /* Store the list of enabled streams for later use. */
> > +               data->enabledStreams_.push_back(config.stream());
> > +       }
> > +
> > +       return 0;
> > +}
> > +
> > +int PipelineHandlerISI::exportFrameBuffers(Camera *camera, Stream *stream,
> > +                                          std::vector<std::unique_ptr<FrameBuffer>> *buffers)
> > +{
> > +       unsigned int count = stream->configuration().bufferCount;
> > +       Pipe *pipe = pipeFromStream(camera, stream);
> > +
> > +       return pipe->capture->exportBuffers(count, buffers);
> > +}
> > +
> > +int PipelineHandlerISI::start(Camera *camera,
> > +                             [[maybe_unused]] const ControlList *controls)
> > +{
> > +       ISICameraData *data = cameraData(camera);
> > +
> > +       for (const auto &stream : data->enabledStreams_) {
> > +               Pipe *pipe = pipeFromStream(camera, stream);
> > +               V4L2VideoDevice *capture = pipe->capture.get();
> > +               const StreamConfiguration &config = stream->configuration();
> > +
> > +               int ret = capture->importBuffers(config.bufferCount);
> > +               if (ret)
> > +                       return ret;
> > +
> > +               ret = capture->streamOn();
> > +               if (ret)
> > +                       return ret;
> > +       }
> > +
> > +       return 0;
> > +}
> > +
> > +void PipelineHandlerISI::stopDevice(Camera *camera)
> > +{
> > +       ISICameraData *data = cameraData(camera);
> > +
> > +       for (const auto &stream : data->enabledStreams_) {
> > +               Pipe *pipe = pipeFromStream(camera, stream);
> > +               V4L2VideoDevice *capture = pipe->capture.get();
> > +
> > +               capture->streamOff();
> > +               capture->releaseBuffers();
> > +       }
> > +}
> > +
> > +int PipelineHandlerISI::queueRequestDevice(Camera *camera, Request *request)
> > +{
> > +       for (auto &[stream, buffer] : request->buffers()) {
> > +               Pipe *pipe = pipeFromStream(camera, stream);
> > +
> > +               int ret = pipe->capture->queueBuffer(buffer);
> > +               if (ret)
> > +                       return ret;
> > +       }
> > +
> > +       return 0;
> > +}
> > +
> > +bool PipelineHandlerISI::match(DeviceEnumerator *enumerator)
> > +{
> > +       DeviceMatch dm("mxc-isi");
> > +       dm.add("crossbar");
> > +       dm.add("mxc_isi.0");
> > +       dm.add("mxc_isi.0.capture");
> > +       dm.add("mxc_isi.1");
> > +       dm.add("mxc_isi.1.capture");
> > +
> > +       isiDev_ = acquireMediaDevice(enumerator, dm);
> > +       if (!isiDev_)
> > +               return false;
> > +
> > +       /*
> > +        * Acquire the subdevs and video nodes for the crossbar switch and the
> > +        * processing pipelines.
> > +        */
> > +       crossbar_ = V4L2Subdevice::fromEntityName(isiDev_, "crossbar");
> > +       if (!crossbar_)
> > +               return false;
> > +
> > +       int ret = crossbar_->open();
> > +       if (ret)
> > +               return false;
> > +
> > +       for (unsigned int i = 0;; ++i) {
> > +               std::string entityName = "mxc_isi." + std::to_string(i);
> > +               std::unique_ptr<V4L2Subdevice> isi =
> > +                       V4L2Subdevice::fromEntityName(isiDev_, entityName);
> > +               if (!isi)
> > +                       break;
> > +
> > +               ret = isi->open();
> > +               if (ret)
> > +                       return ret;
> > +
> > +               entityName += ".capture";
> > +               std::unique_ptr<V4L2VideoDevice> capture =
> > +                       V4L2VideoDevice::fromEntityName(isiDev_, entityName);
> > +               if (!capture)
> > +                       break;
> > +
> > +               capture->bufferReady.connect(this, &PipelineHandlerISI::bufferReady);
> > +
> > +               ret = capture->open();
> > +               if (ret)
> > +                       return ret;
> > +
> > +               pipes_.push_back({ std::move(isi), std::move(capture) });
> > +       }
> > +
> > +       if (pipes_.empty())
> > +               return false;
> > +
> > +       /*
> > +        * Loop over all the crossbar switch sink pads to find connected CSI-2
> > +        * receivers and camera sensors.
> > +        */
> > +       unsigned int numCameras = 0;
> > +       for (MediaPad *pad : crossbar_->entity()->pads()) {
> > +               if (!(pad->flags() & MEDIA_PAD_FL_SINK) || pad->links().empty())
> > +                       continue;
> > +
> > +               MediaEntity *csi = pad->links()[0]->source()->entity();
> > +               if (csi->pads().size() != 2)
> > +                       continue;
> > +
> > +               pad = csi->pads()[0];
> > +               if (!(pad->flags() & MEDIA_PAD_FL_SINK) || pad->links().empty())
> > +                       continue;
> > +
> > +               MediaEntity *sensor = pad->links()[0]->source()->entity();
> > +               if (sensor->function() != MEDIA_ENT_F_CAM_SENSOR)
> > +                       continue;
> > +
> > +               /* Create the camera data. */
> > +               std::unique_ptr<ISICameraData> data =
> > +                       std::make_unique<ISICameraData>(this);
> > +
> > +               data->sensor = std::make_unique<CameraSensor>(sensor);
> > +               data->csis = std::make_unique<V4L2Subdevice>(csi);
> > +               data->id_ = numCameras;
> > +
> > +               ret = data->init();
> > +               if (ret)
> > +                       return false;
> > +
> > +               /* Register the camera. */
> > +               const std::string &id = data->sensor->id();
> > +               std::set<Stream *> streams = {
> > +                       &data->stream1_,
> > +                       &data->stream2_
> > +               };
> > +               std::shared_ptr<Camera> camera =
> > +                       Camera::create(std::move(data), id, streams);
> > +
> > +               registerCamera(std::move(camera));
> > +               numCameras++;
> > +       }
> > +
> > +       return numCameras > 0;
> > +}
> > +
> > +void PipelineHandlerISI::bufferReady(FrameBuffer *buffer)
> > +{
> > +       Request *request = buffer->request();
> > +
> > +       completeBuffer(request, buffer);
> > +       if (request->hasPendingBuffers())
> > +               return;
> > +
> > +       completeRequest(request);
> > +}
> > +
> > +REGISTER_PIPELINE_HANDLER(PipelineHandlerISI)
> > +
> > +} /* namespace libcamera */
> > diff --git a/src/libcamera/pipeline/imx8-isi/meson.build b/src/libcamera/pipeline/imx8-isi/meson.build
> > new file mode 100644
> > index 000000000000..ffd0ce54ce92
> > --- /dev/null
> > +++ b/src/libcamera/pipeline/imx8-isi/meson.build
> > @@ -0,0 +1,5 @@
> > +# SPDX-License-Identifier: CC0-1.0
> > +
> > +libcamera_sources += files([
> > +    'imx8-isi.cpp'
> > +])
> > --
> > 2.37.3
> >


More information about the libcamera-devel mailing list