[libcamera-devel] [RFC PATCH 3/3] libcamera: pipeline: raspberrypi: Support colour spaces

Naushir Patuck naush at raspberrypi.com
Mon Jun 14 13:04:32 CEST 2021


Hi David,

Thank you for your patch.

On Tue, 8 Jun 2021 at 15:44, David Plowman <david.plowman at raspberrypi.com>
wrote:

> The Raspberry Pi pipeline handler now sets colour spaces correctly.
>
> In generateConfiguration() is sets them to reasonable default values
>

Typo:
s/is/it/


> based on the stream role. Note how video recording streams use the
> "VIDEO" YCbCr encoding, which will later be fixed up according to the
> requested resolution.
>
> validate() and configure() check the colour space is valid, and also
> force all (non-raw) output streams to share the same colour space
> (which is a hardware restriction). Finally, the "VIDEO" YCbCr encoding
> is corrected by configure() to be one of REC601, REC709 or REC2020.
> ---
>  .../pipeline/raspberrypi/raspberrypi.cpp      | 84 +++++++++++++++++++
>  1 file changed, 84 insertions(+)
>
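
Also, for anyone wanting to try the new colour space handling from the
application side, I would expect the usage to look roughly like the sketch
below (untested, and assuming the ColorSpace additions from earlier in this
series):

        std::unique_ptr<CameraConfiguration> config =
                camera->generateConfiguration({ StreamRole::VideoRecording });

        /* Override the default "VIDEO" colour space with an explicit choice. */
        config->at(0).colorSpace = ColorSpace::REC709;

        if (config->validate() == CameraConfiguration::Invalid)
                return -EINVAL;

        camera->configure(config.get());
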
> diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> index a65b4568..acb4a1c3 100644
> --- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> +++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
> @@ -279,6 +279,24 @@ RPiCameraConfiguration::RPiCameraConfiguration(const RPiCameraData *data)
>  {
>  }
>
> +static bool validateColorSpace(ColorSpace &colorSpace)
> +{
> +       const std::vector<ColorSpace> validColorSpaces = {
> +               ColorSpace::JFIF,
> +               ColorSpace::SMPTE170M,
> +               ColorSpace::REC709,
> +               ColorSpace::REC2020,
> +               ColorSpace::VIDEO,
> +       };
> +       auto it = std::find_if(validColorSpaces.begin(), validColorSpaces.end(),
> +                              [&colorSpace](const ColorSpace &item) { return colorSpace == item; });
>

You could use std::find() for fewer characters to type :-)
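
Something along these lines should do it (an untested sketch, relying on the
same ColorSpace equality comparison that the lambda above already uses):

        auto it = std::find(validColorSpaces.begin(), validColorSpaces.end(),
                            colorSpace);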


> +       if (it != validColorSpaces.end())
> +               return true;
> +
> +       colorSpace = ColorSpace::JFIF;
> +       return false;
> +}
> +
>  CameraConfiguration::Status RPiCameraConfiguration::validate()
>  {
>         Status status = Valid;
> @@ -344,6 +362,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>         unsigned int rawCount = 0, outCount = 0, count = 0, maxIndex = 0;
>         std::pair<int, Size> outSize[2];
>         Size maxSize;
> +       ColorSpace colorSpace; /* colour space for all non-raw output streams */
>         for (StreamConfiguration &cfg : config_) {
>                 if (isRaw(cfg.pixelFormat)) {
>                         /*
> @@ -352,6 +371,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>                          */
>                         V4L2VideoDevice::Formats fmts = data_->unicam_[Unicam::Image].dev()->formats();
>                         V4L2DeviceFormat sensorFormat = findBestMode(fmts, cfg.size);
> +                       sensorFormat.colorSpace = ColorSpace::RAW;
>                         int ret = data_->unicam_[Unicam::Image].dev()->tryFormat(&sensorFormat);
>                         if (ret)
>                                 return Invalid;
> @@ -390,6 +410,7 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>                         if (maxSize < cfg.size) {
>                                 maxSize = cfg.size;
>                                 maxIndex = outCount;
> +                               colorSpace = cfg.colorSpace;
>                         }
>                         outCount++;
>                 }
> @@ -403,6 +424,10 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>                 }
>         }
>
> +       /* Ensure the output colour space (if present) is one we handle. */
> +       if (outCount)
> +               validateColorSpace(colorSpace);
> +
>         /*
>          * Now do any fixups needed. For the two ISP outputs, one stream must be
>          * equal or smaller than the other in all dimensions.
> @@ -444,10 +469,26 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
>                         status = Adjusted;
>                 }
>
> +               /* Output streams must share the same colour space. */
> +               if (cfg.colorSpace != colorSpace) {
> +                       LOG(RPI, Warning) << "Output stream " << (i == maxIndex ? 0 : 1)
> +                                         << " colour space changed";
> +                       cfg.colorSpace = colorSpace;
> +                       status = Adjusted;
> +               }
> +
>                 V4L2DeviceFormat format;
>                 format.fourcc = dev->toV4L2PixelFormat(cfg.pixelFormat);
>                 format.size = cfg.size;
>
> +               /*
> +                * Request a sensible colour space. Note that "VIDEO" isn't a real
> +                * encoding, so substitute something else sensible.
> +                */
> +               format.colorSpace = colorSpace;
> +               if (format.colorSpace.encoding == ColorSpace::Encoding::VIDEO)
> +                       format.colorSpace.encoding = ColorSpace::Encoding::REC601;
> +
>                 int ret = dev->tryFormat(&format);
>                 if (ret)
>                         return Invalid;
> @@ -473,6 +514,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>         V4L2DeviceFormat sensorFormat;
>         unsigned int bufferCount;
>         PixelFormat pixelFormat;
> +       ColorSpace colorSpace;
>         V4L2VideoDevice::Formats fmts;
>         Size size;
>
> @@ -488,6 +530,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>                         fmts = data->unicam_[Unicam::Image].dev()->formats();
>                         sensorFormat = findBestMode(fmts, size);
>                         pixelFormat = sensorFormat.fourcc.toPixelFormat();
> +                       colorSpace = ColorSpace::RAW;
>                         ASSERT(pixelFormat.isValid());
>                         bufferCount = 2;
>                         rawCount++;
> @@ -499,6 +542,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>                         /* Return the largest sensor resolution. */
>                         size = data->sensor_->resolution();
>                         bufferCount = 1;
> +                       colorSpace = ColorSpace::JFIF;
>                         outCount++;
>                         break;
>
> @@ -515,6 +559,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>                         pixelFormat = formats::YUV420;
>                         size = { 1920, 1080 };
>                         bufferCount = 4;
> +                       colorSpace = ColorSpace::VIDEO;
>                         outCount++;
>                         break;
>
> @@ -523,6 +568,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>                         pixelFormat = formats::ARGB8888;
>                         size = { 800, 600 };
>                         bufferCount = 4;
> +                       colorSpace = ColorSpace::JFIF;
>                         outCount++;
>                         break;
>
> @@ -552,6 +598,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
>                 StreamConfiguration cfg(formats);
>                 cfg.size = size;
>                 cfg.pixelFormat = pixelFormat;
> +               cfg.colorSpace = colorSpace;
>                 cfg.bufferCount = bufferCount;
>                 config->addConfiguration(cfg);
>         }
> @@ -573,6 +620,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>         Size maxSize, sensorSize;
>         unsigned int maxIndex = 0;
>         bool rawStream = false;
> +       ColorSpace colorSpace; /* colour space for all non-raw output streams */
>
>         /*
>          * Look for the RAW stream (if given) size as well as the largest
> @@ -591,14 +639,40 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>                 } else {
>                         if (cfg.size > maxSize) {
>                                 maxSize = config->at(i).size;
> +                               colorSpace = config->at(i).colorSpace;
>                                 maxIndex = i;
>                         }
>                 }
>         }
>
> +       if (maxSize.isNull()) {
> +               /*
> +                * No non-raw streams, so some will get made below. Doesn't matter
> +                * what colour space we assign to them.
> +                */
> +               colorSpace = ColorSpace::JFIF;
> +       } else {
> +               /* Make sure we can handle this colour space. */
> +               validateColorSpace(colorSpace);
> +
> +               /*
> +                * The "VIDEO" colour encoding means that we choose one of REC601,
> +                * REC709 or REC2020 automatically according to the resolution.
> +                */
> +               if (colorSpace.encoding == ColorSpace::Encoding::VIDEO) {
> +                       if (maxSize.width >= 3840)
> +                               colorSpace.encoding = ColorSpace::Encoding::REC2020;
> +                       else if (maxSize.width >= 1280)
> +                               colorSpace.encoding = ColorSpace::Encoding::REC709;
> +                       else
> +                               colorSpace.encoding = ColorSpace::Encoding::REC601;
> +               }
> +       }
> +
>         /* First calculate the best sensor mode we can use based on the user request. */
>         V4L2VideoDevice::Formats fmts = data->unicam_[Unicam::Image].dev()->formats();
>         V4L2DeviceFormat sensorFormat = findBestMode(fmts, rawStream ? sensorSize : maxSize);
> +       sensorFormat.colorSpace = ColorSpace::RAW;
>
>         /*
>          * Unicam image output format. The ISP input format gets set at start,
> @@ -636,11 +710,18 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>                 StreamConfiguration &cfg = config->at(i);
>
>                 if (isRaw(cfg.pixelFormat)) {
> +                       cfg.colorSpace = ColorSpace::RAW;
>                         cfg.setStream(&data->unicam_[Unicam::Image]);
>                         data->unicam_[Unicam::Image].setExternal(true);
>                         continue;
>                 }
>
> +               /* All other streams share the same colour space. */
> +               if (cfg.colorSpace != colorSpace) {
> +                       LOG(RPI, Warning) << "Stream " << i << " colour space changed";
>

Perhaps this should be an Info log level?
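
i.e. the same message, just demoted to Info:

                        LOG(RPI, Info) << "Stream " << i << " colour space changed";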

These are all minor comments, so

Reviewed-by: Naushir Patuck <naush at raspberrypi.com>


> +                       cfg.colorSpace = colorSpace;
> +               }
> +
>                 /* The largest resolution gets routed to the ISP Output 0 node. */
>                 RPi::Stream *stream = i == maxIndex ? &data->isp_[Isp::Output0]
>                                                     : &data->isp_[Isp::Output1];
> @@ -648,6 +729,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>                 V4L2PixelFormat fourcc = stream->dev()->toV4L2PixelFormat(cfg.pixelFormat);
>                 format.size = cfg.size;
>                 format.fourcc = fourcc;
> +               format.colorSpace = cfg.colorSpace;
>
>                 LOG(RPI, Debug) << "Setting " << stream->name() << " to "
>                                 << format.toString();
> @@ -687,6 +769,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>                 format = {};
>                 format.size = maxSize;
>                 format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420, false);
> +               format.colorSpace = colorSpace;
>                 ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
>                 if (ret) {
>                         LOG(RPI, Error)
> @@ -716,6 +799,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
>                 const Size limit = maxDimensions.boundedToAspectRatio(format.size);
>
>                 output1Format.size = (format.size / 2).boundedTo(limit).alignedDownTo(2, 2);
> +               output1Format.colorSpace = colorSpace;
>
>                 LOG(RPI, Debug) << "Setting ISP Output1 (internal) to "
>                                 << output1Format.toString();
> --
> 2.20.1
>
>