[libcamera-devel] [PATCH v10 8/8] libcamera: pipeline: raspberrypi: Support color spaces

David Plowman david.plowman at raspberrypi.com
Thu Dec 9 11:12:45 CET 2021


The Raspberry Pi pipeline handler now sets color spaces correctly.

In generateConfiguration() it picks a sensible default for each stream
role: Raw streams get the Raw color space, StillCapture and Viewfinder
streams get JPEG (whose RGB output is the same as sRGB), and
VideoRecording streams get Rec.709.

validate() now calls validateColorSpaces() with the
StreamsShareColorSpace flag, so that all output streams request a
consistent color space, before checking what the hardware can actually
deliver and adjusting the configuration where it cannot match the
request.

Signed-off-by: David Plowman <david.plowman at raspberrypi.com>
---
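Not part of the patch itself, just a sketch of how an application might
observe the new defaults and any adjustment made by validate(). The
helper name inspectColorSpaces() is made up for illustration, and it
assumes a Camera has already been acquired via the CameraManager:

#include <iostream>
#include <memory>

#include <libcamera/camera.h>
#include <libcamera/color_space.h>
#include <libcamera/stream.h>

using namespace libcamera;

/* "camera" is assumed to be an already-acquired Camera. */
void inspectColorSpaces(std::shared_ptr<Camera> camera)
{
	std::unique_ptr<CameraConfiguration> config =
		camera->generateConfiguration({ StreamRole::VideoRecording });
	if (!config)
		return;

	/* Video recording streams now default to Rec.709. */
	std::cout << "Default color space: "
		  << ColorSpace::toString(config->at(0).colorSpace)
		  << std::endl;

	/*
	 * validate() runs validateColorSpaces() and then tries the formats
	 * on the ISP, possibly adjusting the color space to what the
	 * hardware can deliver.
	 */
	if (config->validate() == CameraConfiguration::Adjusted)
		std::cout << "Color space adjusted to "
			  << ColorSpace::toString(config->at(0).colorSpace)
			  << std::endl;
}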
 .../pipeline/raspberrypi/raspberrypi.cpp      | 40 +++++++++++++++++++
 1 file changed, 40 insertions(+)

diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index 22c8ee68..368953e1 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -96,6 +96,7 @@ V4L2DeviceFormat toV4L2DeviceFormat(const V4L2SubdeviceFormat &format,
 
 	deviceFormat.fourcc = V4L2PixelFormat::fromPixelFormat(pix);
 	deviceFormat.size = format.size;
+	deviceFormat.colorSpace = format.colorSpace;
 	return deviceFormat;
 }
 
@@ -132,6 +133,7 @@ V4L2SubdeviceFormat findBestFormat(const SensorFormats &formatsMap, const Size &
 {
 	double bestScore = std::numeric_limits<double>::max(), score;
 	V4L2SubdeviceFormat bestFormat;
+	bestFormat.colorSpace = ColorSpace::Raw;
 
 	constexpr float penaltyAr = 1500.0;
 	constexpr float penaltyBitDepth = 500.0;
@@ -329,6 +331,8 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 	if (config_.empty())
 		return Invalid;
 
+	status = validateColorSpaces(ColorSpaceFlag::StreamsShareColorSpace);
+
 	/*
 	 * What if the platform has a non-90 degree rotation? We can't even
 	 * "adjust" the configuration and carry on. Alternatively, raising an
@@ -496,11 +500,25 @@ CameraConfiguration::Status RPiCameraConfiguration::validate()
 		V4L2DeviceFormat format;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
+		format.colorSpace = cfg.colorSpace;
+
+		LOG(RPI, Debug)
+			<< "Try color space " << ColorSpace::toString(cfg.colorSpace);
 
 		int ret = dev->tryFormat(&format);
 		if (ret)
 			return Invalid;
 
+		if (!format.colorSpace || cfg.colorSpace != format.colorSpace) {
+			status = Adjusted;
+			LOG(RPI, Warning)
+				<< "Color space changed from "
+				<< ColorSpace::toString(cfg.colorSpace) << " to "
+				<< ColorSpace::toString(format.colorSpace);
+		}
+
+		cfg.colorSpace = format.colorSpace;
+
 		cfg.stride = format.planes[0].bpl;
 		cfg.frameSize = format.planes[0].size;
 
@@ -524,6 +542,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 	PixelFormat pixelFormat;
 	V4L2VideoDevice::Formats fmts;
 	Size size;
+	std::optional<ColorSpace> colorSpace;
 
 	if (roles.empty())
 		return config;
@@ -538,6 +557,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			pixelFormat = mbusCodeToPixelFormat(sensorFormat.mbus_code,
 							    BayerFormat::Packing::CSI2);
 			ASSERT(pixelFormat.isValid());
+			colorSpace = ColorSpace::Raw;
 			bufferCount = 2;
 			rawCount++;
 			break;
@@ -545,6 +565,12 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		case StreamRole::StillCapture:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::NV12;
+			/*
+			 * Still image codecs usually expect the JPEG color space.
+			 * Even RGB codecs will be fine as the RGB we get with the
+			 * JPEG color space is the same as sRGB.
+			 */
+			colorSpace = ColorSpace::Jpeg;
 			/* Return the largest sensor resolution. */
 			size = data->sensor_->resolution();
 			bufferCount = 1;
@@ -562,6 +588,11 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 			 */
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::YUV420;
+			/*
+			 * Choose a color space appropriate for video recording.
+			 * Rec.709 will be a good default for HD resolutions.
+			 */
+			colorSpace = ColorSpace::Rec709;
 			size = { 1920, 1080 };
 			bufferCount = 4;
 			outCount++;
@@ -570,6 +601,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		case StreamRole::Viewfinder:
 			fmts = data->isp_[Isp::Output0].dev()->formats();
 			pixelFormat = formats::ARGB8888;
+			colorSpace = ColorSpace::Jpeg;
 			size = { 800, 600 };
 			bufferCount = 4;
 			outCount++;
@@ -612,6 +644,7 @@ CameraConfiguration *PipelineHandlerRPi::generateConfiguration(Camera *camera,
 		StreamConfiguration cfg(formats);
 		cfg.size = size;
 		cfg.pixelFormat = pixelFormat;
+		cfg.colorSpace = colorSpace;
 		cfg.bufferCount = bufferCount;
 		config->addConfiguration(cfg);
 	}
@@ -719,6 +752,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		V4L2PixelFormat fourcc = V4L2PixelFormat::fromPixelFormat(cfg.pixelFormat);
 		format.size = cfg.size;
 		format.fourcc = fourcc;
+		format.colorSpace = cfg.colorSpace;
 
 		LOG(RPI, Debug) << "Setting " << stream->name() << " to "
 				<< format.toString();
@@ -734,6 +768,10 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 			return -EINVAL;
 		}
 
+		LOG(RPI, Debug)
+			<< "Stream " << stream->name() << " has color space "
+			<< ColorSpace::toString(cfg.colorSpace);
+
 		cfg.setStream(stream);
 		stream->setExternal(true);
 
@@ -758,6 +796,8 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
 		format = {};
 		format.size = maxSize;
 		format.fourcc = V4L2PixelFormat::fromPixelFormat(formats::YUV420);
+		/* No one asked for output, so the color space doesn't matter. */
+		format.colorSpace = ColorSpace::Jpeg;
 		ret = data->isp_[Isp::Output0].dev()->setFormat(&format);
 		if (ret) {
 			LOG(RPI, Error)
-- 
2.30.2


