[libcamera-devel] [PATCH 3/4] pipeline: raspberrypi: Only enable embedded stream when available
Naushir Patuck
naush at raspberrypi.com
Tue Feb 16 11:31:39 CET 2021
The pipeline handler would enable and use the Unicam embedded data stream
even if the sensor did not support it. This was to allow a means to
pass exposure and gain values for the frame to the IPA in a synchronised
way.
The recent changes that have the pipeline handler pass a ControlList
with exposure and gain values mean this is no longer required. Disable
the use of the embedded data stream when a sensor does not support it.
This change also removes the mappedEmbeddedBuffers_ map as it is no
longer used.
Signed-off-by: Naushir Patuck <naush at raspberrypi.com>
---
.../pipeline/raspberrypi/raspberrypi.cpp | 109 +++++++++++-------
1 file changed, 68 insertions(+), 41 deletions(-)
diff --git a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
index 7e744ce13172..b6ee014b620e 100644
--- a/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
+++ b/src/libcamera/pipeline/raspberrypi/raspberrypi.cpp
@@ -168,12 +168,6 @@ public:
/* Stores the ids of the buffers mapped in the IPA. */
std::unordered_set<unsigned int> ipaBuffers_;
- /*
- * Map of (internal) mmaped embedded data buffers, to avoid having to
- * map/unmap on every frame.
- */
- std::map<unsigned int, MappedFrameBuffer> mappedEmbeddedBuffers_;
-
/* DMAHEAP allocation helper. */
RPi::DmaHeap dmaHeap_;
FileDescriptor lsTable_;
@@ -627,14 +621,7 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
if (isRaw(cfg.pixelFormat)) {
cfg.setStream(&data->unicam_[Unicam::Image]);
- /*
- * We must set both Unicam streams as external, even
- * though the application may only request RAW frames.
- * This is because we match timestamps on both streams
- * to synchronise buffers.
- */
data->unicam_[Unicam::Image].setExternal(true);
- data->unicam_[Unicam::Embedded].setExternal(true);
continue;
}
@@ -706,17 +693,6 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
return ret;
}
- /* Unicam embedded data output format. */
- format = {};
- format.fourcc = V4L2PixelFormat(V4L2_META_FMT_SENSOR_DATA);
- LOG(RPI, Debug) << "Setting embedded data format.";
- ret = data->unicam_[Unicam::Embedded].dev()->setFormat(&format);
- if (ret) {
- LOG(RPI, Error) << "Failed to set format on Unicam embedded: "
- << format.toString();
- return ret;
- }
-
/* Figure out the smallest selection the ISP will allow. */
Rectangle testCrop(0, 0, 1, 1);
data->isp_[Isp::Input].dev()->setSelection(V4L2_SEL_TGT_CROP, &testCrop);
@@ -733,6 +709,41 @@ int PipelineHandlerRPi::configure(Camera *camera, CameraConfiguration *config)
if (ret)
LOG(RPI, Error) << "Failed to configure the IPA: " << ret;
+ /*
+ * The IPA will set data->sensorMetadata_ to true if embedded data is
+ * supported on this sensor. If so, open the Unicam embedded data
+ * node and configure the output format.
+ */
+ if (data->sensorMetadata_) {
+ format = {};
+ format.fourcc = V4L2PixelFormat(V4L2_META_FMT_SENSOR_DATA);
+ LOG(RPI, Debug) << "Setting embedded data format.";
+ data->unicam_[Unicam::Embedded].dev()->open();
+ ret = data->unicam_[Unicam::Embedded].dev()->setFormat(&format);
+ if (ret) {
+ LOG(RPI, Error) << "Failed to set format on Unicam embedded: "
+ << format.toString();
+ return ret;
+ }
+
+ /*
+ * If a RAW/Bayer stream has been requested by the application,
+ * we must set both Unicam streams as external, even though the
+ * application may only request RAW frames. This is because we
+ * match timestamps on both streams to synchronise buffers.
+ */
+ if (rawStream)
+ data->unicam_[Unicam::Embedded].setExternal(true);
+ } else {
+ /*
+ * No embedded data present, so we do not want to iterate over
+ * the embedded data stream when starting and stopping.
+ */
+ data->streams_.erase(std::remove(data->streams_.begin(), data->streams_.end(),
+ &data->unicam_[Unicam::Embedded]),
+ data->streams_.end());
+ }
+
/*
* Update the ScalerCropMaximum to the correct value for this camera mode.
* For us, it's the same as the "analogue crop".
@@ -950,10 +961,16 @@ bool PipelineHandlerRPi::match(DeviceEnumerator *enumerator)
for (auto &stream : data->isp_)
data->streams_.push_back(&stream);
- /* Open all Unicam and ISP streams. */
+ /*
+ * Open all Unicam and ISP streams. The exception is the embedded data
+ * stream, which only gets opened if the IPA reports that the sensor
+ * supports embedded data. This happens in RPiCameraData::configureIPA().
+ */
for (auto const stream : data->streams_) {
- if (stream->dev()->open())
- return false;
+ if (stream != &data->unicam_[Unicam::Embedded]) {
+ if (stream->dev()->open())
+ return false;
+ }
}
/* Wire up all the buffer connections. */
@@ -1110,19 +1127,13 @@ int PipelineHandlerRPi::prepareBuffers(Camera *camera)
return ret;
}
- if (!data->sensorMetadata_) {
- for (auto const &it : data->unicam_[Unicam::Embedded].getBuffers()) {
- MappedFrameBuffer fb(it.second, PROT_READ | PROT_WRITE);
- data->mappedEmbeddedBuffers_.emplace(it.first, std::move(fb));
- }
- }
-
/*
* Pass the stats and embedded data buffers to the IPA. No other
* buffers need to be passed.
*/
mapBuffers(camera, data->isp_[Isp::Stats].getBuffers(), RPi::BufferMask::STATS);
- mapBuffers(camera, data->unicam_[Unicam::Embedded].getBuffers(), RPi::BufferMask::EMBEDDED_DATA);
+ if (data->sensorMetadata_)
+ mapBuffers(camera, data->unicam_[Unicam::Embedded].getBuffers(), RPi::BufferMask::EMBEDDED_DATA);
return 0;
}
@@ -1155,7 +1166,6 @@ void PipelineHandlerRPi::freeBuffers(Camera *camera)
std::vector<unsigned int> ipaBuffers(data->ipaBuffers_.begin(), data->ipaBuffers_.end());
data->ipa_->unmapBuffers(ipaBuffers);
data->ipaBuffers_.clear();
- data->mappedEmbeddedBuffers_.clear();
for (auto const stream : data->streams_)
stream->releaseBuffers();
@@ -1659,7 +1669,7 @@ void RPiCameraData::tryRunPipeline()
/* If any of our request or buffer queues are empty, we cannot proceed. */
if (state_ != State::Idle || requestQueue_.empty() ||
- bayerQueue_.empty() || embeddedQueue_.empty())
+ bayerQueue_.empty() || (embeddedQueue_.empty() && sensorMetadata_))
return;
if (!findMatchingBuffers(bayerFrame, embeddedBuffer))
@@ -1684,17 +1694,23 @@ void RPiCameraData::tryRunPipeline()
state_ = State::Busy;
unsigned int bayerId = unicam_[Unicam::Image].getBufferId(bayerFrame.buffer);
- unsigned int embeddedId = unicam_[Unicam::Embedded].getBufferId(embeddedBuffer);
LOG(RPI, Debug) << "Signalling RPi::IPA_EVENT_SIGNAL_ISP_PREPARE:"
- << " Bayer buffer id: " << bayerId
- << " Embedded buffer id: " << embeddedId;
+ << " Bayer buffer id: " << bayerId;
+ op = {};
op.operation = RPi::IPA_EVENT_SIGNAL_ISP_PREPARE;
op.data.push_back(RPi::BufferMask::BAYER_DATA | bayerId);
- op.data.push_back(RPi::BufferMask::EMBEDDED_DATA | embeddedId);
op.controls.emplace_back(std::move(bayerFrame.controls));
+ if (embeddedBuffer) {
+ unsigned int embeddedId = unicam_[Unicam::Embedded].getBufferId(embeddedBuffer);
+
+ op.data.push_back(RPi::BufferMask::EMBEDDED_DATA | embeddedId);
+ LOG(RPI, Debug) << "Signalling RPi::IPA_EVENT_SIGNAL_ISP_PREPARE:"
+ << " Embedded buffer id: " << embeddedId;
+ }
+
ipa_->processEvent(op);
}
@@ -1736,6 +1752,17 @@ bool RPiCameraData::findMatchingBuffers(BayerFrame &bayerFrame, FrameBuffer *&em
LOG(RPI, Debug) << "Could not find matching embedded buffer";
+ if (!sensorMetadata_) {
+ /*
+ * If there is no sensor metadata, simply return the
+ * first bayer frame in the queue.
+ */
+ LOG(RPI, Debug) << "Returning bayer frame without a match";
+ bayerQueue_.pop();
+ embeddedBuffer = nullptr;
+ return true;
+ }
+
if (!embeddedQueue_.empty()) {
/*
* Not found a matching embedded buffer for the bayer buffer in
--
2.25.1
More information about the libcamera-devel
mailing list