[PATCH v7 4/4] libcamera: rkisp1: Plumb the dw100 dewarper as V4L2M2M converter
Laurent Pinchart
laurent.pinchart at ideasonboard.com
Wed Sep 25 20:46:32 CEST 2024
On Wed, Sep 25, 2024 at 01:29:42PM +0530, Umang Jain wrote:
> On 25/09/24 1:20 pm, Stefan Klug wrote:
> > On Fri, Sep 06, 2024 at 12:04:44PM +0530, Umang Jain wrote:
> >> Plumb the dw100 dewarper as a V4L2M2M converter in the rkisp1 pipeline
> >> handler. If the dewarper is found, it is instantiated and buffers are
> >> exported from it instead of from RkISP1Path. Internal buffers are
> >> allocated for the RkISP1Path when the dewarper is used.
> >>
> >> The RKISP1 pipeline handler now supports scaler crop control through
> >> the converter. Register the ScalerCrop control for the cameras created
> >> in the RKISP1 pipeline handler.
> >>
> >> Signed-off-by: Umang Jain <umang.jain at ideasonboard.com>
> >> ---
> >> src/libcamera/pipeline/rkisp1/rkisp1.cpp | 183 ++++++++++++++++++++++-
> >> 1 file changed, 176 insertions(+), 7 deletions(-)
> >>
> >> diff --git a/src/libcamera/pipeline/rkisp1/rkisp1.cpp b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> >> index 651258e3..0a794d63 100644
> >> --- a/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> >> +++ b/src/libcamera/pipeline/rkisp1/rkisp1.cpp
> >> @@ -6,10 +6,12 @@
> >> */
> >>
> >> #include <algorithm>
> >> +#include <map>
> >> #include <memory>
> >> #include <numeric>
> >> #include <optional>
> >> #include <queue>
> >> +#include <vector>
> >>
> >> #include <linux/media-bus-format.h>
> >> #include <linux/rkisp1-config.h>
> >> @@ -32,6 +34,7 @@
> >>
> >> #include "libcamera/internal/camera.h"
> >> #include "libcamera/internal/camera_sensor.h"
> >> +#include "libcamera/internal/converter/converter_v4l2_m2m.h"
> >> #include "libcamera/internal/delayed_controls.h"
> >> #include "libcamera/internal/device_enumerator.h"
> >> #include "libcamera/internal/framebuffer.h"
> >> @@ -180,6 +183,7 @@ private:
> >> void bufferReady(FrameBuffer *buffer);
> >> void paramReady(FrameBuffer *buffer);
> >> void statReady(FrameBuffer *buffer);
> >> + void dewarpBufferReady(FrameBuffer *buffer);
> >> void frameStart(uint32_t sequence);
> >>
> >> int allocateBuffers(Camera *camera);
> >> @@ -199,6 +203,15 @@ private:
> >> RkISP1MainPath mainPath_;
> >> RkISP1SelfPath selfPath_;
> >>
> >> + std::unique_ptr<V4L2M2MConverter> dewarper_;
> >> + bool useDewarper_;
> >> +
> >> + std::optional<Rectangle> activeCrop_;
> >> +
> >> + /* Internal buffers used when the dewarper is enabled */
> >> + std::vector<std::unique_ptr<FrameBuffer>> mainPathBuffers_;
> >> + std::queue<FrameBuffer *> availableMainPathBuffers_;
> >> +
> >> std::vector<std::unique_ptr<FrameBuffer>> paramBuffers_;
> >> std::vector<std::unique_ptr<FrameBuffer>> statBuffers_;
> >> std::queue<FrameBuffer *> availableParamBuffers_;
> >> @@ -221,6 +234,8 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
> >>
> >> FrameBuffer *paramBuffer = nullptr;
> >> FrameBuffer *statBuffer = nullptr;
> >> + FrameBuffer *mainPathBuffer = nullptr;
> >> + FrameBuffer *selfPathBuffer = nullptr;
> >>
> >> if (!isRaw) {
> >> if (pipe_->availableParamBuffers_.empty()) {
> >> @@ -238,10 +253,16 @@ RkISP1FrameInfo *RkISP1Frames::create(const RkISP1CameraData *data, Request *req
> >>
> >> statBuffer = pipe_->availableStatBuffers_.front();
> >> pipe_->availableStatBuffers_.pop();
> >> +
> >> + if (pipe_->useDewarper_) {
> >> + mainPathBuffer = pipe_->availableMainPathBuffers_.front();
> >> + pipe_->availableMainPathBuffers_.pop();
> >> + }
> >> }
> >>
> >> - FrameBuffer *mainPathBuffer = request->findBuffer(&data->mainPathStream_);
> >> - FrameBuffer *selfPathBuffer = request->findBuffer(&data->selfPathStream_);
> >> + if (!mainPathBuffer)
> >> + mainPathBuffer = request->findBuffer(&data->mainPathStream_);
> >> + selfPathBuffer = request->findBuffer(&data->selfPathStream_);
> >>
> >> RkISP1FrameInfo *info = new RkISP1FrameInfo;
> >>
> >> @@ -267,6 +288,7 @@ int RkISP1Frames::destroy(unsigned int frame)
> >>
> >> pipe_->availableParamBuffers_.push(info->paramBuffer);
> >> pipe_->availableStatBuffers_.push(info->statBuffer);
> >> + pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
> >>
> >> frameInfo_.erase(info->frame);
> >>
> >> @@ -282,6 +304,7 @@ void RkISP1Frames::clear()
> >>
> >> pipe_->availableParamBuffers_.push(info->paramBuffer);
> >> pipe_->availableStatBuffers_.push(info->statBuffer);
> >> + pipe_->availableMainPathBuffers_.push(info->mainPathBuffer);
> >>
> >> delete info;
> >> }
> >> @@ -600,7 +623,7 @@ CameraConfiguration::Status RkISP1CameraConfiguration::validate()
> >> */
> >>
> >> PipelineHandlerRkISP1::PipelineHandlerRkISP1(CameraManager *manager)
> >> - : PipelineHandler(manager), hasSelfPath_(true)
> >> + : PipelineHandler(manager), hasSelfPath_(true), useDewarper_(false)
> >> {
> >> }
> >>
> >> @@ -778,12 +801,19 @@ int PipelineHandlerRkISP1::configure(Camera *camera, CameraConfiguration *c)
> >> << " crop " << rect;
> >>
> >> std::map<unsigned int, IPAStream> streamConfig;
> >> + std::vector<std::reference_wrapper<StreamConfiguration>> outputCfgs;
> >>
> >> for (const StreamConfiguration &cfg : *config) {
> >> if (cfg.stream() == &data->mainPathStream_) {
> >> ret = mainPath_.configure(cfg, format);
> >> streamConfig[0] = IPAStream(cfg.pixelFormat,
> >> cfg.size);
> >> + /* Configure dewarp */
> >> + if (dewarper_ && !isRaw_) {
> >> + outputCfgs.push_back(const_cast<StreamConfiguration &>(cfg));
> >> + ret = dewarper_->configure(cfg, outputCfgs);
> >> + useDewarper_ = ret ? false : true;
> >> + }
> >> } else if (hasSelfPath_) {
> >> ret = selfPath_.configure(cfg, format);
> >> streamConfig[1] = IPAStream(cfg.pixelFormat,
> >> @@ -833,10 +863,19 @@ int PipelineHandlerRkISP1::exportFrameBuffers([[maybe_unused]] Camera *camera, S
> >> RkISP1CameraData *data = cameraData(camera);
> >> unsigned int count = stream->configuration().bufferCount;
> >>
> >> - if (stream == &data->mainPathStream_)
> >> - return mainPath_.exportBuffers(count, buffers);
> >> - else if (hasSelfPath_ && stream == &data->selfPathStream_)
> >> + if (stream == &data->mainPathStream_) {
> >> + /*
> >> + * Currently, i.MX8MP is the only platform with the DW100 dewarper.
> >> + * It has a main path and no self path. Hence, export buffers from
> >> + * the dewarper just for the main path stream, for now.
> >> + */
> >> + if (useDewarper_)
> >> + return dewarper_->exportBuffers(&data->mainPathStream_, count, buffers);
> >> + else
> >> + return mainPath_.exportBuffers(count, buffers);
> >> + } else if (hasSelfPath_ && stream == &data->selfPathStream_) {
> >> return selfPath_.exportBuffers(count, buffers);
> >> + }
> >>
> >> return -EINVAL;
> >> }
> >> @@ -860,6 +899,16 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
> >> ret = stat_->allocateBuffers(maxCount, &statBuffers_);
> >
> > Is there a way for the user to prevent usage of the dewarper? It
> > increases latency and doubles the required buffers. Now that I write
> > about it: to configure actual lens dewarping I envision something like
> > a "dewarp" algorithm in the tuning file. Maybe that is the best place
> > for that decision...
>
> I think somewhere in the previous iterations, we decided that if the
> dewarper is present, it should be discovered and used. At that point, we
> failed to justify any use case where a user might have to disable the
> dewarper 'intentionally'.
>
> If there is a use case, we can surely allow disabling the dewarper (and
> it shouldn't be a tedious patch on top, I believe). But the
> justification has to be there.
If the camera doesn't have a dewarping map, and if the user doesn't
request digital zoom, then I think the dewarper should be bypassed
instead of introducing delays.
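
Roughly, as a sketch only (hasDewarpMap() and requiresDigitalZoom() are
made-up helpers, just to mark the decision point in configure()):

	/*
	 * Sketch: route the main path through the dewarper only when it
	 * is actually needed. hasDewarpMap() and requiresDigitalZoom()
	 * are hypothetical helpers used to illustrate the decision.
	 */
	if (dewarper_ && !isRaw_) {
		bool needed = data->hasDewarpMap() ||
			      requiresDigitalZoom(cfg);
		if (needed) {
			outputCfgs.push_back(const_cast<StreamConfiguration &>(cfg));
			ret = dewarper_->configure(cfg, outputCfgs);
			useDewarper_ = ret == 0;
		} else {
			useDewarper_ = false;
		}
	}
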
I would still like to get to the bottom of using the resizer to
implement digital zoom.
> >> if (ret < 0)
> >> goto error;
> >> +
> >> + /* If the dewarper is being used, allocate internal buffers for ISP. */
> >> + if (useDewarper_) {
> >> + ret = mainPath_.exportBuffers(maxCount, &mainPathBuffers_);
> >> + if (ret < 0)
> >> + goto error;
> >> +
> >> + for (std::unique_ptr<FrameBuffer> &buffer : mainPathBuffers_)
> >> + availableMainPathBuffers_.push(buffer.get());
> >> + }
> >> }
> >>
> >> for (std::unique_ptr<FrameBuffer> &buffer : paramBuffers_) {
> >> @@ -883,6 +932,7 @@ int PipelineHandlerRkISP1::allocateBuffers(Camera *camera)
> >> error:
> >> paramBuffers_.clear();
> >> statBuffers_.clear();
> >> + mainPathBuffers_.clear();
> >>
> >> return ret;
> >> }
> >> @@ -897,8 +947,12 @@ int PipelineHandlerRkISP1::freeBuffers(Camera *camera)
> >> while (!availableParamBuffers_.empty())
> >> availableParamBuffers_.pop();
> >>
> >> + while (!availableMainPathBuffers_.empty())
> >> + availableMainPathBuffers_.pop();
> >> +
> >> paramBuffers_.clear();
> >> statBuffers_.clear();
> >> + mainPathBuffers_.clear();
> >>
> >> std::vector<unsigned int> ids;
> >> for (IPABuffer &ipabuf : data->ipaBuffers_)
> >> @@ -954,6 +1008,15 @@ int PipelineHandlerRkISP1::start(Camera *camera, [[maybe_unused]] const ControlL
> >> return ret;
> >> }
> >> actions += [&]() { stat_->streamOff(); };
> >> +
> >> + if (useDewarper_) {
> >> + ret = dewarper_->start();
> >> + if (ret) {
> >> + LOG(RkISP1, Error) << "Failed to start dewarper";
> >> + return ret;
> >> + }
> >> + }
> >> + actions += [&]() { dewarper_->stop(); };
> >> }
> >>
> >> if (data->mainPath_->isEnabled()) {
> >> @@ -1000,6 +1063,9 @@ void PipelineHandlerRkISP1::stopDevice(Camera *camera)
> >> if (ret)
> >> LOG(RkISP1, Warning)
> >> << "Failed to stop parameters for " << camera->id();
> >> +
> >> + if (useDewarper_)
> >> + dewarper_->stop();
> >> }
> >>
> >> ASSERT(data->queuedRequests_.empty());
> >> @@ -1110,6 +1176,16 @@ int PipelineHandlerRkISP1::updateControls(RkISP1CameraData *data)
> >> {
> >> ControlInfoMap::Map controls;
> >>
> >> + if (dewarper_) {
> >> + std::pair<Rectangle, Rectangle> cropLimits =
> >> + dewarper_->inputCropBounds(&data->mainPathStream_);
> >> +
> >> + controls[&controls::ScalerCrop] = ControlInfo(cropLimits.first,
> >> + cropLimits.second,
> >> + cropLimits.second);
> >> + activeCrop_ = cropLimits.second;
> >> + }
> >> +
> >
> > I'm a bit uncertain here. The current dewarper kernel driver produces
> > quite unexpected and difficult to explain results (modified crop
> > rectangle, changed pixel aspect ratio). So to me it feels like we
> > shouldn't expose that hardware control directly to the user, but do an
> > internal implementation (based on the same libcamera control). Could we
> > split just the logic that adds the control into a separate patch?
>
> I don't understand the internal implementation based on the same
> libcamera control. What would that look like?
>
> So you mean we don't feed any scaler crop rectangles from the
> application, but do it internally?
>
> > Another approach would be to merge it in and improve later. But then we
> > might break the interface for users of the feature... I lean towards
> > splitting the control and merging the rest. In that case we should
> > disable the dewarper by default, as the user has no added benefit
> > without the control. Still I think all the plumbing should definitely
> > go in.
>
> I don't foresee breaking the interface for users. But there can be
> improvements on top of this (e.g. mapping the V4L2 selection flags) and
> letting the application control that via an interface.
>
> https://docs.kernel.org/userspace-api/media/v4l/v4l2-selection-flags.html
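>
> For instance, as a sketch of the idea only (not the current converter
> API; the flag would still need to be plumbed through the converter
> interface), the application intent could be mapped to selection flags
> when applying the crop on the dewarper's input queue:
>
> 	#include <errno.h>
> 	#include <sys/ioctl.h>
> 	#include <linux/videodev2.h>
>
> 	/*
> 	 * Sketch: ask the driver not to shrink the crop below the
> 	 * requested rectangle. 'fd' is the dewarper video node and
> 	 * 'rect' the requested libcamera::Rectangle.
> 	 */
> 	struct v4l2_selection sel = {};
> 	sel.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
> 	sel.target = V4L2_SEL_TGT_CROP;
> 	sel.flags = V4L2_SEL_FLAG_GE;
> 	sel.r.left = rect.x;
> 	sel.r.top = rect.y;
> 	sel.r.width = rect.width;
> 	sel.r.height = rect.height;
>
> 	if (ioctl(fd, VIDIOC_S_SELECTION, &sel) < 0)
> 		ret = -errno;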
>
> >> /* Add the IPA registered controls to list of camera controls. */
> >> for (const auto &ipaControl : data->ipaControls_)
> >> controls[ipaControl.first] = ipaControl.second;
> >> @@ -1236,6 +1312,26 @@ bool PipelineHandlerRkISP1::match(DeviceEnumerator *enumerator)
> >> stat_->bufferReady.connect(this, &PipelineHandlerRkISP1::statReady);
> >> param_->bufferReady.connect(this, &PipelineHandlerRkISP1::paramReady);
> >>
> >> + /* If dewarper is present, create its instance. */
> >> + DeviceMatch dwp("dw100");
> >> + dwp.add("dw100-source");
> >> + dwp.add("dw100-sink");
> >> +
> >> + std::shared_ptr<MediaDevice> dwpMediaDevice = enumerator->search(dwp);
> >> + if (dwpMediaDevice) {
> >> + dewarper_ = std::make_unique<V4L2M2MConverter>(dwpMediaDevice.get());
> >> + if (dewarper_->isValid()) {
> >> + dewarper_->outputBufferReady.connect(
> >> + this, &PipelineHandlerRkISP1::dewarpBufferReady);
> >> +
> >> + LOG(RkISP1, Info) << "Using DW100 dewarper " << dewarper_->deviceNode();
> >> + } else {
> >> + LOG(RkISP1, Warning) << "Found DW100 dewarper " << dewarper_->deviceNode()
> >> + << " but invalid";
> >> + dewarper_.reset();
> >> + }
> >> + }
> >> +
> >> /*
> >> * Enumerate all sensors connected to the ISP and create one
> >> * camera instance for each of them.
> >> @@ -1282,7 +1378,7 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
> >> return;
> >>
> >> const FrameMetadata &metadata = buffer->metadata();
> >> - Request *request = buffer->request();
> >> + Request *request = info->request;
> >>
> >> if (metadata.status != FrameMetadata::FrameCancelled) {
> >> /*
> >> @@ -1304,6 +1400,79 @@ void PipelineHandlerRkISP1::bufferReady(FrameBuffer *buffer)
> >> info->metadataProcessed = true;
> >> }
> >>
> >> + if (!useDewarper_) {
> >> + completeBuffer(request, buffer);
> >> + tryCompleteRequest(info);
> >> +
> >> + return;
> >> + }
> >> +
> >> + /* Do not queue cancelled frames to dewarper. */
> >> + if (metadata.status == FrameMetadata::FrameCancelled) {
> >> + /*
> >> + * i.MX8MP is the only known platform with a dewarper. It has
> >> + * no self path. Hence, only main path buffer completion is
> >> + * required.
> >> + *
> >> + * Also, we cannot completeBuffer(request, buffer) as the buffer
> >> + * here is an internal buffer (between the ISP and the dewarper)
> >> + * and is not associated with any specific request. The request
> >> + * buffer associated with the main path stream is the one that
> >> + * needs to be completed (not the internal buffer).
> >> + */
> >> + for (auto it : request->buffers()) {
> >> + if (it.first == &data->mainPathStream_)
> >> + completeBuffer(request, it.second);
> >> + }
> >> +
> >> + tryCompleteRequest(info);
> >> + return;
> >> + }
> >> +
> >> + /* Handle scaler crop control. */
> >> + const auto &crop = request->controls().get(controls::ScalerCrop);
> >> + if (crop) {
> >> + Rectangle appliedRect = crop.value();
> >> +
> >> + int ret = dewarper_->setInputCrop(&data->mainPathStream_,
> >> + &appliedRect);
> >> + if (!ret && appliedRect != crop.value()) {
> >> + /*
> >> + * If the rectangle is changed by setInputCrop on the
> >> + * dewarper, log a debug message and cache the actual
> >> + * applied rectangle for metadata reporting.
> >> + */
> >> + LOG(RkISP1, Debug)
> >> + << "Applied rectangle " << appliedRect.toString()
> >> + << " differs from requested " << crop.value().toString();
> >> + }
> >> +
> >> + activeCrop_ = appliedRect;
> >> + }
> >> +
> >> + /*
> >> + * Queue input and output buffers to the dewarper. The output
> >> + * buffers for the dewarper are the buffers of the request, supplied
> >> + * by the application.
> >> + */
> >> + int ret = dewarper_->queueBuffers(buffer, request->buffers());
> >> + if (ret < 0)
> >> + LOG(RkISP1, Error) << "Cannot queue buffers to dewarper: "
> >> + << strerror(-ret);
> >> +}
> >> +
> >> +void PipelineHandlerRkISP1::dewarpBufferReady(FrameBuffer *buffer)
> >> +{
> >> + ASSERT(activeCamera_);
> >> + RkISP1CameraData *data = cameraData(activeCamera_);
> >> + Request *request = buffer->request();
> >> +
> >> + RkISP1FrameInfo *info = data->frameInfo_.find(buffer->request());
> >> + if (!info)
> >> + return;
> >> +
> >> + request->metadata().set(controls::ScalerCrop, activeCrop_.value());
> >
> > I couldn't easily see if there are cases where more than one buffer is
> > queued to the dewarper. In that case the activeCrop_ could be the one
> > applied to the next frame. If it is ensured that this is not the case,
> > it might be worth a comment.
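> >
> > If multiple frames can be in flight in the dewarper, caching the
> > applied rectangle per frame instead of in a single member would avoid
> > reporting a crop that only takes effect on a later frame. A minimal
> > sketch, assuming a new (not yet existing) appliedCrop field in
> > RkISP1FrameInfo:
> >
> > 	/* In bufferReady(), when the crop has been applied for this frame: */
> > 	info->appliedCrop = appliedRect;
> >
> > 	/* In dewarpBufferReady(), report the per-frame value: */
> > 	request->metadata().set(controls::ScalerCrop, info->appliedCrop);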
> >
> >> +
> >> completeBuffer(request, buffer);
> >> tryCompleteRequest(info);
> >> }
--
Regards,
Laurent Pinchart