[libcamera-devel] [PATCH v2 12/12] android: camera_device: Support MJPEG stream construction
Kieran Bingham
kieran.bingham at ideasonboard.com
Mon Aug 3 18:18:16 CEST 2020
MJPEG streams must be constructed referencing a libcamera stream.
That stream may already be provided by the requested configuration,
in which case the existing stream is reused.
If no compatible stream is available to encode from, a new stream is
added to the libcamera configuration.
Signed-off-by: Kieran Bingham <kieran.bingham at ideasonboard.com>
---
This is probably the main patch holding me up at the moment, as there
are just so many bits I'm not happy with.
- Clean up the "Fill in the JPEG blob header."
- Streams added for MJPEG requirements don't have their own buffers
  (I only just realised that today, so I think we should pull that
  out for a first integration)
- JPEG compression should run in a thread.
- I really dislike the current mapping from a camera3_stream to a
  libcamera stream through the index
- The JPEG compressor is not currently destroyed.
  I was going to handle that by turning CameraStream into a class
  which would then destroy the compressor in its destructor - but I
  haven't gone down that path yet; maybe it still should happen (see
  the sketch below this list).
- Metadata additions are horrible to keep in sync (especially through
rebases)
- More stuff ;-)
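For the destructor point, a minimal sketch of what that could look like
(purely illustrative, keeping CameraStream as a struct and reusing the
CompressorJPEG type from this series):

  /* Hypothetical sketch, not part of this patch. */
  #include <memory>

  #include "jpeg/compressor_jpeg.h"

  /* Assumes the includes already present in camera_device.h. */
  struct CameraStream {
          unsigned int index;

          libcamera::PixelFormat format;
          libcamera::Size size;

          /*
           * Owning the compressor through a unique_ptr ties its
           * lifetime to the CameraStream, so it is released when the
           * stream goes away instead of leaking like the raw pointer
           * does today.
           */
          std::unique_ptr<CompressorJPEG> jpeg;
  };

configureStreams() would then assign it with
std::make_unique<CompressorJPEG>(), and call sites that want a plain
Compressor pointer would use jpeg.get().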
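For the blob header cleanup, roughly the shape it could take. Only a
sketch: it assumes the BLOB gralloc buffer really is jpegMaxSize
(ANDROID_JPEG_MAX_SIZE) bytes long, which is exactly the part the
\todo in the patch says still needs investigating:

  /* Hypothetical sketch, not part of this patch. */
  #include <cstddef>
  #include <cstdint>
  #include <cstring>

  #include <hardware/camera3.h>

  /*
   * Write the camera3_jpeg_blob transport header at the very end of
   * the BLOB buffer, using the advertised maximum JPEG size rather
   * than the (larger) mapped length, and memcpy() to sidestep any
   * alignment concerns.
   */
  static void writeJpegBlob(uint8_t *blobBuffer, size_t jpegMaxSize,
                            size_t jpegSize)
  {
          struct camera3_jpeg_blob blob = {};

          blob.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
          blob.jpeg_size = jpegSize;

          memcpy(blobBuffer + jpegMaxSize - sizeof(blob), &blob,
                 sizeof(blob));
  }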
Anyway, here's the current WIP - and it works ... so hey that's a bonus
;-)
src/android/camera_device.cpp | 202 +++++++++++++++++++++++++++++++++-
src/android/camera_device.h | 8 ++
2 files changed, 204 insertions(+), 6 deletions(-)
diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
index e23ab055d012..42b08cfc5fed 100644
--- a/src/android/camera_device.cpp
+++ b/src/android/camera_device.cpp
@@ -8,6 +8,7 @@
#include "camera_device.h"
#include "camera_ops.h"
+#include <sys/mman.h>
#include <tuple>
#include <vector>
@@ -21,6 +22,8 @@
#include "camera_metadata.h"
#include "system/graphics.h"
+#include "jpeg/compressor_jpeg.h"
+
using namespace libcamera;
namespace {
@@ -80,6 +83,40 @@ const std::map<int, const Camera3Format> camera3FormatsMap = {
LOG_DECLARE_CATEGORY(HAL);
+class MappedCamera3Buffer : public MappedBuffer
+{
+public:
+ MappedCamera3Buffer(const buffer_handle_t camera3buffer, int flags)
+ {
+ maps_.reserve(camera3buffer->numFds);
+ error_ = 0;
+
+ for (int i = 0; i < camera3buffer->numFds; i++) {
+ if (camera3buffer->data[i] == -1)
+ continue;
+
+ off_t length = lseek(camera3buffer->data[i], 0, SEEK_END);
+ if (length < 0) {
+ error_ = errno;
+ LOG(HAL, Error) << "Failed to query plane length";
+ break;
+ }
+
+ void *address = mmap(nullptr, length, flags, MAP_SHARED,
+ camera3buffer->data[i], 0);
+ if (address == MAP_FAILED) {
+ error_ = errno;
+ LOG(HAL, Error) << "Failed to mmap plane";
+ break;
+ }
+
+ maps_.emplace_back(static_cast<uint8_t *>(address), static_cast<size_t>(length));
+ }
+
+ valid_ = error_ == 0;
+ }
+};
+
/*
* \struct Camera3RequestDescriptor
*
@@ -368,9 +405,9 @@ std::tuple<uint32_t, uint32_t> CameraDevice::calculateStaticMetadataSize()
{
/*
* \todo Keep this in sync with the actual number of entries.
- * Currently: 50 entries, 647 bytes of static metadata
+ * Currently: 51 entries, 651 bytes of static metadata
*/
- uint32_t numEntries = 50;
+ uint32_t numEntries = 51;
uint32_t byteSize = 651;
/*
@@ -527,6 +564,9 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
availableThumbnailSizes.data(),
availableThumbnailSizes.size());
+ int32_t jpegMaxSize = 13 << 20; /* 13631488 from USB HAL */
+ staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
+
/* Sensor static metadata. */
int32_t pixelArraySize[] = {
2592, 1944,
@@ -729,6 +769,7 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
ANDROID_CONTROL_AVAILABLE_MODES,
ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
+ ANDROID_JPEG_MAX_SIZE,
ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
@@ -795,6 +836,9 @@ const camera_metadata_t *CameraDevice::getStaticMetadata()
ANDROID_SENSOR_EXPOSURE_TIME,
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
ANDROID_STATISTICS_SCENE_FLICKER,
+ ANDROID_JPEG_SIZE,
+ ANDROID_JPEG_QUALITY,
+ ANDROID_JPEG_ORIENTATION,
};
staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
availableResultKeys.data(),
@@ -956,6 +1000,7 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
*/
unsigned int streamIndex = 0;
+ /* First handle all non-MJPEG streams */
for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
camera3_stream_t *stream = stream_list->streams[i];
@@ -971,6 +1016,18 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
if (!format.isValid())
return -EINVAL;
+ stream->priv = static_cast<void *>(&streams_[i]);
+ streams_[i].format = format;
+ streams_[i].size = Size(stream->width, stream->height);
+
+ /* Defer handling of MJPEG streams until all others are known. */
+ if (format == formats::MJPEG) {
+ LOG(HAL, Info) << "Handling MJPEG stream";
+
+ streams_[i].index = -1;
+ continue;
+ }
+
StreamConfiguration streamConfiguration;
streamConfiguration.size.width = stream->width;
@@ -980,7 +1037,61 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
config_->addConfiguration(streamConfiguration);
streams_[i].index = streamIndex++;
- stream->priv = static_cast<void *>(&streams_[i]);
+ }
+
+ /* Now handle MJPEG streams, adding a new stream if required. */
+ for (unsigned int i = 0; i < stream_list->num_streams; ++i) {
+ camera3_stream_t *stream = stream_list->streams[i];
+ CameraStream *cameraStream = &streams_[i];
+
+ /* Only process MJPEG streams */
+ if (cameraStream->format != formats::MJPEG)
+ continue;
+
+ bool match = false;
+
+ /* Search for a compatible stream */
+ for (unsigned int j = 0; j < config_->size(); j++) {
+ StreamConfiguration &cfg = config_->at(j);
+
+ /*
+ * The PixelFormat must also be compatible with the
+ * encoder.
+ */
+ if (cfg.size == cameraStream->size) {
+ LOG(HAL, Info)
+ << "Stream " << i
+ << " using libcamera stream "
+ << j;
+
+ match = true;
+ cameraStream->index = j;
+ }
+ }
+
+ /*
+ * Without a compatible match for JPEG encoding we must
+ * introduce a new stream to satisfy the request requirements.
+ */
+ if (!match) {
+ StreamConfiguration streamConfiguration;
+
+ /*
+ * \todo: The pixelFormat should be a 'best-fit' choice
+ * and may require a validation cycle. This is not yet
+ * handled, and should be considered as part of any
+ * stream configuration reworks.
+ */
+ streamConfiguration.size.width = stream->width;
+ streamConfiguration.size.height = stream->height;
+ streamConfiguration.pixelFormat = formats::NV12;
+
+ LOG(HAL, Info) << "Adding " << streamConfiguration.toString()
+ << " for MJPEG support";
+
+ config_->addConfiguration(streamConfiguration);
+ streams_[i].index = streamIndex++;
+ }
}
switch (config_->validate()) {
@@ -1007,6 +1118,18 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
/* Use the bufferCount confirmed by the validation process. */
stream->max_buffers = cfg.bufferCount;
+
+ /*
+ * Construct a software compressor for MJPEG streams from the
+ * chosen libcamera source stream.
+ */
+ if (cameraStream->format == formats::MJPEG) {
+ cameraStream->jpeg = new CompressorJPEG();
+ cameraStream->jpeg->configure(cfg);
+ } else {
+ /* Either construct this correctly, or use a better interface */
+ cameraStream->jpeg = nullptr;
+ }
}
/*
@@ -1101,6 +1224,10 @@ int CameraDevice::processCaptureRequest(camera3_capture_request_t *camera3Reques
descriptor->buffers[i].stream = camera3Buffers[i].stream;
descriptor->buffers[i].buffer = camera3Buffers[i].buffer;
+ /* Software streams are handled after hardware streams complete. */
+ if (cameraStream->format == formats::MJPEG)
+ continue;
+
/*
* Create a libcamera buffer using the dmabuf descriptors of
* the camera3Buffer for each stream. The FrameBuffer is
@@ -1157,9 +1284,72 @@ void CameraDevice::requestComplete(Request *request)
resultMetadata = getResultMetadata(descriptor->frameNumber,
buffer->metadata().timestamp);
- /* Prepare to call back the Android camera stack. */
+ /* Handle any JPEG compression */
+ for (unsigned int i = 0; i < descriptor->numBuffers; ++i) {
+ CameraStream *cameraStream =
+ static_cast<CameraStream *>(descriptor->buffers[i].stream->priv);
+ Compressor *compressor = cameraStream->jpeg;
+ /* Only handle streams with compression enabled. */
+ if (!compressor)
+ continue;
+ StreamConfiguration *streamConfiguration = &config_->at(cameraStream->index);
+ Stream *stream = streamConfiguration->stream();
+ FrameBuffer *buffer = request->findBuffer(stream);
+ if (!buffer) {
+ LOG(HAL, Error) << "Failed to find a source stream buffer";
+ continue;
+ }
+
+ MappedCamera3Buffer mapped(*descriptor->buffers[i].buffer, PROT_READ | PROT_WRITE);
+ if (!mapped.isValid()) {
+ LOG(HAL, Error) << "Failed to mmap android blob buffer";
+ continue;
+ }
+
+ CompressedImage output;
+ output.data = static_cast<unsigned char *>(mapped.maps()[0].data());
+ output.length = mapped.maps()[0].size();
+
+ int ret = compressor->compress(buffer, &output);
+ if (ret) {
+ LOG(HAL, Error) << "Failed to compress stream image";
+ status = CAMERA3_BUFFER_STATUS_ERROR;
+ }
+
+ /* Fill in the JPEG blob header. */
+ {
+ /* The mapped size of the buffer is being returned as substantially larger
+ * than the requested JPEG_MAX_SIZE (re-defined here).
+ * Utilise this static size as a workaround to ensure the correct offset
+ * of the blob is determined.
+ *
+ * \todo Investigate and fix the root cause of this.
+ */
+ int32_t max_size = { 13 << 20 };
+ uint8_t *resultPtr = static_cast<uint8_t *>(mapped.maps()[0].data()) +
+ max_size - sizeof(struct camera3_jpeg_blob);
+ auto *blob = reinterpret_cast<struct camera3_jpeg_blob *>(resultPtr);
+
+ blob->jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
+ blob->jpeg_size = output.length;
+ }
+
+ /* Update the JPEG result Metadata. */
+ resultMetadata->addEntry(ANDROID_JPEG_SIZE,
+ &output.length, 1);
+
+ const uint32_t jpeg_quality = 95;
+ resultMetadata->addEntry(ANDROID_JPEG_QUALITY,
+ &jpeg_quality, 1);
+
+ const uint32_t jpeg_orientation = 0;
+ resultMetadata->addEntry(ANDROID_JPEG_ORIENTATION,
+ &jpeg_orientation, 1);
+ }
+
+ /* Prepare to call back the Android camera stack. */
camera3_capture_result_t captureResult = {};
captureResult.frame_number = descriptor->frameNumber;
captureResult.num_output_buffers = descriptor->numBuffers;
@@ -1240,10 +1430,10 @@ std::unique_ptr<CameraMetadata> CameraDevice::getResultMetadata(int frame_number
{
/*
* \todo Keep this in sync with the actual number of entries.
- * Currently: 12 entries, 36 bytes
+ * Currently: 17 entries, 58 bytes
*/
std::unique_ptr<CameraMetadata> resultMetadata =
- std::make_unique<CameraMetadata>(15, 50);
+ std::make_unique<CameraMetadata>(17, 58);
if (!resultMetadata->isValid()) {
LOG(HAL, Error) << "Failed to allocate static metadata";
return nullptr;
diff --git a/src/android/camera_device.h b/src/android/camera_device.h
index 00472c219388..c9ece426a00e 100644
--- a/src/android/camera_device.h
+++ b/src/android/camera_device.h
@@ -23,6 +23,8 @@
#include "libcamera/internal/log.h"
#include "libcamera/internal/message.h"
+#include "jpeg/compressor_jpeg.h"
+
class CameraMetadata;
struct CameraStream {
@@ -32,6 +34,12 @@ struct CameraStream {
* one or more streams to the Android framework.
*/
unsigned int index;
+
+ libcamera::PixelFormat format;
+ libcamera::Size size;
+
+ /* Make sure this gets destructed correctly */
+ CompressorJPEG *jpeg;
};
class CameraDevice : protected libcamera::Loggable
--
2.25.1