[libcamera-devel] [PATCH 2/2] android: Introduce CameraCapabilities class
Hirokazu Honda
hiroh at chromium.org
Tue Jun 22 03:34:27 CEST 2021
Hi Jacopo, thank you for the patch.
I failed to apply the patch on top of the latest tree for review.
Could you tell me the parent commit on which I can apply this patch?
-Hiro
On Tue, Jun 22, 2021 at 12:29 AM Jacopo Mondi <jacopo at jmondi.org> wrote:
> The camera_device.cpp has grown a little too much, and it has quickly
> become hard to maintain. Break out the handling of the static
> information collected at camera initialization time to a new
> CameraCapabilities class.
>
> Break out from the camera_device.cpp file all the functions related to:
> - Initialization of supported stream configurations
> - Initialization of static metadata
> - Initialization of request templates
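>
> As a rough usage sketch (using only the interfaces introduced by this
> patch), the camera device now delegates to the new class:
>
>     CameraCapabilities capabilities;
>     int ret = capabilities.initialize(camera, orientation, facing);
>     if (ret)
>             return ret;
>
>     const camera_metadata_t *metadata = capabilities.staticMetadata()->get();
>     libcamera::PixelFormat format = capabilities.toPixelFormat(HAL_PIXEL_FORMAT_BLOB);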
>
> Signed-off-by: Jacopo Mondi <jacopo at jmondi.org>
> Acked-by: Paul Elder <paul.elder at ideasonboard.com>
> Tested-by: Paul Elder <paul.elder at ideasonboard.com>
> ---
> src/android/camera_capabilities.cpp | 1164 +++++++++++++++++++++++++++
> src/android/camera_capabilities.h | 65 ++
> src/android/camera_device.cpp | 1147 +-------------------------
> src/android/camera_device.h | 27 +-
> src/android/meson.build | 1 +
> 5 files changed, 1245 insertions(+), 1159 deletions(-)
> create mode 100644 src/android/camera_capabilities.cpp
> create mode 100644 src/android/camera_capabilities.h
>
> diff --git a/src/android/camera_capabilities.cpp b/src/android/camera_capabilities.cpp
> new file mode 100644
> index 000000000000..311a2c839586
> --- /dev/null
> +++ b/src/android/camera_capabilities.cpp
> @@ -0,0 +1,1164 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2021, Google Inc.
> + *
> + * camera_capabilities.cpp - Camera static properties manager
> + */
> +
> +#include "camera_capabilities.h"
> +
> +#include <array>
> +#include <cmath>
> +
> +#include <hardware/camera3.h>
> +
> +#include <libcamera/control_ids.h>
> +#include <libcamera/controls.h>
> +#include <libcamera/property_ids.h>
> +
> +#include "libcamera/internal/formats.h"
> +#include "libcamera/internal/log.h"
> +
> +using namespace libcamera;
> +
> +LOG_DECLARE_CATEGORY(HAL)
> +
> +namespace {
> +
> +/*
> + * \var camera3Resolutions
> + * \brief The list of image resolutions defined as mandatory to be supported by
> + * the Android Camera3 specification
> + */
> +const std::vector<Size> camera3Resolutions = {
> + { 320, 240 },
> + { 640, 480 },
> + { 1280, 720 },
> + { 1920, 1080 }
> +};
> +
> +/*
> + * \struct Camera3Format
> + * \brief Data associated with an Android format identifier
> + * \var libcameraFormats List of libcamera pixel formats compatible with the
> + * Android format
> + * \var name The human-readable representation of the Android format code
> + */
> +struct Camera3Format {
> + std::vector<PixelFormat> libcameraFormats;
> + bool mandatory;
> + const char *name;
> +};
> +
> +/*
> + * \var camera3FormatsMap
> + * \brief Associate Android format code with ancillary data
> + */
> +const std::map<int, const Camera3Format> camera3FormatsMap = {
> + {
> + HAL_PIXEL_FORMAT_BLOB, {
> + { formats::MJPEG },
> + true,
> + "BLOB"
> + }
> + }, {
> + HAL_PIXEL_FORMAT_YCbCr_420_888, {
> + { formats::NV12, formats::NV21 },
> + true,
> + "YCbCr_420_888"
> + }
> + }, {
> + /*
> + * \todo Translate IMPLEMENTATION_DEFINED inspecting the gralloc
> + * usage flag. For now, copy the YCbCr_420 configuration.
> + */
> + HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, {
> + { formats::NV12, formats::NV21 },
> + true,
> + "IMPLEMENTATION_DEFINED"
> + }
> + }, {
> + HAL_PIXEL_FORMAT_RAW10, {
> + {
> + formats::SBGGR10_CSI2P,
> + formats::SGBRG10_CSI2P,
> + formats::SGRBG10_CSI2P,
> + formats::SRGGB10_CSI2P
> + },
> + false,
> + "RAW10"
> + }
> + }, {
> + HAL_PIXEL_FORMAT_RAW12, {
> + {
> + formats::SBGGR12_CSI2P,
> + formats::SGBRG12_CSI2P,
> + formats::SGRBG12_CSI2P,
> + formats::SRGGB12_CSI2P
> + },
> + false,
> + "RAW12"
> + }
> + }, {
> + HAL_PIXEL_FORMAT_RAW16, {
> + {
> + formats::SBGGR16,
> + formats::SGBRG16,
> + formats::SGRBG16,
> + formats::SRGGB16
> + },
> + false,
> + "RAW16"
> + }
> + },
> +};
> +
> +} /* namespace */
> +
> +int CameraCapabilities::initialize(std::shared_ptr<libcamera::Camera> camera,
> + int orientation, int facing)
> +{
> + camera_ = camera;
> + orientation_ = orientation;
> + facing_ = facing;
> +
> + /* Acquire the camera and initialize available stream configurations. */
> + int ret = camera_->acquire();
> + if (ret) {
> + LOG(HAL, Error) << "Failed to temporarily acquire the camera";
> + return ret;
> + }
> +
> + ret = initializeStreamConfigurations();
> + camera_->release();
> + if (ret)
> + return ret;
> +
> + return initializeStaticMetadata();
> +}
> +
> +std::vector<Size> CameraCapabilities::getYUVResolutions(CameraConfiguration *cameraConfig,
> + const PixelFormat &pixelFormat,
> + const std::vector<Size> &resolutions)
> +{
> + std::vector<Size> supportedResolutions;
> +
> + StreamConfiguration &cfg = cameraConfig->at(0);
> + for (const Size &res : resolutions) {
> + cfg.pixelFormat = pixelFormat;
> + cfg.size = res;
> +
> + CameraConfiguration::Status status = cameraConfig->validate();
> + if (status != CameraConfiguration::Valid) {
> + LOG(HAL, Debug) << cfg.toString() << " not supported";
> + continue;
> + }
> +
> + LOG(HAL, Debug) << cfg.toString() << " supported";
> +
> + supportedResolutions.push_back(res);
> + }
> +
> + return supportedResolutions;
> +}
> +
> +std::vector<Size> CameraCapabilities::getRawResolutions(const libcamera::PixelFormat &pixelFormat)
> +{
> + std::unique_ptr<CameraConfiguration> cameraConfig =
> + camera_->generateConfiguration({ StreamRole::Raw });
> + StreamConfiguration &cfg = cameraConfig->at(0);
> + const StreamFormats &formats = cfg.formats();
> + std::vector<Size> supportedResolutions = formats.sizes(pixelFormat);
> +
> + return supportedResolutions;
> +}
> +
> +/*
> + * Initialize the format conversion map to translate from Android format
> + * identifier to libcamera pixel formats and fill in the list of supported
> + * stream configurations to be reported to the Android camera framework through
> + * the Camera static metadata.
> + */
> +int CameraCapabilities::initializeStreamConfigurations()
> +{
> + /*
> + * Get the maximum output resolutions
> + * \todo Get this from the camera properties once defined
> + */
> + std::unique_ptr<CameraConfiguration> cameraConfig =
> + camera_->generateConfiguration({ StillCapture });
> + if (!cameraConfig) {
> + LOG(HAL, Error) << "Failed to get maximum resolution";
> + return -EINVAL;
> + }
> + StreamConfiguration &cfg = cameraConfig->at(0);
> +
> + /*
> + * \todo JPEG - Adjust the maximum available resolution by taking the
> + * JPEG encoder requirements into account (alignment and aspect ratio).
> + */
> + const Size maxRes = cfg.size;
> + LOG(HAL, Debug) << "Maximum supported resolution: " << maxRes.toString();
> +
> + /*
> + * Build the list of supported image resolutions.
> + *
> + * The resolutions listed in camera3Resolution are mandatory to be
> + * supported, up to the camera maximum resolution.
> + *
> + * Augment the list by adding resolutions calculated from the camera
> + * maximum one.
> + */
> + std::vector<Size> cameraResolutions;
> + std::copy_if(camera3Resolutions.begin(), camera3Resolutions.end(),
> + std::back_inserter(cameraResolutions),
> + [&](const Size &res) { return res < maxRes; });
> +
> + /*
> + * The Camera3 specification suggests adding 1/2 and 1/4 of the maximum
> + * resolution.
> + */
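> + /*
> + * For example, starting from a 1920x1080 maximum, 960x540 and 480x270
> + * are added; the loop then stops at 240x135, whose height drops below
> + * the 240 limit.
> + */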
> + for (unsigned int divider = 2;; divider <<= 1) {
> + Size derivedSize{
> + maxRes.width / divider,
> + maxRes.height / divider,
> + };
> +
> + if (derivedSize.width < 320 ||
> + derivedSize.height < 240)
> + break;
> +
> + cameraResolutions.push_back(derivedSize);
> + }
> + cameraResolutions.push_back(maxRes);
> +
> + /* Remove duplicated entries from the list of supported resolutions. */
> + std::sort(cameraResolutions.begin(), cameraResolutions.end());
> + auto last = std::unique(cameraResolutions.begin(), cameraResolutions.end());
> + cameraResolutions.erase(last, cameraResolutions.end());
> +
> + /*
> + * Build the list of supported camera formats.
> + *
> + * To each Android format a list of compatible libcamera formats is
> + * associated. The first libcamera format that tests successful is added
> + * to the format translation map used when configuring the streams.
> + * It is then tested against the list of supported camera resolutions to
> + * build the stream configuration map reported through the camera static
> + * metadata.
> + */
> + Size maxJpegSize;
> + for (const auto &format : camera3FormatsMap) {
> + int androidFormat = format.first;
> + const Camera3Format &camera3Format = format.second;
> + const std::vector<PixelFormat> &libcameraFormats =
> + camera3Format.libcameraFormats;
> +
> + LOG(HAL, Debug) << "Trying to map Android format "
> + << camera3Format.name;
> +
> + /*
> + * JPEG is always supported, either produced directly by the
> + * camera, or encoded in the HAL.
> + */
> + if (androidFormat == HAL_PIXEL_FORMAT_BLOB) {
> + formatsMap_[androidFormat] = formats::MJPEG;
> + LOG(HAL, Debug) << "Mapped Android format "
> + << camera3Format.name << " to "
> + << formats::MJPEG.toString()
> + << " (fixed mapping)";
> + continue;
> + }
> +
> + /*
> + * Test the libcamera formats that can produce images
> + * compatible with the format defined by Android.
> + */
> + PixelFormat mappedFormat;
> + for (const PixelFormat &pixelFormat : libcameraFormats) {
> +
> + LOG(HAL, Debug) << "Testing " << pixelFormat.toString();
> +
> + /*
> + * The stream configuration size can be adjusted,
> + * not the pixel format.
> + *
> + * \todo This could be simplified once all pipeline
> + * handlers will report the StreamFormats list of
> + * supported formats.
> + */
> + cfg.pixelFormat = pixelFormat;
> +
> + CameraConfiguration::Status status = cameraConfig->validate();
> + if (status != CameraConfiguration::Invalid &&
> + cfg.pixelFormat == pixelFormat) {
> + mappedFormat = pixelFormat;
> + break;
> + }
> + }
> +
> + if (!mappedFormat.isValid()) {
> + /* If the format is not mandatory, skip it. */
> + if (!camera3Format.mandatory)
> + continue;
> +
> + LOG(HAL, Error)
> + << "Failed to map mandatory Android format "
> + << camera3Format.name << " ("
> + << utils::hex(androidFormat) << "): aborting";
> + return -EINVAL;
> + }
> +
> + /*
> + * Record the mapping and then proceed to generate the
> + * stream configurations map, by testing the image resolutions.
> + */
> + formatsMap_[androidFormat] = mappedFormat;
> + LOG(HAL, Debug) << "Mapped Android format "
> + << camera3Format.name << " to "
> + << mappedFormat.toString();
> +
> + std::vector<Size> resolutions;
> + const PixelFormatInfo &info = PixelFormatInfo::info(mappedFormat);
> + if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)
> + resolutions = getRawResolutions(mappedFormat);
> + else
> + resolutions = getYUVResolutions(cameraConfig.get(),
> + mappedFormat,
> + cameraResolutions);
> +
> + for (const Size &res : resolutions) {
> + streamConfigurations_.push_back({ res, androidFormat });
> +
> + /*
> + * If the format is HAL_PIXEL_FORMAT_YCbCr_420_888
> + * from which JPEG is produced, add an entry for
> + * the JPEG stream.
> + *
> + * \todo Wire the JPEG encoder to query the supported
> + * sizes provided a list of formats it can encode.
> + *
> + * \todo Support JPEG streams produced by the Camera
> + * natively.
> + */
> + if (androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888) {
> + streamConfigurations_.push_back(
> + { res, HAL_PIXEL_FORMAT_BLOB });
> + maxJpegSize = std::max(maxJpegSize, res);
> + }
> + }
> +
> + /*
> + * \todo Calculate the maximum JPEG buffer size by asking the
> + * encoder, given the maximum frame size required.
> + */
> + maxJpegBufferSize_ = maxJpegSize.width * maxJpegSize.height * 1.5;
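> + /*
> + * The 1.5 factor above is the size in bytes per pixel of an
> + * uncompressed YUV 4:2:0 frame, used as a conservative upper bound
> + * for the compressed JPEG output.
> + */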
> + }
> +
> + LOG(HAL, Debug) << "Collected stream configuration map: ";
> + for (const auto &entry : streamConfigurations_)
> + LOG(HAL, Debug) << "{ " << entry.resolution.toString() << " - "
> + << utils::hex(entry.androidFormat) << " }";
> +
> + return 0;
> +}
> +
> +int CameraCapabilities::initializeStaticMetadata()
> +{
> + staticMetadata_ = std::make_unique<CameraMetadata>(64, 1024);
> + if (!staticMetadata_->isValid()) {
> + LOG(HAL, Error) << "Failed to allocate static metadata";
> + staticMetadata_.reset();
> + return -EINVAL;
> + }
> +
> + const ControlInfoMap &controlsInfo = camera_->controls();
> + const ControlList &properties = camera_->properties();
> +
> + /* Color correction static metadata. */
> + {
> + std::vector<uint8_t> data;
> + data.reserve(3);
> + const auto &infoMap = controlsInfo.find(&controls::draft::ColorCorrectionAberrationMode);
> + if (infoMap != controlsInfo.end()) {
> + for (const auto &value : infoMap->second.values())
> + data.push_back(value.get<int32_t>());
> + } else {
> + data.push_back(ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF);
> + }
> + staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
> + data);
> + }
> +
> + /* Control static metadata. */
> + std::vector<uint8_t> aeAvailableAntiBandingModes = {
> + ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
> + ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
> + ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
> + ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
> + aeAvailableAntiBandingModes);
> +
> + std::vector<uint8_t> aeAvailableModes = {
> + ANDROID_CONTROL_AE_MODE_ON,
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,
> + aeAvailableModes);
> +
> + int64_t minFrameDurationNsec = -1;
> + int64_t maxFrameDurationNsec = -1;
> + const auto frameDurationsInfo = controlsInfo.find(&controls::FrameDurationLimits);
> + if (frameDurationsInfo != controlsInfo.end()) {
> + minFrameDurationNsec = frameDurationsInfo->second.min().get<int64_t>() * 1000;
> + maxFrameDurationNsec = frameDurationsInfo->second.max().get<int64_t>() * 1000;
> +
> + /*
> + * Adjust the minimum frame duration to comply with Android
> + * requirements. The camera service mandates all preview/record
> + * streams to have a minimum frame duration < 33,366 microseconds
> + * (see MAX_PREVIEW_RECORD_DURATION_NS in the camera service
> + * implementation).
> + *
> + * If we're close enough (+ 500 useconds) to that value, round
> + * the minimum frame duration of the camera to an accepted
> + * value.
> + */
> + static constexpr int64_t MAX_PREVIEW_RECORD_DURATION_NS = 1e9 / 29.97;
> + if (minFrameDurationNsec > MAX_PREVIEW_RECORD_DURATION_NS &&
> + minFrameDurationNsec < MAX_PREVIEW_RECORD_DURATION_NS + 500000)
> + minFrameDurationNsec = MAX_PREVIEW_RECORD_DURATION_NS - 1000;
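> + /*
> + * Worked example: MAX_PREVIEW_RECORD_DURATION_NS is ~33,366,700 ns
> + * (29.97 fps). A camera reporting a 33,400,000 ns minimum duration
> + * (~29.94 fps) falls within the 500 us window and gets rounded down
> + * to 33,365,700 ns, which the camera service accepts.
> + */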
> +
> + /*
> + * The AE routine frame rate limits are computed using the frame
> + * duration limits, as libcamera clips the AE routine to the
> + * frame durations.
> + */
> + int32_t maxFps = std::round(1e9 / minFrameDurationNsec);
> + int32_t minFps = std::round(1e9 / maxFrameDurationNsec);
> + minFps = std::max(1, minFps);
> +
> + /*
> + * Force rounding errors so that we have the proper frame
> + * durations for when we reuse these variables later
> + */
> + minFrameDurationNsec = 1e9 / maxFps;
> + maxFrameDurationNsec = 1e9 / minFps;
> +
> + /*
> + * Register with the camera service the {min, max} and {max, max}
> + * intervals, as requested by the metadata documentation.
> + */
> + int32_t availableAeFpsTarget[] = {
> + minFps, maxFps, maxFps, maxFps
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
> + availableAeFpsTarget);
> + }
> +
> + std::vector<int32_t> aeCompensationRange = {
> + 0, 0,
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
> + aeCompensationRange);
> +
> + const camera_metadata_rational_t aeCompensationStep[] = {
> + { 0, 1 }
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,
> + aeCompensationStep);
> +
> + std::vector<uint8_t> availableAfModes = {
> + ANDROID_CONTROL_AF_MODE_OFF,
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,
> + availableAfModes);
> +
> + std::vector<uint8_t> availableEffects = {
> + ANDROID_CONTROL_EFFECT_MODE_OFF,
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,
> + availableEffects);
> +
> + std::vector<uint8_t> availableSceneModes = {
> + ANDROID_CONTROL_SCENE_MODE_DISABLED,
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
> + availableSceneModes);
> +
> + std::vector<uint8_t> availableStabilizationModes = {
> + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
> + availableStabilizationModes);
> +
> + /*
> + * \todo Inspect the Camera capabilities to report the available
> + * AWB modes. Default to AUTO as CTS tests require it.
> + */
> + std::vector<uint8_t> availableAwbModes = {
> + ANDROID_CONTROL_AWB_MODE_AUTO,
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
> + availableAwbModes);
> +
> + std::vector<int32_t> availableMaxRegions = {
> + 0, 0, 0,
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,
> + availableMaxRegions);
> +
> + std::vector<uint8_t> sceneModesOverride = {
> + ANDROID_CONTROL_AE_MODE_ON,
> + ANDROID_CONTROL_AWB_MODE_AUTO,
> + ANDROID_CONTROL_AF_MODE_OFF,
> + };
> + staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
> + sceneModesOverride);
> +
> + uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
> + staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
> + aeLockAvailable);
> +
> + uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
> + staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
> + awbLockAvailable);
> +
> + char availableControlModes = ANDROID_CONTROL_MODE_AUTO;
> + staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,
> + availableControlModes);
> +
> + /* JPEG static metadata. */
> +
> + /*
> + * Create the list of supported thumbnail sizes by inspecting the
> + * available JPEG resolutions collected in streamConfigurations_ and
> + * generate one entry for each aspect ratio.
> + *
> + * The JPEG thumbnailer can freely scale, so pick an arbitrary
> + * (160, 160) size as the bounding rectangle, which is then cropped to
> + * the different supported aspect ratios.
> + */
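> + /*
> + * For instance, a 4:3 JPEG resolution yields a 160x120 thumbnail
> + * entry, while a 16:9 one yields 160x90.
> + */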
> + constexpr Size maxJpegThumbnail(160, 160);
> + std::vector<Size> thumbnailSizes;
> + thumbnailSizes.push_back({ 0, 0 });
> + for (const auto &entry : streamConfigurations_) {
> + if (entry.androidFormat != HAL_PIXEL_FORMAT_BLOB)
> + continue;
> +
> + Size thumbnailSize = maxJpegThumbnail
> + .boundedToAspectRatio({ entry.resolution.width,
> + entry.resolution.height });
> + thumbnailSizes.push_back(thumbnailSize);
> + }
> +
> + std::sort(thumbnailSizes.begin(), thumbnailSizes.end());
> + auto last = std::unique(thumbnailSizes.begin(), thumbnailSizes.end());
> + thumbnailSizes.erase(last, thumbnailSizes.end());
> +
> + /* Transform sizes into a list of integers that can be consumed. */
> + std::vector<int32_t> thumbnailEntries;
> + thumbnailEntries.reserve(thumbnailSizes.size() * 2);
> + for (const auto &size : thumbnailSizes) {
> + thumbnailEntries.push_back(size.width);
> + thumbnailEntries.push_back(size.height);
> + }
> + staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
> + thumbnailEntries);
> +
> + staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, maxJpegBufferSize_);
> +
> + /* Sensor static metadata. */
> + std::array<int32_t, 2> pixelArraySize;
> + {
> + const Size &size = properties.get(properties::PixelArraySize);
> + pixelArraySize[0] = size.width;
> + pixelArraySize[1] = size.height;
> + staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
> + pixelArraySize);
> + }
> +
> + if (properties.contains(properties::UnitCellSize)) {
> + const Size &cellSize = properties.get<Size>(properties::UnitCellSize);
> + std::array<float, 2> physicalSize{
> + cellSize.width * pixelArraySize[0] / 1e6f,
> + cellSize.height * pixelArraySize[1] / 1e6f
> + };
> + staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
> + physicalSize);
> + }
> +
> + {
> + const Span<const Rectangle> &rects =
> + properties.get(properties::PixelArrayActiveAreas);
> + std::vector<int32_t> data{
> + static_cast<int32_t>(rects[0].x),
> + static_cast<int32_t>(rects[0].y),
> + static_cast<int32_t>(rects[0].width),
> + static_cast<int32_t>(rects[0].height),
> + };
> + staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
> + data);
> + }
> +
> + int32_t sensitivityRange[] = {
> + 32, 2400,
> + };
> + staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
> + sensitivityRange);
> +
> + /* Report the color filter arrangement if the camera reports it. */
> + if (properties.contains(properties::draft::ColorFilterArrangement)) {
> + uint8_t filterArr = properties.get(properties::draft::ColorFilterArrangement);
> + staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
> + filterArr);
> + }
> +
> + const auto &exposureInfo = controlsInfo.find(&controls::ExposureTime);
> + if (exposureInfo != controlsInfo.end()) {
> + int64_t exposureTimeRange[2] = {
> + exposureInfo->second.min().get<int32_t>() * 1000LL,
> + exposureInfo->second.max().get<int32_t>() * 1000LL,
> + };
> + staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
> + exposureTimeRange, 2);
> + }
> +
> + staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, orientation_);
> +
> + std::vector<int32_t> testPatternModes = {
> + ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
> + };
> + const auto &testPatternsInfo =
> + controlsInfo.find(&controls::draft::TestPatternMode);
> + if (testPatternsInfo != controlsInfo.end()) {
> + const auto &values = testPatternsInfo->second.values();
> + ASSERT(!values.empty());
> + for (const auto &value : values) {
> + switch (value.get<int32_t>()) {
> + case controls::draft::TestPatternModeOff:
> + /*
> + * ANDROID_SENSOR_TEST_PATTERN_MODE_OFF is
> + * already in testPatternModes.
> + */
> + break;
> +
> + case controls::draft::TestPatternModeSolidColor:
> + testPatternModes.push_back(
> + ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
> + break;
> +
> + case controls::draft::TestPatternModeColorBars:
> + testPatternModes.push_back(
> + ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS);
> + break;
> +
> + case controls::draft::TestPatternModeColorBarsFadeToGray:
> + testPatternModes.push_back(
> + ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY);
> + break;
> +
> + case controls::draft::TestPatternModePn9:
> + testPatternModes.push_back(
> + ANDROID_SENSOR_TEST_PATTERN_MODE_PN9);
> + break;
> +
> + case controls::draft::TestPatternModeCustom1:
> + /* We don't support this yet. */
> + break;
> +
> + default:
> + LOG(HAL, Error) << "Unknown test pattern mode: "
> + << value.get<int32_t>();
> + continue;
> + }
> + }
> + }
> + staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
> + testPatternModes);
> +
> + uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
> + staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
> + timestampSource);
> +
> + if (maxFrameDurationNsec > 0)
> + staticMetadata_->addEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
> + maxFrameDurationNsec);
> +
> + /* Statistics static metadata. */
> + uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
> + staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
> + faceDetectMode);
> +
> + int32_t maxFaceCount = 0;
> + staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
> + maxFaceCount);
> +
> + {
> + std::vector<uint8_t> data;
> + data.reserve(2);
> + const auto &infoMap = controlsInfo.find(&controls::draft::LensShadingMapMode);
> + if (infoMap != controlsInfo.end()) {
> + for (const auto &value : infoMap->second.values())
> + data.push_back(value.get<int32_t>());
> + } else {
> + data.push_back(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF);
> + }
> + staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
> + data);
> + }
> +
> + /* Sync static metadata. */
> + int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
> + staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, maxLatency);
> +
> + /* Flash static metadata. */
> + char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
> + staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,
> + flashAvailable);
> +
> + /* Lens static metadata. */
> + std::vector<float> lensApertures = {
> + 2.53 / 100,
> + };
> + staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
> + lensApertures);
> +
> + uint8_t lensFacing;
> + switch (facing_) {
> + default:
> + case CAMERA_FACING_FRONT:
> + lensFacing = ANDROID_LENS_FACING_FRONT;
> + break;
> + case CAMERA_FACING_BACK:
> + lensFacing = ANDROID_LENS_FACING_BACK;
> + break;
> + case CAMERA_FACING_EXTERNAL:
> + lensFacing = ANDROID_LENS_FACING_EXTERNAL;
> + break;
> + }
> + staticMetadata_->addEntry(ANDROID_LENS_FACING, lensFacing);
> +
> + std::vector<float> lensFocalLengths = {
> + 1,
> + };
> + staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
> + lensFocalLengths);
> +
> + std::vector<uint8_t> opticalStabilizations = {
> + ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
> + };
> + staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
> + opticalStabilizations);
> +
> + float hyperfocalDistance = 0;
> + staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
> + hyperfocalDistance);
> +
> + float minFocusDistance = 0;
> + staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
> + minFocusDistance);
> +
> + /* Noise reduction modes. */
> + {
> + std::vector<uint8_t> data;
> + data.reserve(5);
> + const auto &infoMap = controlsInfo.find(&controls::draft::NoiseReductionMode);
> + if (infoMap != controlsInfo.end()) {
> + for (const auto &value : infoMap->second.values())
> + data.push_back(value.get<int32_t>());
> + } else {
> + data.push_back(ANDROID_NOISE_REDUCTION_MODE_OFF);
> + }
> + staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
> + data);
> + }
> +
> + /* Scaler static metadata. */
> +
> + /*
> + * \todo The digital zoom factor is a property that depends on the
> + * desired output configuration and the sensor frame size input to the
> + * ISP. This information is not available to the Android HAL, not at
> + * initialization time at least.
> + *
> + * As a workaround rely on pipeline handlers initializing the
> + * ScalerCrop control with the camera default configuration and use the
> + * maximum and minimum crop rectangles to calculate the digital zoom
> + * factor.
> + */
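> + /*
> + * For example, a maximum ScalerCrop of 2560x1920 against a minimum of
> + * 640x480 reports a maximum digital zoom factor of 4.0.
> + */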
> + float maxZoom = 1.0f;
> + const auto scalerCrop = controlsInfo.find(&controls::ScalerCrop);
> + if (scalerCrop != controlsInfo.end()) {
> + Rectangle min = scalerCrop->second.min().get<Rectangle>();
> + Rectangle max = scalerCrop->second.max().get<Rectangle>();
> + maxZoom = std::min(1.0f * max.width / min.width,
> + 1.0f * max.height / min.height);
> + }
> + staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
> + maxZoom);
> +
> + std::vector<uint32_t> availableStreamConfigurations;
> + availableStreamConfigurations.reserve(streamConfigurations_.size() * 4);
> + for (const auto &entry : streamConfigurations_) {
> + availableStreamConfigurations.push_back(entry.androidFormat);
> + availableStreamConfigurations.push_back(entry.resolution.width);
> + availableStreamConfigurations.push_back(entry.resolution.height);
> + availableStreamConfigurations.push_back(
> + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
> + }
> + staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
> + availableStreamConfigurations);
> +
> + std::vector<int64_t> availableStallDurations = {
> + ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
> + };
> + staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
> + availableStallDurations);
> +
> + /* Use the minimum frame duration for all the YUV/RGB formats. */
> + if (minFrameDurationNsec > 0) {
> + std::vector<int64_t> minFrameDurations;
> + minFrameDurations.reserve(streamConfigurations_.size() * 4);
> + for (const auto &entry : streamConfigurations_) {
> + minFrameDurations.push_back(entry.androidFormat);
> + minFrameDurations.push_back(entry.resolution.width);
> + minFrameDurations.push_back(entry.resolution.height);
> + minFrameDurations.push_back(minFrameDurationNsec);
> + }
> + staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
> + minFrameDurations);
> + }
> +
> + uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
> + staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, croppingType);
> +
> + /* Info static metadata. */
> + uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
> + staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
> + supportedHWLevel);
> +
> + /* Request static metadata. */
> + int32_t partialResultCount = 1;
> + staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
> + partialResultCount);
> +
> + {
> + /* Default the value to 2 if not reported by the camera. */
> + uint8_t maxPipelineDepth = 2;
> + const auto &infoMap = controlsInfo.find(&controls::draft::PipelineDepth);
> + if (infoMap != controlsInfo.end())
> + maxPipelineDepth = infoMap->second.max().get<int32_t>();
> + staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
> + maxPipelineDepth);
> + }
> +
> + /* LIMITED does not support reprocessing. */
> + uint32_t maxNumInputStreams = 0;
> + staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
> + maxNumInputStreams);
> +
> + std::vector<uint8_t> availableCapabilities = {
> + ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
> + };
> +
> + /* Report if camera supports RAW. */
> + bool rawStreamAvailable = false;
> + std::unique_ptr<CameraConfiguration> cameraConfig =
> + camera_->generateConfiguration({ StreamRole::Raw });
> + if (cameraConfig && !cameraConfig->empty()) {
> + const PixelFormatInfo &info =
> + PixelFormatInfo::info(cameraConfig->at(0).pixelFormat);
> + /* Only advertise RAW support if RAW16 is possible. */
> + if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW &&
> + info.bitsPerPixel == 16) {
> + rawStreamAvailable = true;
> + availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
> + }
> + }
> +
> + /* Number of { RAW, YUV, JPEG } supported output streams */
> + int32_t numOutStreams[] = { rawStreamAvailable, 2, 1 };
> + staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
> + numOutStreams);
> +
> + staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
> + availableCapabilities);
> +
> + std::vector<int32_t> availableCharacteristicsKeys = {
> + ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
> + ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
> + ANDROID_CONTROL_AE_AVAILABLE_MODES,
> + ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
> + ANDROID_CONTROL_AE_COMPENSATION_RANGE,
> + ANDROID_CONTROL_AE_COMPENSATION_STEP,
> + ANDROID_CONTROL_AE_LOCK_AVAILABLE,
> + ANDROID_CONTROL_AF_AVAILABLE_MODES,
> + ANDROID_CONTROL_AVAILABLE_EFFECTS,
> + ANDROID_CONTROL_AVAILABLE_MODES,
> + ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
> + ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
> + ANDROID_CONTROL_AWB_AVAILABLE_MODES,
> + ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
> + ANDROID_CONTROL_MAX_REGIONS,
> + ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
> + ANDROID_FLASH_INFO_AVAILABLE,
> + ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
> + ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
> + ANDROID_JPEG_MAX_SIZE,
> + ANDROID_LENS_FACING,
> + ANDROID_LENS_INFO_AVAILABLE_APERTURES,
> + ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
> + ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
> + ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
> + ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
> + ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
> + ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
> + ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
> + ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
> + ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
> + ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
> + ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
> + ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
> + ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
> + ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
> + ANDROID_SCALER_CROPPING_TYPE,
> + ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
> + ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
> + ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
> + ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
> + ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
> + ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
> + ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
> + ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
> + ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
> + ANDROID_SENSOR_ORIENTATION,
> + ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
> + ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
> + ANDROID_SYNC_MAX_LATENCY,
> + };
> + staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
> + availableCharacteristicsKeys);
> +
> + std::vector<int32_t> availableRequestKeys = {
> + ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
> + ANDROID_CONTROL_AE_ANTIBANDING_MODE,
> + ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
> + ANDROID_CONTROL_AE_LOCK,
> + ANDROID_CONTROL_AE_MODE,
> + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
> + ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
> + ANDROID_CONTROL_AF_MODE,
> + ANDROID_CONTROL_AF_TRIGGER,
> + ANDROID_CONTROL_AWB_LOCK,
> + ANDROID_CONTROL_AWB_MODE,
> + ANDROID_CONTROL_CAPTURE_INTENT,
> + ANDROID_CONTROL_EFFECT_MODE,
> + ANDROID_CONTROL_MODE,
> + ANDROID_CONTROL_SCENE_MODE,
> + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
> + ANDROID_FLASH_MODE,
> + ANDROID_JPEG_ORIENTATION,
> + ANDROID_JPEG_QUALITY,
> + ANDROID_JPEG_THUMBNAIL_QUALITY,
> + ANDROID_JPEG_THUMBNAIL_SIZE,
> + ANDROID_LENS_APERTURE,
> + ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
> + ANDROID_NOISE_REDUCTION_MODE,
> + ANDROID_SCALER_CROP_REGION,
> + ANDROID_STATISTICS_FACE_DETECT_MODE
> + };
> + staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
> + availableRequestKeys);
> +
> + std::vector<int32_t> availableResultKeys = {
> + ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
> + ANDROID_CONTROL_AE_ANTIBANDING_MODE,
> + ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
> + ANDROID_CONTROL_AE_LOCK,
> + ANDROID_CONTROL_AE_MODE,
> + ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
> + ANDROID_CONTROL_AE_STATE,
> + ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
> + ANDROID_CONTROL_AF_MODE,
> + ANDROID_CONTROL_AF_STATE,
> + ANDROID_CONTROL_AF_TRIGGER,
> + ANDROID_CONTROL_AWB_LOCK,
> + ANDROID_CONTROL_AWB_MODE,
> + ANDROID_CONTROL_AWB_STATE,
> + ANDROID_CONTROL_CAPTURE_INTENT,
> + ANDROID_CONTROL_EFFECT_MODE,
> + ANDROID_CONTROL_MODE,
> + ANDROID_CONTROL_SCENE_MODE,
> + ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
> + ANDROID_FLASH_MODE,
> + ANDROID_FLASH_STATE,
> + ANDROID_JPEG_GPS_COORDINATES,
> + ANDROID_JPEG_GPS_PROCESSING_METHOD,
> + ANDROID_JPEG_GPS_TIMESTAMP,
> + ANDROID_JPEG_ORIENTATION,
> + ANDROID_JPEG_QUALITY,
> + ANDROID_JPEG_SIZE,
> + ANDROID_JPEG_THUMBNAIL_QUALITY,
> + ANDROID_JPEG_THUMBNAIL_SIZE,
> + ANDROID_LENS_APERTURE,
> + ANDROID_LENS_FOCAL_LENGTH,
> + ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
> + ANDROID_LENS_STATE,
> + ANDROID_NOISE_REDUCTION_MODE,
> + ANDROID_REQUEST_PIPELINE_DEPTH,
> + ANDROID_SCALER_CROP_REGION,
> + ANDROID_SENSOR_EXPOSURE_TIME,
> + ANDROID_SENSOR_FRAME_DURATION,
> + ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
> + ANDROID_SENSOR_TEST_PATTERN_MODE,
> + ANDROID_SENSOR_TIMESTAMP,
> + ANDROID_STATISTICS_FACE_DETECT_MODE,
> + ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
> + ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
> + ANDROID_STATISTICS_SCENE_FLICKER,
> + };
> + staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
> + availableResultKeys);
> +
> + if (!staticMetadata_->isValid()) {
> + LOG(HAL, Error) << "Failed to construct static metadata";
> + staticMetadata_.reset();
> + return -EINVAL;
> + }
> +
> + if (staticMetadata_->resized()) {
> + auto [entryCount, dataCount] = staticMetadata_->usage();
> + LOG(HAL, Info)
> + << "Static metadata resized: " << entryCount
> + << " entries and " << dataCount << " bytes used";
> + }
> +
> + return 0;
> +}
> +
> +/* Translate Android format code to libcamera pixel format. */
> +PixelFormat CameraCapabilities::toPixelFormat(int format) const
> +{
> + auto it = formatsMap_.find(format);
> + if (it == formatsMap_.end()) {
> + LOG(HAL, Error) << "Requested format " << utils::hex(format)
> + << " not supported";
> + return PixelFormat();
> + }
> +
> + return it->second;
> +}
> +
> +std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplatePreview() const
> +{
> + /*
> + * \todo Keep this in sync with the actual number of entries.
> + * Currently: 20 entries, 35 bytes
> + */
> + auto requestTemplate = std::make_unique<CameraMetadata>(21, 36);
> + if (!requestTemplate->isValid()) {
> + return nullptr;
> + }
> +
> + /* Get the FPS range registered in the static metadata. */
> + camera_metadata_ro_entry_t entry;
> + bool found = staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
> + &entry);
> + if (!found) {
> + LOG(HAL, Error) << "Cannot create capture template without FPS range";
> + return nullptr;
> + }
> +
> + /*
> + * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata
> + * has been assembled as {{min, max} {max, max}}.
> + */
> + requestTemplate->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
> + entry.data.i32, 2);
> +
> + uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
> + requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE, aeMode);
> +
> + int32_t aeExposureCompensation = 0;
> + requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
> + aeExposureCompensation);
> +
> + uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
> + requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
> + aePrecaptureTrigger);
> +
> + uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
> + requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK, aeLock);
> +
> + uint8_t aeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
> + requestTemplate->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
> + aeAntibandingMode);
> +
> + uint8_t afMode = ANDROID_CONTROL_AF_MODE_OFF;
> + requestTemplate->addEntry(ANDROID_CONTROL_AF_MODE, afMode);
> +
> + uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
> + requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER, afTrigger);
> +
> + uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
> + requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE, awbMode);
> +
> + uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
> + requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK, awbLock);
> +
> + uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
> + requestTemplate->addEntry(ANDROID_FLASH_MODE, flashMode);
> +
> + uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
> + requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,
> + faceDetectMode);
> +
> + uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF;
> + requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE,
> + noiseReduction);
> +
> + uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
> + requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
> + aberrationMode);
> +
> + uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
> + requestTemplate->addEntry(ANDROID_CONTROL_MODE, controlMode);
> +
> + float lensAperture = 2.53 / 100;
> + requestTemplate->addEntry(ANDROID_LENS_APERTURE, lensAperture);
> +
> + uint8_t opticalStabilization = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
> + requestTemplate->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
> + opticalStabilization);
> +
> + uint8_t captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
> + requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,
> + captureIntent);
> +
> + return requestTemplate;
> +}
> +
> +std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplateVideo() const
> +{
> + std::unique_ptr<CameraMetadata> previewTemplate = requestTemplatePreview();
> + if (!previewTemplate)
> + return nullptr;
> +
> + /*
> + * The video template requires a fixed FPS range. Everything else
> + * stays the same as the preview template.
> + */
> + camera_metadata_ro_entry_t entry;
> + staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
> + &entry);
> +
> + /*
> + * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata
> + * has been assembled as {{min, max} {max, max}}.
> + */
> + previewTemplate->updateEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
> + entry.data.i32 + 2, 2);
> +
> + return previewTemplate;
> +}
> diff --git a/src/android/camera_capabilities.h b/src/android/camera_capabilities.h
> new file mode 100644
> index 000000000000..f511607bbd90
> --- /dev/null
> +++ b/src/android/camera_capabilities.h
> @@ -0,0 +1,65 @@
> +/* SPDX-License-Identifier: LGPL-2.1-or-later */
> +/*
> + * Copyright (C) 2021, Google Inc.
> + *
> + * camera_capabilities.h - Camera static properties manager
> + */
> +#ifndef __ANDROID_CAMERA_CAPABILITIES_H__
> +#define __ANDROID_CAMERA_CAPABILITIES_H__
> +
> +#include <map>
> +#include <memory>
> +#include <vector>
> +
> +#include <libcamera/camera.h>
> +#include <libcamera/class.h>
> +#include <libcamera/formats.h>
> +#include <libcamera/geometry.h>
> +
> +#include "camera_metadata.h"
> +
> +class CameraCapabilities
> +{
> +public:
> + CameraCapabilities() = default;
> +
> + int initialize(std::shared_ptr<libcamera::Camera> camera,
> + int orientation, int facing);
> +
> + CameraMetadata *staticMetadata() const { return staticMetadata_.get(); }
> + libcamera::PixelFormat toPixelFormat(int format) const;
> + unsigned int maxJpegBufferSize() const { return maxJpegBufferSize_; }
> +
> + std::unique_ptr<CameraMetadata> requestTemplatePreview() const;
> + std::unique_ptr<CameraMetadata> requestTemplateVideo() const;
> +
> +private:
> + LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraCapabilities)
> +
> + struct Camera3StreamConfiguration {
> + libcamera::Size resolution;
> + int androidFormat;
> + };
> +
> + std::vector<libcamera::Size>
> + getYUVResolutions(libcamera::CameraConfiguration *cameraConfig,
> + const libcamera::PixelFormat &pixelFormat,
> + const std::vector<libcamera::Size> &resolutions);
> + std::vector<libcamera::Size>
> + getRawResolutions(const libcamera::PixelFormat &pixelFormat);
> + int initializeStreamConfigurations();
> +
> + int initializeStaticMetadata();
> +
> + std::shared_ptr<libcamera::Camera> camera_;
> +
> + int facing_;
> + int orientation_;
> +
> + std::vector<Camera3StreamConfiguration> streamConfigurations_;
> + std::map<int, libcamera::PixelFormat> formatsMap_;
> + std::unique_ptr<CameraMetadata> staticMetadata_;
> + unsigned int maxJpegBufferSize_;
> +};
> +
> +#endif /* __ANDROID_CAMERA_CAPABILITIES_H__ */
> diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp
> index 8c71fd0675d3..4bd125d7020a 100644
> --- a/src/android/camera_device.cpp
> +++ b/src/android/camera_device.cpp
> @@ -10,11 +10,8 @@
> #include "camera_ops.h"
> #include "post_processor.h"
>
> -#include <array>
> -#include <cmath>
> #include <fstream>
> #include <sys/mman.h>
> -#include <tuple>
> #include <unistd.h>
> #include <vector>
>
> @@ -23,7 +20,6 @@
> #include <libcamera/formats.h>
> #include <libcamera/property_ids.h>
>
> -#include "libcamera/internal/formats.h"
> #include "libcamera/internal/log.h"
> #include "libcamera/internal/thread.h"
> #include "libcamera/internal/utils.h"
> @@ -36,94 +32,6 @@ LOG_DECLARE_CATEGORY(HAL)
>
> namespace {
>
> -/*
> - * \var camera3Resolutions
> - * \brief The list of image resolutions defined as mandatory to be supported by
> - * the Android Camera3 specification
> - */
> -const std::vector<Size> camera3Resolutions = {
> - { 320, 240 },
> - { 640, 480 },
> - { 1280, 720 },
> - { 1920, 1080 }
> -};
> -
> -/*
> - * \struct Camera3Format
> - * \brief Data associated with an Android format identifier
> - * \var libcameraFormats List of libcamera pixel formats compatible with the
> - * Android format
> - * \var name The human-readable representation of the Android format code
> - */
> -struct Camera3Format {
> - std::vector<PixelFormat> libcameraFormats;
> - bool mandatory;
> - const char *name;
> -};
> -
> -/*
> - * \var camera3FormatsMap
> - * \brief Associate Android format code with ancillary data
> - */
> -const std::map<int, const Camera3Format> camera3FormatsMap = {
> - {
> - HAL_PIXEL_FORMAT_BLOB, {
> - { formats::MJPEG },
> - true,
> - "BLOB"
> - }
> - }, {
> - HAL_PIXEL_FORMAT_YCbCr_420_888, {
> - { formats::NV12, formats::NV21 },
> - true,
> - "YCbCr_420_888"
> - }
> - }, {
> - /*
> - * \todo Translate IMPLEMENTATION_DEFINED inspecting the gralloc
> - * usage flag. For now, copy the YCbCr_420 configuration.
> - */
> - HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, {
> - { formats::NV12, formats::NV21 },
> - true,
> - "IMPLEMENTATION_DEFINED"
> - }
> - }, {
> - HAL_PIXEL_FORMAT_RAW10, {
> - {
> - formats::SBGGR10_CSI2P,
> - formats::SGBRG10_CSI2P,
> - formats::SGRBG10_CSI2P,
> - formats::SRGGB10_CSI2P
> - },
> - false,
> - "RAW10"
> - }
> - }, {
> - HAL_PIXEL_FORMAT_RAW12, {
> - {
> - formats::SBGGR12_CSI2P,
> - formats::SGBRG12_CSI2P,
> - formats::SGRBG12_CSI2P,
> - formats::SRGGB12_CSI2P
> - },
> - false,
> - "RAW12"
> - }
> - }, {
> - HAL_PIXEL_FORMAT_RAW16, {
> - {
> - formats::SBGGR16,
> - formats::SGBRG16,
> - formats::SGRBG16,
> - formats::SRGGB16
> - },
> - false,
> - "RAW16"
> - }
> - },
> -};
> -
> /*
> * \struct Camera3StreamConfig
> * \brief Data to store StreamConfiguration associated with camera3_stream(s)
> @@ -512,242 +420,7 @@ int CameraDevice::initialize(const CameraConfigData *cameraConfigData)
> orientation_ = 0;
> }
>
> -	/* Acquire the camera and initialize available stream configurations. */
> - int ret = camera_->acquire();
> - if (ret) {
> - LOG(HAL, Error) << "Failed to temporarily acquire the camera";
> - return ret;
> - }
> -
> - ret = initializeStreamConfigurations();
> - camera_->release();
> - return ret;
> -}
> -
> -std::vector<Size> CameraDevice::getYUVResolutions(CameraConfiguration *cameraConfig,
> - const PixelFormat &pixelFormat,
> - const std::vector<Size> &resolutions)
> -{
> - std::vector<Size> supportedResolutions;
> -
> - StreamConfiguration &cfg = cameraConfig->at(0);
> - for (const Size &res : resolutions) {
> - cfg.pixelFormat = pixelFormat;
> - cfg.size = res;
> -
> - CameraConfiguration::Status status = cameraConfig->validate();
> - if (status != CameraConfiguration::Valid) {
> - LOG(HAL, Debug) << cfg.toString() << " not supported";
> - continue;
> - }
> -
> - LOG(HAL, Debug) << cfg.toString() << " supported";
> -
> - supportedResolutions.push_back(res);
> - }
> -
> - return supportedResolutions;
> -}
> -
> -std::vector<Size> CameraDevice::getRawResolutions(const libcamera::PixelFormat &pixelFormat)
> -{
> - std::unique_ptr<CameraConfiguration> cameraConfig =
> - camera_->generateConfiguration({ StreamRole::Raw });
> - StreamConfiguration &cfg = cameraConfig->at(0);
> - const StreamFormats &formats = cfg.formats();
> - std::vector<Size> supportedResolutions = formats.sizes(pixelFormat);
> -
> - return supportedResolutions;
> -}
> -
> -/*
> - * Initialize the format conversion map to translate from Android format
> - * identifier to libcamera pixel formats and fill in the list of supported
> - * stream configurations to be reported to the Android camera framework through
> - * the static stream configuration metadata.
> - */
> -int CameraDevice::initializeStreamConfigurations()
> -{
> - /*
> - * Get the maximum output resolutions
> - * \todo Get this from the camera properties once defined
> - */
> - std::unique_ptr<CameraConfiguration> cameraConfig =
> - camera_->generateConfiguration({ StillCapture });
> - if (!cameraConfig) {
> - LOG(HAL, Error) << "Failed to get maximum resolution";
> - return -EINVAL;
> - }
> - StreamConfiguration &cfg = cameraConfig->at(0);
> -
> - /*
> - * \todo JPEG - Adjust the maximum available resolution by taking the
> - * JPEG encoder requirements into account (alignment and aspect ratio).
> - */
> - const Size maxRes = cfg.size;
> - LOG(HAL, Debug) << "Maximum supported resolution: " << maxRes.toString();
> -
> - /*
> - * Build the list of supported image resolutions.
> - *
> - * The resolutions listed in camera3Resolution are mandatory to be
> - * supported, up to the camera maximum resolution.
> - *
> - * Augment the list by adding resolutions calculated from the camera
> - * maximum one.
> - */
> - std::vector<Size> cameraResolutions;
> - std::copy_if(camera3Resolutions.begin(), camera3Resolutions.end(),
> - std::back_inserter(cameraResolutions),
> - [&](const Size &res) { return res < maxRes; });
> -
> - /*
> - * The Camera3 specification suggests adding 1/2 and 1/4 of the maximum
> - * resolution.
> - */
> - for (unsigned int divider = 2;; divider <<= 1) {
> - Size derivedSize{
> - maxRes.width / divider,
> - maxRes.height / divider,
> - };
> -
> - if (derivedSize.width < 320 ||
> - derivedSize.height < 240)
> - break;
> -
> - cameraResolutions.push_back(derivedSize);
> - }
> - cameraResolutions.push_back(maxRes);
> -
> - /* Remove duplicated entries from the list of supported resolutions. */
> - std::sort(cameraResolutions.begin(), cameraResolutions.end());
> - auto last = std::unique(cameraResolutions.begin(), cameraResolutions.end());
> - cameraResolutions.erase(last, cameraResolutions.end());
> -
> - /*
> - * Build the list of supported camera formats.
> - *
> - * To each Android format a list of compatible libcamera formats is
> - * associated. The first libcamera format that tests successful is added
> - * to the format translation map used when configuring the streams.
> - * It is then tested against the list of supported camera resolutions to
> - * build the stream configuration map reported through the camera static
> - * metadata.
> - */
> - Size maxJpegSize;
> - for (const auto &format : camera3FormatsMap) {
> - int androidFormat = format.first;
> - const Camera3Format &camera3Format = format.second;
> - const std::vector<PixelFormat> &libcameraFormats =
> - camera3Format.libcameraFormats;
> -
> - LOG(HAL, Debug) << "Trying to map Android format "
> - << camera3Format.name;
> -
> - /*
> - * JPEG is always supported, either produced directly by the
> - * camera, or encoded in the HAL.
> - */
> - if (androidFormat == HAL_PIXEL_FORMAT_BLOB) {
> - formatsMap_[androidFormat] = formats::MJPEG;
> - LOG(HAL, Debug) << "Mapped Android format "
> - << camera3Format.name << " to "
> - << formats::MJPEG.toString()
> - << " (fixed mapping)";
> - continue;
> - }
> -
> - /*
> - * Test the libcamera formats that can produce images
> - * compatible with the format defined by Android.
> - */
> - PixelFormat mappedFormat;
> - for (const PixelFormat &pixelFormat : libcameraFormats) {
> -
> - LOG(HAL, Debug) << "Testing " << pixelFormat.toString();
> -
> - /*
> - * The stream configuration size can be adjusted,
> - * not the pixel format.
> - *
> - * \todo This could be simplified once all pipeline
> - * handlers will report the StreamFormats list of
> - * supported formats.
> - */
> - cfg.pixelFormat = pixelFormat;
> -
> - CameraConfiguration::Status status = cameraConfig->validate();
> - if (status != CameraConfiguration::Invalid &&
> - cfg.pixelFormat == pixelFormat) {
> - mappedFormat = pixelFormat;
> - break;
> - }
> - }
> -
> - if (!mappedFormat.isValid()) {
> - /* If the format is not mandatory, skip it. */
> - if (!camera3Format.mandatory)
> - continue;
> -
> - LOG(HAL, Error)
> - << "Failed to map mandatory Android format "
> - << camera3Format.name << " ("
> - << utils::hex(androidFormat) << "): aborting";
> - return -EINVAL;
> - }
> -
> - /*
> - * Record the mapping and then proceed to generate the
> - * stream configurations map, by testing the image resolutions.
> - */
> - formatsMap_[androidFormat] = mappedFormat;
> - LOG(HAL, Debug) << "Mapped Android format "
> - << camera3Format.name << " to "
> - << mappedFormat.toString();
> -
> - std::vector<Size> resolutions;
> - const PixelFormatInfo &info = PixelFormatInfo::info(mappedFormat);
> - if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)
> - resolutions = getRawResolutions(mappedFormat);
> - else
> - resolutions = getYUVResolutions(cameraConfig.get(),
> - mappedFormat,
> - cameraResolutions);
> -
> - for (const Size &res : resolutions) {
> - streamConfigurations_.push_back({ res, androidFormat });
> -
> - /*
> - * If the format is HAL_PIXEL_FORMAT_YCbCr_420_888
> - * from which JPEG is produced, add an entry for
> - * the JPEG stream.
> - *
> - * \todo Wire the JPEG encoder to query the supported
> - * sizes provided a list of formats it can encode.
> - *
> - * \todo Support JPEG streams produced by the Camera
> - * natively.
> - */
> - if (androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888) {
> - streamConfigurations_.push_back(
> - { res, HAL_PIXEL_FORMAT_BLOB });
> - maxJpegSize = std::max(maxJpegSize, res);
> - }
> - }
> -
> - /*
> - * \todo Calculate the maximum JPEG buffer size by asking the
> - * encoder giving the maximum frame size required.
> - */
> - maxJpegBufferSize_ = maxJpegSize.width * maxJpegSize.height * 1.5;
> - }
> -
> - LOG(HAL, Debug) << "Collected stream configuration map: ";
> - for (const auto &entry : streamConfigurations_)
> - LOG(HAL, Debug) << "{ " << entry.resolution.toString() << " - "
> - << utils::hex(entry.androidFormat) << " }";
> -
> - return 0;
> + return capabilities_.initialize(camera_, orientation_, facing_);
> }
>
> /*
> @@ -817,802 +490,19 @@ void CameraDevice::stop()
> state_ = State::Stopped;
> }
>
> -void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)
> +unsigned int CameraDevice::maxJpegBufferSize() const
> {
> - callbacks_ = callbacks;
> + return capabilities_.maxJpegBufferSize();
> }
>
> -/*
> - * Return static information for the camera.
> - */
> -const camera_metadata_t *CameraDevice::getStaticMetadata()
> -{
> - if (staticMetadata_)
> - return staticMetadata_->get();
> -
> - staticMetadata_ = std::make_unique<CameraMetadata>(64, 1024);
> - if (!staticMetadata_->isValid()) {
> - LOG(HAL, Error) << "Failed to allocate static metadata";
> - staticMetadata_.reset();
> - return nullptr;
> - }
> -
> - const ControlInfoMap &controlsInfo = camera_->controls();
> - const ControlList &properties = camera_->properties();
> -
> - /* Color correction static metadata. */
> - {
> - std::vector<uint8_t> data;
> - data.reserve(3);
> - const auto &infoMap = controlsInfo.find(&controls::draft::ColorCorrectionAberrationMode);
> - if (infoMap != controlsInfo.end()) {
> - for (const auto &value : infoMap->second.values())
> - data.push_back(value.get<int32_t>());
> - } else {
> - data.push_back(ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF);
> - }
> - staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
> - data);
> - }
> -
> - /* Control static metadata. */
> - std::vector<uint8_t> aeAvailableAntiBandingModes = {
> - ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,
> - ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,
> - ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,
> - ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,
> - };
> - staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
> - aeAvailableAntiBandingModes);
> -
> - std::vector<uint8_t> aeAvailableModes = {
> - ANDROID_CONTROL_AE_MODE_ON,
> - };
> - staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,
> - aeAvailableModes);
> -
> - int64_t minFrameDurationNsec = -1;
> - int64_t maxFrameDurationNsec = -1;
> - const auto frameDurationsInfo = controlsInfo.find(&controls::FrameDurationLimits);
> - if (frameDurationsInfo != controlsInfo.end()) {
> - minFrameDurationNsec = frameDurationsInfo->second.min().get<int64_t>() * 1000;
> - maxFrameDurationNsec = frameDurationsInfo->second.max().get<int64_t>() * 1000;
> -
> - /*
> - * Adjust the minimum frame duration to comply with Android
> - * requirements. The camera service mandates all preview/record
> - * streams to have a minimum frame duration < 33,366 milliseconds
> - * (see MAX_PREVIEW_RECORD_DURATION_NS in the camera service
> - * implementation).
> - *
> - * If we're close enough (+ 500 useconds) to that value, round
> - * the minimum frame duration of the camera to an accepted
> - * value.
> - */
> -		static constexpr int64_t MAX_PREVIEW_RECORD_DURATION_NS = 1e9 / 29.97;
> -		if (minFrameDurationNsec > MAX_PREVIEW_RECORD_DURATION_NS &&
> -		    minFrameDurationNsec < MAX_PREVIEW_RECORD_DURATION_NS + 500000)
> -			minFrameDurationNsec = MAX_PREVIEW_RECORD_DURATION_NS - 1000;
> -
> - /*
> -		 * The AE routine frame rate limits are computed using the frame
> -		 * duration limits, as libcamera clips the AE routine to the
> -		 * frame durations.
> - */
> - int32_t maxFps = std::round(1e9 / minFrameDurationNsec);
> - int32_t minFps = std::round(1e9 / maxFrameDurationNsec);
> - minFps = std::max(1, minFps);
> -
> - /*
> - * Force rounding errors so that we have the proper frame
> - * durations for when we reuse these variables later
> - */
> - minFrameDurationNsec = 1e9 / maxFps;
> - maxFrameDurationNsec = 1e9 / minFps;
> -
> - /*
> - * Register to the camera service {min, max} and {max, max}
> - * intervals as requested by the metadata documentation.
> - */
> - int32_t availableAeFpsTarget[] = {
> - minFps, maxFps, maxFps, maxFps
> - };
> -		staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
> - availableAeFpsTarget);
> - }
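
To make the rounding above concrete: a sensor whose fastest frame duration
is 33,333,333 ns is below the threshold and maps straight to maxFps = 30,
while one reporting 33,400,000 ns falls inside the 500 us window, gets
clamped just under 1e9 / 29.97, and also rounds to 30. A self-contained
sketch of the same arithmetic (names hypothetical):

    #include <cmath>
    #include <cstdint>

    static constexpr int64_t kMaxPreviewRecordDurationNs = 1e9 / 29.97;

    /* Mirrors the clamping in the removed hunk, for illustration only. */
    static int32_t maxFpsFor(int64_t minFrameDurationNsec)
    {
            if (minFrameDurationNsec > kMaxPreviewRecordDurationNs &&
                minFrameDurationNsec < kMaxPreviewRecordDurationNs + 500000)
                    minFrameDurationNsec = kMaxPreviewRecordDurationNs - 1000;

            return std::round(1e9 / minFrameDurationNsec);
    }
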
> -
> - std::vector<int32_t> aeCompensationRange = {
> - 0, 0,
> - };
> - staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
> - aeCompensationRange);
> -
> - const camera_metadata_rational_t aeCompensationStep[] = {
> - { 0, 1 }
> - };
> - staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,
> - aeCompensationStep);
> -
> - std::vector<uint8_t> availableAfModes = {
> - ANDROID_CONTROL_AF_MODE_OFF,
> - };
> - staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,
> - availableAfModes);
> -
> - std::vector<uint8_t> availableEffects = {
> - ANDROID_CONTROL_EFFECT_MODE_OFF,
> - };
> - staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,
> - availableEffects);
> -
> - std::vector<uint8_t> availableSceneModes = {
> - ANDROID_CONTROL_SCENE_MODE_DISABLED,
> - };
> - staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
> - availableSceneModes);
> -
> - std::vector<uint8_t> availableStabilizationModes = {
> - ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,
> - };
> -	staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
> - availableStabilizationModes);
> -
> - /*
> - * \todo Inspect the Camera capabilities to report the available
> - * AWB modes. Default to AUTO as CTS tests require it.
> - */
> - std::vector<uint8_t> availableAwbModes = {
> - ANDROID_CONTROL_AWB_MODE_AUTO,
> - };
> - staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
> - availableAwbModes);
> -
> - std::vector<int32_t> availableMaxRegions = {
> - 0, 0, 0,
> - };
> - staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,
> - availableMaxRegions);
> -
> - std::vector<uint8_t> sceneModesOverride = {
> - ANDROID_CONTROL_AE_MODE_ON,
> - ANDROID_CONTROL_AWB_MODE_AUTO,
> - ANDROID_CONTROL_AF_MODE_OFF,
> - };
> - staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
> - sceneModesOverride);
> -
> - uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
> - staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
> - aeLockAvailable);
> -
> -	uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
> - staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
> - awbLockAvailable);
> -
> - char availableControlModes = ANDROID_CONTROL_MODE_AUTO;
> - staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,
> - availableControlModes);
> -
> - /* JPEG static metadata. */
> -
> - /*
> - * Create the list of supported thumbnail sizes by inspecting the
> -	 * available JPEG resolutions collected in streamConfigurations_ and
> -	 * generate one entry for each aspect ratio.
> -	 *
> -	 * The JPEG thumbnailer can freely scale, so pick an arbitrary
> -	 * (160, 160) size as the bounding rectangle, which is then cropped to
> -	 * the different supported aspect ratios.
> - */
> - constexpr Size maxJpegThumbnail(160, 160);
> - std::vector<Size> thumbnailSizes;
> - thumbnailSizes.push_back({ 0, 0 });
> - for (const auto &entry : streamConfigurations_) {
> - if (entry.androidFormat != HAL_PIXEL_FORMAT_BLOB)
> - continue;
> -
> -		Size thumbnailSize = maxJpegThumbnail
> -				     .boundedToAspectRatio({ entry.resolution.width,
> -							     entry.resolution.height });
> -		thumbnailSizes.push_back(thumbnailSize);
> - }
> -
> - std::sort(thumbnailSizes.begin(), thumbnailSizes.end());
> -	auto last = std::unique(thumbnailSizes.begin(), thumbnailSizes.end());
> - thumbnailSizes.erase(last, thumbnailSizes.end());
> -
> -	/* Transform sizes into a list of integers that can be consumed. */
> - std::vector<int32_t> thumbnailEntries;
> - thumbnailEntries.reserve(thumbnailSizes.size() * 2);
> - for (const auto &size : thumbnailSizes) {
> - thumbnailEntries.push_back(size.width);
> - thumbnailEntries.push_back(size.height);
> - }
> - staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
> - thumbnailEntries);
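
As a worked example of the loop above: BLOB entries at 4:3 and 16:9 yield
(160, 120) and (160, 90) from the (160, 160) bound, alongside the mandatory
(0, 0) entry. A sketch of the cropping math, mimicking what
Size::boundedToAspectRatio() does (struct and helper are hypothetical):

    struct Sz {
            unsigned int w, h;
    };

    /* Shrink a 160x160 bound to the stream's aspect ratio. */
    static Sz thumbnailFor(unsigned int w, unsigned int h)
    {
            if (w >= h)
                    return { 160, 160 * h / w };
            return { 160 * w / h, 160 };
    }

    /* thumbnailFor(1920, 1080) == { 160, 90 }
     * thumbnailFor(640, 480) == { 160, 120 } */
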
> -
> -	staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, maxJpegBufferSize_);
> -
> - /* Sensor static metadata. */
> - std::array<int32_t, 2> pixelArraySize;
> - {
> -		const Size &size = properties.get(properties::PixelArraySize);
> -		pixelArraySize[0] = size.width;
> -		pixelArraySize[1] = size.height;
> -		staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
> - pixelArraySize);
> - }
> -
> - if (properties.contains(properties::UnitCellSize)) {
> -		const Size &cellSize = properties.get<Size>(properties::UnitCellSize);
> -		std::array<float, 2> physicalSize{
> -			cellSize.width * pixelArraySize[0] / 1e6f,
> -			cellSize.height * pixelArraySize[1] / 1e6f
> -		};
> -		staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
> - physicalSize);
> - }
> -
> - {
> -		const Span<const Rectangle> &rects = properties.get(properties::PixelArrayActiveAreas);
> -		std::vector<int32_t> data{
> -			static_cast<int32_t>(rects[0].x),
> -			static_cast<int32_t>(rects[0].y),
> -			static_cast<int32_t>(rects[0].width),
> -			static_cast<int32_t>(rects[0].height),
> -		};
> -		staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
> - data);
> - }
> -
> - int32_t sensitivityRange[] = {
> - 32, 2400,
> - };
> - staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
> - sensitivityRange);
> -
> - /* Report the color filter arrangement if the camera reports it. */
> -	if (properties.contains(properties::draft::ColorFilterArrangement)) {
> -		uint8_t filterArr = properties.get(properties::draft::ColorFilterArrangement);
> -		staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
> - filterArr);
> - }
> -
> -	const auto &exposureInfo = controlsInfo.find(&controls::ExposureTime);
> -	if (exposureInfo != controlsInfo.end()) {
> -		int64_t exposureTimeRange[2] = {
> -			exposureInfo->second.min().get<int32_t>() * 1000LL,
> -			exposureInfo->second.max().get<int32_t>() * 1000LL,
> -		};
> -		staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
> - exposureTimeRange, 2);
> - }
> -
> -	staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, orientation_);
> -
> - std::vector<int32_t> testPatternModes = {
> - ANDROID_SENSOR_TEST_PATTERN_MODE_OFF
> - };
> - const auto &testPatternsInfo =
> - controlsInfo.find(&controls::draft::TestPatternMode);
> - if (testPatternsInfo != controlsInfo.end()) {
> - const auto &values = testPatternsInfo->second.values();
> - ASSERT(!values.empty());
> - for (const auto &value : values) {
> - switch (value.get<int32_t>()) {
> - case controls::draft::TestPatternModeOff:
> - /*
> - * ANDROID_SENSOR_TEST_PATTERN_MODE_OFF is
> - * already in testPatternModes.
> - */
> - break;
> -
> -			case controls::draft::TestPatternModeSolidColor:
> -				testPatternModes.push_back(
> -					ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);
> -				break;
> -
> -			case controls::draft::TestPatternModeColorBars:
> -				testPatternModes.push_back(
> -					ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS);
> -				break;
> -
> -			case controls::draft::TestPatternModeColorBarsFadeToGray:
> -				testPatternModes.push_back(
> -					ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY);
> -				break;
> -
> -			case controls::draft::TestPatternModePn9:
> -				testPatternModes.push_back(
> -					ANDROID_SENSOR_TEST_PATTERN_MODE_PN9);
> -				break;
> -
> -			case controls::draft::TestPatternModeCustom1:
> -				/* We don't support this yet. */
> -				break;
> -
> -			default:
> -				LOG(HAL, Error) << "Unknown test pattern mode: "
> - << value.get<int32_t>();
> - continue;
> - }
> - }
> - }
> -	staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
> - testPatternModes);
> -
> -	uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
> - staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
> - timestampSource);
> -
> - if (maxFrameDurationNsec > 0)
> -		staticMetadata_->addEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
> - maxFrameDurationNsec);
> -
> - /* Statistics static metadata. */
> - uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
> -	staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
> - faceDetectMode);
> -
> - int32_t maxFaceCount = 0;
> - staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
> - maxFaceCount);
> -
> - {
> - std::vector<uint8_t> data;
> - data.reserve(2);
> -		const auto &infoMap = controlsInfo.find(&controls::draft::LensShadingMapMode);
> - if (infoMap != controlsInfo.end()) {
> - for (const auto &value : infoMap->second.values())
> - data.push_back(value.get<int32_t>());
> - } else {
> -			data.push_back(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF);
> - }
> -		staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
> - data);
> - }
> -
> - /* Sync static metadata. */
> - int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;
> - staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, maxLatency);
> -
> - /* Flash static metadata. */
> - char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
> - staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,
> - flashAvailable);
> -
> - /* Lens static metadata. */
> - std::vector<float> lensApertures = {
> - 2.53 / 100,
> - };
> - staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
> - lensApertures);
> -
> - uint8_t lensFacing;
> - switch (facing_) {
> - default:
> - case CAMERA_FACING_FRONT:
> - lensFacing = ANDROID_LENS_FACING_FRONT;
> - break;
> - case CAMERA_FACING_BACK:
> - lensFacing = ANDROID_LENS_FACING_BACK;
> - break;
> - case CAMERA_FACING_EXTERNAL:
> - lensFacing = ANDROID_LENS_FACING_EXTERNAL;
> - break;
> - }
> - staticMetadata_->addEntry(ANDROID_LENS_FACING, lensFacing);
> -
> - std::vector<float> lensFocalLengths = {
> - 1,
> - };
> -	staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
> - lensFocalLengths);
> -
> - std::vector<uint8_t> opticalStabilizations = {
> - ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,
> - };
> -	staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
> - opticalStabilizations);
> -
> - float hypeFocalDistance = 0;
> - staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
> - hypeFocalDistance);
> -
> - float minFocusDistance = 0;
> - staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
> - minFocusDistance);
> -
> - /* Noise reduction modes. */
> - {
> - std::vector<uint8_t> data;
> - data.reserve(5);
> -		const auto &infoMap = controlsInfo.find(&controls::draft::NoiseReductionMode);
> - if (infoMap != controlsInfo.end()) {
> - for (const auto &value : infoMap->second.values())
> - data.push_back(value.get<int32_t>());
> - } else {
> - data.push_back(ANDROID_NOISE_REDUCTION_MODE_OFF);
> - }
> -		staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
> - data);
> - }
> -
> - /* Scaler static metadata. */
> -
> - /*
> -	 * \todo The digital zoom factor is a property that depends on the
> -	 * desired output configuration and the sensor frame size input to the
> -	 * ISP. This information is not available to the Android HAL, not at
> -	 * initialization time at least.
> -	 *
> -	 * As a workaround rely on pipeline handlers initializing the
> -	 * ScalerCrop control with the camera default configuration and use the
> -	 * maximum and minimum crop rectangles to calculate the digital zoom
> -	 * factor.
> - */
> - float maxZoom = 1.0f;
> - const auto scalerCrop = controlsInfo.find(&controls::ScalerCrop);
> - if (scalerCrop != controlsInfo.end()) {
> - Rectangle min = scalerCrop->second.min().get<Rectangle>();
> - Rectangle max = scalerCrop->second.max().get<Rectangle>();
> - maxZoom = std::min(1.0f * max.width / min.width,
> - 1.0f * max.height / min.height);
> - }
> -	staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
> - maxZoom);
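
A quick numeric check of the workaround: if a pipeline reports ScalerCrop
rectangles from 640x480 (min) up to 2560x1920 (max), the advertised maximum
digital zoom is min(2560/640, 1920/480) = 4.0. The values are illustrative;
the computation is just:

    #include <algorithm>

    /* Zoom factor from the smallest and largest crop rectangles. */
    static float maxDigitalZoom(float minW, float minH,
                                float maxW, float maxH)
    {
            return std::min(maxW / minW, maxH / minH);
    }
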
> -
> - std::vector<uint32_t> availableStreamConfigurations;
> -	availableStreamConfigurations.reserve(streamConfigurations_.size() * 4);
> - for (const auto &entry : streamConfigurations_) {
> -		availableStreamConfigurations.push_back(entry.androidFormat);
> -		availableStreamConfigurations.push_back(entry.resolution.width);
> -		availableStreamConfigurations.push_back(entry.resolution.height);
> -		availableStreamConfigurations.push_back(
> -			ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
> -	}
> -	staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
> - availableStreamConfigurations);
> -
> - std::vector<int64_t> availableStallDurations = {
> -		ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,
> - };
> - staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
> - availableStallDurations);
> -
> - /* Use the minimum frame duration for all the YUV/RGB formats. */
> - if (minFrameDurationNsec > 0) {
> - std::vector<int64_t> minFrameDurations;
> -		minFrameDurations.reserve(streamConfigurations_.size() * 4);
> -		for (const auto &entry : streamConfigurations_) {
> -			minFrameDurations.push_back(entry.androidFormat);
> -			minFrameDurations.push_back(entry.resolution.width);
> -			minFrameDurations.push_back(entry.resolution.height);
> -			minFrameDurations.push_back(minFrameDurationNsec);
> -		}
> -		staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
> - minFrameDurations);
> - }
> -
> - uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
> -	staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, croppingType);
> -
> - /* Info static metadata. */
> -	uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;
> - staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
> - supportedHWLevel);
> -
> - /* Request static metadata. */
> - int32_t partialResultCount = 1;
> - staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
> - partialResultCount);
> -
> - {
> - /* Default the value to 2 if not reported by the camera. */
> - uint8_t maxPipelineDepth = 2;
> -		const auto &infoMap = controlsInfo.find(&controls::draft::PipelineDepth);
> -		if (infoMap != controlsInfo.end())
> -			maxPipelineDepth = infoMap->second.max().get<int32_t>();
> -		staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
> - maxPipelineDepth);
> - }
> -
> - /* LIMITED does not support reprocessing. */
> - uint32_t maxNumInputStreams = 0;
> - staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
> - maxNumInputStreams);
> -
> - std::vector<uint8_t> availableCapabilities = {
> - ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,
> - };
> -
> - /* Report if camera supports RAW. */
> - bool rawStreamAvailable = false;
> - std::unique_ptr<CameraConfiguration> cameraConfig =
> - camera_->generateConfiguration({ StreamRole::Raw });
> - if (cameraConfig && !cameraConfig->empty()) {
> -		const PixelFormatInfo &info =
> -			PixelFormatInfo::info(cameraConfig->at(0).pixelFormat);
> -		/* Only advertise RAW support if RAW16 is possible. */
> -		if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW &&
> -		    info.bitsPerPixel == 16) {
> -			rawStreamAvailable = true;
> -			availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
> - }
> - }
> -
> - /* Number of { RAW, YUV, JPEG } supported output streams */
> - int32_t numOutStreams[] = { rawStreamAvailable, 2, 1 };
> - staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
> - numOutStreams);
> -
> - staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
> - availableCapabilities);
> -
> - std::vector<int32_t> availableCharacteristicsKeys = {
> - ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
> - ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
> - ANDROID_CONTROL_AE_AVAILABLE_MODES,
> - ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
> - ANDROID_CONTROL_AE_COMPENSATION_RANGE,
> - ANDROID_CONTROL_AE_COMPENSATION_STEP,
> - ANDROID_CONTROL_AE_LOCK_AVAILABLE,
> - ANDROID_CONTROL_AF_AVAILABLE_MODES,
> - ANDROID_CONTROL_AVAILABLE_EFFECTS,
> - ANDROID_CONTROL_AVAILABLE_MODES,
> - ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
> - ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
> - ANDROID_CONTROL_AWB_AVAILABLE_MODES,
> - ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
> - ANDROID_CONTROL_MAX_REGIONS,
> - ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
> - ANDROID_FLASH_INFO_AVAILABLE,
> - ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
> - ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
> - ANDROID_JPEG_MAX_SIZE,
> - ANDROID_LENS_FACING,
> - ANDROID_LENS_INFO_AVAILABLE_APERTURES,
> - ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
> - ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
> - ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
> - ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
> - ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
> - ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
> - ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
> - ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
> - ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
> - ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
> - ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
> - ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
> - ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
> - ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
> - ANDROID_SCALER_CROPPING_TYPE,
> - ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
> - ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
> - ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
> - ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
> - ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
> - ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
> - ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
> - ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
> - ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
> - ANDROID_SENSOR_ORIENTATION,
> - ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
> - ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
> - ANDROID_SYNC_MAX_LATENCY,
> - };
> -	staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
> - availableCharacteristicsKeys);
> -
> - std::vector<int32_t> availableRequestKeys = {
> - ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
> - ANDROID_CONTROL_AE_ANTIBANDING_MODE,
> - ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
> - ANDROID_CONTROL_AE_LOCK,
> - ANDROID_CONTROL_AE_MODE,
> - ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
> - ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
> - ANDROID_CONTROL_AF_MODE,
> - ANDROID_CONTROL_AF_TRIGGER,
> - ANDROID_CONTROL_AWB_LOCK,
> - ANDROID_CONTROL_AWB_MODE,
> - ANDROID_CONTROL_CAPTURE_INTENT,
> - ANDROID_CONTROL_EFFECT_MODE,
> - ANDROID_CONTROL_MODE,
> - ANDROID_CONTROL_SCENE_MODE,
> - ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
> - ANDROID_FLASH_MODE,
> - ANDROID_JPEG_ORIENTATION,
> - ANDROID_JPEG_QUALITY,
> - ANDROID_JPEG_THUMBNAIL_QUALITY,
> - ANDROID_JPEG_THUMBNAIL_SIZE,
> - ANDROID_LENS_APERTURE,
> - ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
> - ANDROID_NOISE_REDUCTION_MODE,
> - ANDROID_SCALER_CROP_REGION,
> - ANDROID_STATISTICS_FACE_DETECT_MODE
> - };
> - staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
> - availableRequestKeys);
> -
> - std::vector<int32_t> availableResultKeys = {
> - ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
> - ANDROID_CONTROL_AE_ANTIBANDING_MODE,
> - ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
> - ANDROID_CONTROL_AE_LOCK,
> - ANDROID_CONTROL_AE_MODE,
> - ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
> - ANDROID_CONTROL_AE_STATE,
> - ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
> - ANDROID_CONTROL_AF_MODE,
> - ANDROID_CONTROL_AF_STATE,
> - ANDROID_CONTROL_AF_TRIGGER,
> - ANDROID_CONTROL_AWB_LOCK,
> - ANDROID_CONTROL_AWB_MODE,
> - ANDROID_CONTROL_AWB_STATE,
> - ANDROID_CONTROL_CAPTURE_INTENT,
> - ANDROID_CONTROL_EFFECT_MODE,
> - ANDROID_CONTROL_MODE,
> - ANDROID_CONTROL_SCENE_MODE,
> - ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
> - ANDROID_FLASH_MODE,
> - ANDROID_FLASH_STATE,
> - ANDROID_JPEG_GPS_COORDINATES,
> - ANDROID_JPEG_GPS_PROCESSING_METHOD,
> - ANDROID_JPEG_GPS_TIMESTAMP,
> - ANDROID_JPEG_ORIENTATION,
> - ANDROID_JPEG_QUALITY,
> - ANDROID_JPEG_SIZE,
> - ANDROID_JPEG_THUMBNAIL_QUALITY,
> - ANDROID_JPEG_THUMBNAIL_SIZE,
> - ANDROID_LENS_APERTURE,
> - ANDROID_LENS_FOCAL_LENGTH,
> - ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
> - ANDROID_LENS_STATE,
> - ANDROID_NOISE_REDUCTION_MODE,
> - ANDROID_REQUEST_PIPELINE_DEPTH,
> - ANDROID_SCALER_CROP_REGION,
> - ANDROID_SENSOR_EXPOSURE_TIME,
> - ANDROID_SENSOR_FRAME_DURATION,
> - ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
> - ANDROID_SENSOR_TEST_PATTERN_MODE,
> - ANDROID_SENSOR_TIMESTAMP,
> - ANDROID_STATISTICS_FACE_DETECT_MODE,
> - ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
> - ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
> - ANDROID_STATISTICS_SCENE_FLICKER,
> - };
> - staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
> - availableResultKeys);
> -
> - if (!staticMetadata_->isValid()) {
> - LOG(HAL, Error) << "Failed to construct static metadata";
> - staticMetadata_.reset();
> - return nullptr;
> - }
> -
> - if (staticMetadata_->resized()) {
> - auto [entryCount, dataCount] = staticMetadata_->usage();
> - LOG(HAL, Info)
> - << "Static metadata resized: " << entryCount
> - << " entries and " << dataCount << " bytes used";
> - }
> -
> - return staticMetadata_->get();
> -}
> -
> -std::unique_ptr<CameraMetadata> CameraDevice::requestTemplatePreview()
> +void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)
> {
> - /*
> - * \todo Keep this in sync with the actual number of entries.
> - * Currently: 20 entries, 35 bytes
> - */
> - auto requestTemplate = std::make_unique<CameraMetadata>(21, 36);
> - if (!requestTemplate->isValid()) {
> - return nullptr;
> - }
> -
> - /* Get the FPS range registered in the static metadata. */
> - camera_metadata_ro_entry_t entry;
> -	bool found = staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
> - &entry);
> - if (!found) {
> - LOG(HAL, Error) << "Cannot create capture template without
> FPS range";
> - return nullptr;
> - }
> -
> - /*
> - * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata
> - * has been assembled as {{min, max} {max, max}}.
> - */
> - requestTemplate->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
> - entry.data.i32, 2);
> -
> - uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
> - requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE, aeMode);
> -
> - int32_t aeExposureCompensation = 0;
> - requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
> - aeExposureCompensation);
> -
> -	uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
> - requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
> - aePrecaptureTrigger);
> -
> - uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
> - requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK, aeLock);
> -
> -	uint8_t aeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
> - requestTemplate->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE,
> - aeAntibandingMode);
> -
> - uint8_t afMode = ANDROID_CONTROL_AF_MODE_OFF;
> - requestTemplate->addEntry(ANDROID_CONTROL_AF_MODE, afMode);
> -
> - uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
> - requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER, afTrigger);
> -
> - uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
> - requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE, awbMode);
> -
> - uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
> - requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK, awbLock);
> -
> - uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
> - requestTemplate->addEntry(ANDROID_FLASH_MODE, flashMode);
> -
> - uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
> - requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,
> - faceDetectMode);
> -
> - uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF;
> - requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE,
> - noiseReduction);
> -
> -	uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
> - requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
> - aberrationMode);
> -
> - uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
> - requestTemplate->addEntry(ANDROID_CONTROL_MODE, controlMode);
> -
> - float lensAperture = 2.53 / 100;
> - requestTemplate->addEntry(ANDROID_LENS_APERTURE, lensAperture);
> -
> -	uint8_t opticalStabilization = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
> - requestTemplate->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
> - opticalStabilization);
> -
> - uint8_t captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
> - requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,
> - captureIntent);
> -
> - return requestTemplate;
> + callbacks_ = callbacks;
> }
>
> -std::unique_ptr<CameraMetadata> CameraDevice::requestTemplateVideo()
> +const camera_metadata_t *CameraDevice::getStaticMetadata()
> {
> -	std::unique_ptr<CameraMetadata> previewTemplate = requestTemplatePreview();
> - if (!previewTemplate)
> - return nullptr;
> -
> - /*
> - * The video template requires a fixed FPS range. Everything else
> - * stays the same as the preview template.
> - */
> - camera_metadata_ro_entry_t entry;
> -	staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
> - &entry);
> -
> - /*
> - * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata
> - * has been assembled as {{min, max} {max, max}}.
> - */
> - previewTemplate->updateEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
> - entry.data.i32 + 2, 2);
> -
> - return previewTemplate;
> + return capabilities_.staticMetadata()->get();
> }
>
> /*
> @@ -1630,7 +520,7 @@ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)
> switch (type) {
> case CAMERA3_TEMPLATE_PREVIEW:
> captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
> - requestTemplate = requestTemplatePreview();
> + requestTemplate = capabilities_.requestTemplatePreview();
> break;
> case CAMERA3_TEMPLATE_STILL_CAPTURE:
> /*
> @@ -1638,15 +528,15 @@ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)
> * for the torch mode we currently do not support.
> */
>                 captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
> - requestTemplate = requestTemplatePreview();
> + requestTemplate = capabilities_.requestTemplatePreview();
> break;
> case CAMERA3_TEMPLATE_VIDEO_RECORD:
>                 captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
> - requestTemplate = requestTemplateVideo();
> + requestTemplate = capabilities_.requestTemplateVideo();
> break;
> case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
>                 captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
> - requestTemplate = requestTemplateVideo();
> + requestTemplate = capabilities_.requestTemplateVideo();
> break;
>         /* \todo Implement templates generation for the remaining use cases. */
> case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
> @@ -1668,19 +558,6 @@ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)
> return requestTemplates_[type]->get();
> }
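
Since requestTemplates_ caches by type, the construction cost above is paid
once per template; a hypothetical caller sees that directly:

    /* First call builds and caches the template; later calls reuse it. */
    const camera_metadata_t *first =
            device->constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
    const camera_metadata_t *second =
            device->constructDefaultRequestSettings(CAMERA3_TEMPLATE_PREVIEW);
    /* first == second: both point at the cached CameraMetadata. */
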
>
> -PixelFormat CameraDevice::toPixelFormat(int format) const
> -{
> - /* Translate Android format code to libcamera pixel format. */
> - auto it = formatsMap_.find(format);
> - if (it == formatsMap_.end()) {
> - LOG(HAL, Error) << "Requested format " <<
> utils::hex(format)
> - << " not supported";
> - return PixelFormat();
> - }
> -
> - return it->second;
> -}
> -
> /*
> * Inspect the stream_list to produce a list of StreamConfiguration to
>  * be used to configure the Camera.
> @@ -1727,7 +604,7 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)
> camera3_stream_t *stream = stream_list->streams[i];
> Size size(stream->width, stream->height);
>
> - PixelFormat format = toPixelFormat(stream->format);
> +               PixelFormat format = capabilities_.toPixelFormat(stream->format);
>
> LOG(HAL, Info) << "Stream #" << i
> << ", direction: " << stream->stream_type
> diff --git a/src/android/camera_device.h b/src/android/camera_device.h
> index 4aadb27c562c..090fe28a551e 100644
> --- a/src/android/camera_device.h
> +++ b/src/android/camera_device.h
> @@ -10,14 +10,12 @@
> #include <map>
> #include <memory>
> #include <mutex>
> -#include <tuple>
> #include <vector>
>
> #include <hardware/camera3.h>
>
> #include <libcamera/buffer.h>
> #include <libcamera/camera.h>
> -#include <libcamera/geometry.h>
> #include <libcamera/request.h>
> #include <libcamera/stream.h>
>
> @@ -26,6 +24,7 @@
> #include "libcamera/internal/message.h"
> #include "libcamera/internal/thread.h"
>
> +#include "camera_capabilities.h"
> #include "camera_metadata.h"
> #include "camera_stream.h"
> #include "camera_worker.h"
> @@ -57,7 +56,7 @@ public:
> const std::string &model() const { return model_; }
> int facing() const { return facing_; }
> int orientation() const { return orientation_; }
> -       unsigned int maxJpegBufferSize() const { return maxJpegBufferSize_; }
> + unsigned int maxJpegBufferSize() const;
>
> void setCallbacks(const camera3_callback_ops_t *callbacks);
> const camera_metadata_t *getStaticMetadata();
> @@ -86,11 +85,6 @@ private:
> std::unique_ptr<CaptureRequest> request_;
> };
>
> - struct Camera3StreamConfiguration {
> - libcamera::Size resolution;
> - int androidFormat;
> - };
> -
> enum class State {
> Stopped,
> Flushing,
> @@ -99,22 +93,11 @@ private:
>
> void stop();
>
> - int initializeStreamConfigurations();
> - std::vector<libcamera::Size>
> - getYUVResolutions(libcamera::CameraConfiguration *cameraConfig,
> - const libcamera::PixelFormat &pixelFormat,
> - const std::vector<libcamera::Size> &resolutions);
> - std::vector<libcamera::Size>
> - getRawResolutions(const libcamera::PixelFormat &pixelFormat);
> -
>         libcamera::FrameBuffer *createFrameBuffer(const buffer_handle_t camera3buffer);
> void abortRequest(camera3_capture_request_t *request);
> void notifyShutter(uint32_t frameNumber, uint64_t timestamp);
> void notifyError(uint32_t frameNumber, camera3_stream_t *stream,
> camera3_error_msg_code code);
> - std::unique_ptr<CameraMetadata> requestTemplatePreview();
> - std::unique_ptr<CameraMetadata> requestTemplateVideo();
> - libcamera::PixelFormat toPixelFormat(int format) const;
> int processControls(Camera3RequestDescriptor *descriptor);
> std::unique_ptr<CameraMetadata> getResultMetadata(
> const Camera3RequestDescriptor &descriptor) const;
> @@ -129,13 +112,11 @@ private:
>
> std::shared_ptr<libcamera::Camera> camera_;
> std::unique_ptr<libcamera::CameraConfiguration> config_;
> + CameraCapabilities capabilities_;
>
> - std::unique_ptr<CameraMetadata> staticMetadata_;
>         std::map<unsigned int, std::unique_ptr<CameraMetadata>> requestTemplates_;
> const camera3_callback_ops_t *callbacks_;
>
> - std::vector<Camera3StreamConfiguration> streamConfigurations_;
> - std::map<int, libcamera::PixelFormat> formatsMap_;
> std::vector<CameraStream> streams_;
>
> libcamera::Mutex descriptorsMutex_; /* Protects descriptors_. */
> @@ -147,8 +128,6 @@ private:
> int facing_;
> int orientation_;
>
> - unsigned int maxJpegBufferSize_;
> -
> CameraMetadata lastSettings_;
> };
>
> diff --git a/src/android/meson.build b/src/android/meson.build
> index f27fd5316705..6270fb201338 100644
> --- a/src/android/meson.build
> +++ b/src/android/meson.build
> @@ -44,6 +44,7 @@ subdir('cros')
>
> android_hal_sources = files([
> 'camera3_hal.cpp',
> + 'camera_capabilities.cpp',
> 'camera_device.cpp',
> 'camera_hal_config.cpp',
> 'camera_hal_manager.cpp',
> --
> 2.31.1
>
>