<div dir="ltr"><div dir="ltr">Hi Jacopo, thank you for the patch.</div><div><br></div>I failed to apply the patch on top of the latest tree for review.<div>Could you tell me the parent commit on which I can apply this patch?</div><div><br></div><div>-Hiro<br><div class="gmail_quote"><div dir="ltr" class="gmail_attr">On Tue, Jun 22, 2021 at 12:29 AM Jacopo Mondi <<a href="mailto:jacopo@jmondi.org">jacopo@jmondi.org</a>> wrote:<br></div><blockquote class="gmail_quote" style="margin:0px 0px 0px 0.8ex;border-left:1px solid rgb(204,204,204);padding-left:1ex">The camera_device.cpp has grown a little too much, and it has quickly<br>
become hard to maintain. Break out the handling of the static<br>
information collected at camera initialization time to a new<br>
CameraCapabilities class.<br>
<br>
Break out from the camera_device.cpp file all the functions related to:<br>
- Initialization of supported stream configurations<br>
- Initialization of static metadata<br>
- Initialization of request templates<br>
<br>
Signed-off-by: Jacopo Mondi <<a href="mailto:jacopo@jmondi.org" target="_blank">jacopo@jmondi.org</a>><br>
Acked-by: Paul Elder <<a href="mailto:paul.elder@ideasonboard.com" target="_blank">paul.elder@ideasonboard.com</a>><br>
Tested-by: Paul Elder <<a href="mailto:paul.elder@ideasonboard.com" target="_blank">paul.elder@ideasonboard.com</a>><br>
---<br>
src/android/camera_capabilities.cpp | 1164 +++++++++++++++++++++++++++<br>
src/android/camera_capabilities.h | 65 ++<br>
src/android/camera_device.cpp | 1147 +-------------------------<br>
src/android/camera_device.h | 27 +-<br>
src/android/meson.build | 1 +<br>
5 files changed, 1245 insertions(+), 1159 deletions(-)<br>
create mode 100644 src/android/camera_capabilities.cpp<br>
create mode 100644 src/android/camera_capabilities.h<br>
<br>
diff --git a/src/android/camera_capabilities.cpp b/src/android/camera_capabilities.cpp<br>
new file mode 100644<br>
index 000000000000..311a2c839586<br>
--- /dev/null<br>
+++ b/src/android/camera_capabilities.cpp<br>
@@ -0,0 +1,1164 @@<br>
+/* SPDX-License-Identifier: LGPL-2.1-or-later */<br>
+/*<br>
+ * Copyright (C) 2021, Google Inc.<br>
+ *<br>
+ * camera_capabilities.cpp - Camera static properties manager<br>
+ */<br>
+<br>
+#include "camera_capabilities.h"<br>
+<br>
+#include <array><br>
+#include <cmath><br>
+<br>
+#include <hardware/camera3.h><br>
+<br>
+#include <libcamera/control_ids.h><br>
+#include <libcamera/controls.h><br>
+#include <libcamera/property_ids.h><br>
+<br>
+#include "libcamera/internal/formats.h"<br>
+#include "libcamera/internal/log.h"<br>
+<br>
+using namespace libcamera;<br>
+<br>
+LOG_DECLARE_CATEGORY(HAL)<br>
+<br>
+namespace {<br>
+<br>
+/*<br>
+ * \var camera3Resolutions<br>
+ * \brief The list of image resolutions defined as mandatory to be supported by<br>
+ * the Android Camera3 specification<br>
+ */<br>
+const std::vector<Size> camera3Resolutions = {<br>
+ { 320, 240 },<br>
+ { 640, 480 },<br>
+ { 1280, 720 },<br>
+ { 1920, 1080 }<br>
+};<br>
+<br>
+/*<br>
+ * \struct Camera3Format<br>
+ * \brief Data associated with an Android format identifier<br>
+ * \var libcameraFormats List of libcamera pixel formats compatible with the<br>
+ * Android format<br>
+ * \var name The human-readable representation of the Android format code<br>
+ */<br>
+struct Camera3Format {<br>
+ std::vector<PixelFormat> libcameraFormats;<br>
+ bool mandatory;<br>
+ const char *name;<br>
+};<br>
+<br>
+/*<br>
+ * \var camera3FormatsMap<br>
+ * \brief Associate Android format code with ancillary data<br>
+ */<br>
+const std::map<int, const Camera3Format> camera3FormatsMap = {<br>
+ {<br>
+ HAL_PIXEL_FORMAT_BLOB, {<br>
+ { formats::MJPEG },<br>
+ true,<br>
+ "BLOB"<br>
+ }<br>
+ }, {<br>
+ HAL_PIXEL_FORMAT_YCbCr_420_888, {<br>
+ { formats::NV12, formats::NV21 },<br>
+ true,<br>
+ "YCbCr_420_888"<br>
+ }<br>
+ }, {<br>
+ /*<br>
+ * \todo Translate IMPLEMENTATION_DEFINED inspecting the gralloc<br>
+ * usage flag. For now, copy the YCbCr_420 configuration.<br>
+ */<br>
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, {<br>
+ { formats::NV12, formats::NV21 },<br>
+ true,<br>
+ "IMPLEMENTATION_DEFINED"<br>
+ }<br>
+ }, {<br>
+ HAL_PIXEL_FORMAT_RAW10, {<br>
+ {<br>
+ formats::SBGGR10_CSI2P,<br>
+ formats::SGBRG10_CSI2P,<br>
+ formats::SGRBG10_CSI2P,<br>
+ formats::SRGGB10_CSI2P<br>
+ },<br>
+ false,<br>
+ "RAW10"<br>
+ }<br>
+ }, {<br>
+ HAL_PIXEL_FORMAT_RAW12, {<br>
+ {<br>
+ formats::SBGGR12_CSI2P,<br>
+ formats::SGBRG12_CSI2P,<br>
+ formats::SGRBG12_CSI2P,<br>
+ formats::SRGGB12_CSI2P<br>
+ },<br>
+ false,<br>
+ "RAW12"<br>
+ }<br>
+ }, {<br>
+ HAL_PIXEL_FORMAT_RAW16, {<br>
+ {<br>
+ formats::SBGGR16,<br>
+ formats::SGBRG16,<br>
+ formats::SGRBG16,<br>
+ formats::SRGGB16<br>
+ },<br>
+ false,<br>
+ "RAW16"<br>
+ }<br>
+ },<br>
+};<br>
+<br>
+} /* namespace */<br>
+<br>
+int CameraCapabilities::initialize(std::shared_ptr<libcamera::Camera> camera,<br>
+ int orientation, int facing)<br>
+{<br>
+ camera_ = camera;<br>
+ orientation_ = orientation;<br>
+ facing_ = facing;<br>
+<br>
+ /* Acquire the camera and initialize available stream configurations. */<br>
+ int ret = camera_->acquire();<br>
+ if (ret) {<br>
+ LOG(HAL, Error) << "Failed to temporarily acquire the camera";<br>
+ return ret;<br>
+ }<br>
+<br>
+ ret = initializeStreamConfigurations();<br>
+ camera_->release();<br>
+ if (ret)<br>
+ return ret;<br>
+<br>
+ return initializeStaticMetadata();<br>
+}<br>
+<br>
+std::vector<Size> CameraCapabilities::getYUVResolutions(CameraConfiguration *cameraConfig,<br>
+ const PixelFormat &pixelFormat,<br>
+ const std::vector<Size> &resolutions)<br>
+{<br>
+ std::vector<Size> supportedResolutions;<br>
+<br>
+ StreamConfiguration &cfg = cameraConfig->at(0);<br>
+ for (const Size &res : resolutions) {<br>
+ cfg.pixelFormat = pixelFormat;<br>
+ cfg.size = res;<br>
+<br>
+ CameraConfiguration::Status status = cameraConfig->validate();<br>
+ if (status != CameraConfiguration::Valid) {<br>
+ LOG(HAL, Debug) << cfg.toString() << " not supported";<br>
+ continue;<br>
+ }<br>
+<br>
+ LOG(HAL, Debug) << cfg.toString() << " supported";<br>
+<br>
+ supportedResolutions.push_back(res);<br>
+ }<br>
+<br>
+ return supportedResolutions;<br>
+}<br>
+<br>
+std::vector<Size> CameraCapabilities::getRawResolutions(const libcamera::PixelFormat &pixelFormat)<br>
+{<br>
+ std::unique_ptr<CameraConfiguration> cameraConfig =<br>
+ camera_->generateConfiguration({ StreamRole::Raw });<br>
+ StreamConfiguration &cfg = cameraConfig->at(0);<br>
+ const StreamFormats &formats = cfg.formats();<br>
+ std::vector<Size> supportedResolutions = formats.sizes(pixelFormat);<br>
+<br>
+ return supportedResolutions;<br>
+}<br>
+<br>
+/*<br>
+ * Initialize the format conversion map to translate from Android format<br>
+ * identifier to libcamera pixel formats and fill in the list of supported<br>
+ * stream configurations to be reported to the Android camera framework through<br>
+ * the Camera static metadata.<br>
+ */<br>
+int CameraCapabilities::initializeStreamConfigurations()<br>
+{<br>
+ /*<br>
+ * Get the maximum output resolutions<br>
+ * \todo Get this from the camera properties once defined<br>
+ */<br>
+ std::unique_ptr<CameraConfiguration> cameraConfig =<br>
+ camera_->generateConfiguration({ StillCapture });<br>
+ if (!cameraConfig) {<br>
+ LOG(HAL, Error) << "Failed to get maximum resolution";<br>
+ return -EINVAL;<br>
+ }<br>
+ StreamConfiguration &cfg = cameraConfig->at(0);<br>
+<br>
+ /*<br>
+ * \todo JPEG - Adjust the maximum available resolution by taking the<br>
+ * JPEG encoder requirements into account (alignment and aspect ratio).<br>
+ */<br>
+ const Size maxRes = cfg.size;<br>
+ LOG(HAL, Debug) << "Maximum supported resolution: " << maxRes.toString();<br>
+<br>
+ /*<br>
+ * Build the list of supported image resolutions.<br>
+ *<br>
+ * The resolutions listed in camera3Resolutions are mandatory to be<br>
+ * supported, up to the camera maximum resolution.<br>
+ *<br>
+ * Augment the list by adding resolutions calculated from the camera<br>
+ * maximum one.<br>
+ */<br>
+ std::vector<Size> cameraResolutions;<br>
+ std::copy_if(camera3Resolutions.begin(), camera3Resolutions.end(),<br>
+ std::back_inserter(cameraResolutions),<br>
+ [&](const Size &res) { return res < maxRes; });<br>
+<br>
+ /*<br>
+ * The Camera3 specification suggests adding 1/2 and 1/4 of the maximum<br>
+ * resolution.<br>
+ */<br>
+ for (unsigned int divider = 2;; divider <<= 1) {<br>
+ Size derivedSize{<br>
+ maxRes.width / divider,<br>
+ maxRes.height / divider,<br>
+ };<br>
+<br>
+ if (derivedSize.width < 320 ||<br>
+ derivedSize.height < 240)<br>
+ break;<br>
+<br>
+ cameraResolutions.push_back(derivedSize);<br>
+ }<br>
+ cameraResolutions.push_back(maxRes);<br>
+<br>
+ /* Remove duplicated entries from the list of supported resolutions. */<br>
+ std::sort(cameraResolutions.begin(), cameraResolutions.end());<br>
+ auto last = std::unique(cameraResolutions.begin(), cameraResolutions.end());<br>
+ cameraResolutions.erase(last, cameraResolutions.end());<br>
+<br>
+ /*<br>
+ * Build the list of supported camera formats.<br>
+ *<br>
+ * To each Android format a list of compatible libcamera formats is<br>
+ * associated. The first libcamera format that tests successful is added<br>
+ * to the format translation map used when configuring the streams.<br>
+ * It is then tested against the list of supported camera resolutions to<br>
+ * build the stream configuration map reported through the camera static<br>
+ * metadata.<br>
+ */<br>
+ Size maxJpegSize;<br>
+ for (const auto &format : camera3FormatsMap) {<br>
+ int androidFormat = format.first;<br>
+ const Camera3Format &camera3Format = format.second;<br>
+ const std::vector<PixelFormat> &libcameraFormats =<br>
+ camera3Format.libcameraFormats;<br>
+<br>
+ LOG(HAL, Debug) << "Trying to map Android format "<br>
+ << camera3Format.name;<br>
+<br>
+ /*<br>
+ * JPEG is always supported, either produced directly by the<br>
+ * camera, or encoded in the HAL.<br>
+ */<br>
+ if (androidFormat == HAL_PIXEL_FORMAT_BLOB) {<br>
+ formatsMap_[androidFormat] = formats::MJPEG;<br>
+ LOG(HAL, Debug) << "Mapped Android format "<br>
+ << camera3Format.name << " to "<br>
+ << formats::MJPEG.toString()<br>
+ << " (fixed mapping)";<br>
+ continue;<br>
+ }<br>
+<br>
+ /*<br>
+ * Test the libcamera formats that can produce images<br>
+ * compatible with the format defined by Android.<br>
+ */<br>
+ PixelFormat mappedFormat;<br>
+ for (const PixelFormat &pixelFormat : libcameraFormats) {<br>
+<br>
+ LOG(HAL, Debug) << "Testing " << pixelFormat.toString();<br>
+<br>
+ /*<br>
+ * The stream configuration size can be adjusted,<br>
+ * not the pixel format.<br>
+ *<br>
+ * \todo This could be simplified once all pipeline<br>
+ * handlers will report the StreamFormats list of<br>
+ * supported formats.<br>
+ */<br>
+ cfg.pixelFormat = pixelFormat;<br>
+<br>
+ CameraConfiguration::Status status = cameraConfig->validate();<br>
+ if (status != CameraConfiguration::Invalid &&<br>
+ cfg.pixelFormat == pixelFormat) {<br>
+ mappedFormat = pixelFormat;<br>
+ break;<br>
+ }<br>
+ }<br>
+<br>
+ if (!mappedFormat.isValid()) {<br>
+ /* If the format is not mandatory, skip it. */<br>
+ if (!camera3Format.mandatory)<br>
+ continue;<br>
+<br>
+ LOG(HAL, Error)<br>
+ << "Failed to map mandatory Android format "<br>
+ << camera3Format.name << " ("<br>
+ << utils::hex(androidFormat) << "): aborting";<br>
+ return -EINVAL;<br>
+ }<br>
+<br>
+ /*<br>
+ * Record the mapping and then proceed to generate the<br>
+ * stream configurations map, by testing the image resolutions.<br>
+ */<br>
+ formatsMap_[androidFormat] = mappedFormat;<br>
+ LOG(HAL, Debug) << "Mapped Android format "<br>
+ << camera3Format.name << " to "<br>
+ << mappedFormat.toString();<br>
+<br>
+ std::vector<Size> resolutions;<br>
+ const PixelFormatInfo &info = PixelFormatInfo::info(mappedFormat);<br>
+ if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)<br>
+ resolutions = getRawResolutions(mappedFormat);<br>
+ else<br>
+ resolutions = getYUVResolutions(cameraConfig.get(),<br>
+ mappedFormat,<br>
+ cameraResolutions);<br>
+<br>
+ for (const Size &res : resolutions) {<br>
+ streamConfigurations_.push_back({ res, androidFormat });<br>
+<br>
+ /*<br>
+ * If the format is HAL_PIXEL_FORMAT_YCbCr_420_888<br>
+ * from which JPEG is produced, add an entry for<br>
+ * the JPEG stream.<br>
+ *<br>
+ * \todo Wire the JPEG encoder to query the supported<br>
+ * sizes provided a list of formats it can encode.<br>
+ *<br>
+ * \todo Support JPEG streams produced by the Camera<br>
+ * natively.<br>
+ */<br>
+ if (androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888) {<br>
+ streamConfigurations_.push_back(<br>
+ { res, HAL_PIXEL_FORMAT_BLOB });<br>
+ maxJpegSize = std::max(maxJpegSize, res);<br>
+ }<br>
+ }<br>
+<br>
+ /*<br>
+ * \todo Calculate the maximum JPEG buffer size by asking the<br>
+ * encoder, given the maximum frame size required.<br>
+ */<br>
+ maxJpegBufferSize_ = maxJpegSize.width * maxJpegSize.height * 1.5;<br>
+ }<br>
+<br>
+ LOG(HAL, Debug) << "Collected stream configuration map: ";<br>
+ for (const auto &entry : streamConfigurations_)<br>
+ LOG(HAL, Debug) << "{ " << entry.resolution.toString() << " - "<br>
+ << utils::hex(entry.androidFormat) << " }";<br>
+<br>
+ return 0;<br>
+}<br>
+<br>
+int CameraCapabilities::initializeStaticMetadata()<br>
+{<br>
+ staticMetadata_ = std::make_unique<CameraMetadata>(64, 1024);<br>
+ if (!staticMetadata_->isValid()) {<br>
+ LOG(HAL, Error) << "Failed to allocate static metadata";<br>
+ staticMetadata_.reset();<br>
+ return -EINVAL;<br>
+ }<br>
+<br>
+ const ControlInfoMap &controlsInfo = camera_->controls();<br>
+ const ControlList &properties = camera_->properties();<br>
+<br>
+ /* Color correction static metadata. */<br>
+ {<br>
+ std::vector<uint8_t> data;<br>
+ data.reserve(3);<br>
+ const auto &infoMap = controlsInfo.find(&controls::draft::ColorCorrectionAberrationMode);<br>
+ if (infoMap != controlsInfo.end()) {<br>
+ for (const auto &value : infoMap->second.values())<br>
+ data.push_back(value.get<int32_t>());<br>
+ } else {<br>
+ data.push_back(ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF);<br>
+ }<br>
+ staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,<br>
+ data);<br>
+ }<br>
+<br>
+ /* Control static metadata. */<br>
+ std::vector<uint8_t> aeAvailableAntiBandingModes = {<br>
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,<br>
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,<br>
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,<br>
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,<br>
+ aeAvailableAntiBandingModes);<br>
+<br>
+ std::vector<uint8_t> aeAvailableModes = {<br>
+ ANDROID_CONTROL_AE_MODE_ON,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,<br>
+ aeAvailableModes);<br>
+<br>
+ int64_t minFrameDurationNsec = -1;<br>
+ int64_t maxFrameDurationNsec = -1;<br>
+ const auto frameDurationsInfo = controlsInfo.find(&controls::FrameDurationLimits);<br>
+ if (frameDurationsInfo != controlsInfo.end()) {<br>
+ minFrameDurationNsec = frameDurationsInfo->second.min().get<int64_t>() * 1000;<br>
+ maxFrameDurationNsec = frameDurationsInfo->second.max().get<int64_t>() * 1000;<br>
+<br>
+ /*<br>
+ * Adjust the minimum frame duration to comply with Android<br>
+ * requirements. The camera service mandates all preview/record<br>
+ * streams to have a minimum frame duration < 33,366 microseconds<br>
+ * (see MAX_PREVIEW_RECORD_DURATION_NS in the camera service<br>
+ * implementation).<br>
+ *<br>
+ * If we're close enough (+ 500 useconds) to that value, round<br>
+ * the minimum frame duration of the camera to an accepted<br>
+ * value.<br>
+ */<br>
+ static constexpr int64_t MAX_PREVIEW_RECORD_DURATION_NS = 1e9 / 29.97;<br>
+ if (minFrameDurationNsec > MAX_PREVIEW_RECORD_DURATION_NS &&<br>
+ minFrameDurationNsec < MAX_PREVIEW_RECORD_DURATION_NS + 500000)<br>
+ minFrameDurationNsec = MAX_PREVIEW_RECORD_DURATION_NS - 1000;<br>
+<br>
+ /*<br>
+ * The AE routine frame rate limits are computed using the frame<br>
+ * duration limits, as libcamera clips the AE routine to the<br>
+ * frame durations.<br>
+ */<br>
+ int32_t maxFps = std::round(1e9 / minFrameDurationNsec);<br>
+ int32_t minFps = std::round(1e9 / maxFrameDurationNsec);<br>
+ minFps = std::max(1, minFps);<br>
+<br>
+ /*<br>
+ * Force rounding errors so that we have the proper frame<br>
+ * durations for when we reuse these variables later<br>
+ */<br>
+ minFrameDurationNsec = 1e9 / maxFps;<br>
+ maxFrameDurationNsec = 1e9 / minFps;<br>
+<br>
+ /*<br>
+ * Register to the camera service {min, max} and {max, max}<br>
+ * intervals as requested by the metadata documentation.<br>
+ */<br>
+ int32_t availableAeFpsTarget[] = {<br>
+ minFps, maxFps, maxFps, maxFps<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,<br>
+ availableAeFpsTarget);<br>
+ }<br>
+<br>
+ std::vector<int32_t> aeCompensationRange = {<br>
+ 0, 0,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,<br>
+ aeCompensationRange);<br>
+<br>
+ const camera_metadata_rational_t aeCompensationStep[] = {<br>
+ { 0, 1 }<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,<br>
+ aeCompensationStep);<br>
+<br>
+ std::vector<uint8_t> availableAfModes = {<br>
+ ANDROID_CONTROL_AF_MODE_OFF,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,<br>
+ availableAfModes);<br>
+<br>
+ std::vector<uint8_t> availableEffects = {<br>
+ ANDROID_CONTROL_EFFECT_MODE_OFF,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,<br>
+ availableEffects);<br>
+<br>
+ std::vector<uint8_t> availableSceneModes = {<br>
+ ANDROID_CONTROL_SCENE_MODE_DISABLED,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,<br>
+ availableSceneModes);<br>
+<br>
+ std::vector<uint8_t> availableStabilizationModes = {<br>
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,<br>
+ availableStabilizationModes);<br>
+<br>
+ /*<br>
+ * \todo Inspect the Camera capabilities to report the available<br>
+ * AWB modes. Default to AUTO as CTS tests require it.<br>
+ */<br>
+ std::vector<uint8_t> availableAwbModes = {<br>
+ ANDROID_CONTROL_AWB_MODE_AUTO,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,<br>
+ availableAwbModes);<br>
+<br>
+ std::vector<int32_t> availableMaxRegions = {<br>
+ 0, 0, 0,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,<br>
+ availableMaxRegions);<br>
+<br>
+ std::vector<uint8_t> sceneModesOverride = {<br>
+ ANDROID_CONTROL_AE_MODE_ON,<br>
+ ANDROID_CONTROL_AWB_MODE_AUTO,<br>
+ ANDROID_CONTROL_AF_MODE_OFF,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,<br>
+ sceneModesOverride);<br>
+<br>
+ uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,<br>
+ aeLockAvailable);<br>
+<br>
+ uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,<br>
+ awbLockAvailable);<br>
+<br>
+ char availableControlModes = ANDROID_CONTROL_MODE_AUTO;<br>
+ staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,<br>
+ availableControlModes);<br>
+<br>
+ /* JPEG static metadata. */<br>
+<br>
+ /*<br>
+ * Create the list of supported thumbnail sizes by inspecting the<br>
+ * available JPEG resolutions collected in streamConfigurations_ and<br>
+ * generate one entry for each aspect ratio.<br>
+ *<br>
+ * The JPEG thumbnailer can freely scale, so pick an arbitrary<br>
+ * (160, 160) size as the bounding rectangle, which is then cropped to<br>
+ * the different supported aspect ratios.<br>
+ */<br>
+ constexpr Size maxJpegThumbnail(160, 160);<br>
+ std::vector<Size> thumbnailSizes;<br>
+ thumbnailSizes.push_back({ 0, 0 });<br>
+ for (const auto &entry : streamConfigurations_) {<br>
+ if (entry.androidFormat != HAL_PIXEL_FORMAT_BLOB)<br>
+ continue;<br>
+<br>
+ Size thumbnailSize = maxJpegThumbnail<br>
+ .boundedToAspectRatio({ entry.resolution.width,<br>
+ entry.resolution.height });<br>
+ thumbnailSizes.push_back(thumbnailSize);<br>
+ }<br>
+<br>
+ std::sort(thumbnailSizes.begin(), thumbnailSizes.end());<br>
+ auto last = std::unique(thumbnailSizes.begin(), thumbnailSizes.end());<br>
+ thumbnailSizes.erase(last, thumbnailSizes.end());<br>
+<br>
+ /* Transform sizes into a list of integers that can be consumed. */<br>
+ std::vector<int32_t> thumbnailEntries;<br>
+ thumbnailEntries.reserve(thumbnailSizes.size() * 2);<br>
+ for (const auto &size : thumbnailSizes) {<br>
+ thumbnailEntries.push_back(size.width);<br>
+ thumbnailEntries.push_back(size.height);<br>
+ }<br>
+ staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,<br>
+ thumbnailEntries);<br>
+<br>
+ staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, maxJpegBufferSize_);<br>
+<br>
+ /* Sensor static metadata. */<br>
+ std::array<int32_t, 2> pixelArraySize;<br>
+ {<br>
+ const Size &size = properties.get(properties::PixelArraySize);<br>
+ pixelArraySize[0] = size.width;<br>
+ pixelArraySize[1] = size.height;<br>
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,<br>
+ pixelArraySize);<br>
+ }<br>
+<br>
+ if (properties.contains(properties::UnitCellSize)) {<br>
+ const Size &cellSize = properties.get<Size>(properties::UnitCellSize);<br>
+ std::array<float, 2> physicalSize{<br>
+ cellSize.width * pixelArraySize[0] / 1e6f,<br>
+ cellSize.height * pixelArraySize[1] / 1e6f<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,<br>
+ physicalSize);<br>
+ }<br>
+<br>
+ {<br>
+ const Span<const Rectangle> &rects =<br>
+ properties.get(properties::PixelArrayActiveAreas);<br>
+ std::vector<int32_t> data{<br>
+ static_cast<int32_t>(rects[0].x),<br>
+ static_cast<int32_t>(rects[0].y),<br>
+ static_cast<int32_t>(rects[0].width),<br>
+ static_cast<int32_t>(rects[0].height),<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,<br>
+ data);<br>
+ }<br>
+<br>
+ int32_t sensitivityRange[] = {<br>
+ 32, 2400,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,<br>
+ sensitivityRange);<br>
+<br>
+ /* Report the color filter arrangement if the camera reports it. */<br>
+ if (properties.contains(properties::draft::ColorFilterArrangement)) {<br>
+ uint8_t filterArr = properties.get(properties::draft::ColorFilterArrangement);<br>
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,<br>
+ filterArr);<br>
+ }<br>
+<br>
+ const auto &exposureInfo = controlsInfo.find(&controls::ExposureTime);<br>
+ if (exposureInfo != controlsInfo.end()) {<br>
+ int64_t exposureTimeRange[2] = {<br>
+ exposureInfo->second.min().get<int32_t>() * 1000LL,<br>
+ exposureInfo->second.max().get<int32_t>() * 1000LL,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,<br>
+ exposureTimeRange, 2);<br>
+ }<br>
+<br>
+ staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, orientation_);<br>
+<br>
+ std::vector<int32_t> testPatternModes = {<br>
+ ANDROID_SENSOR_TEST_PATTERN_MODE_OFF<br>
+ };<br>
+ const auto &testPatternsInfo =<br>
+ controlsInfo.find(&controls::draft::TestPatternMode);<br>
+ if (testPatternsInfo != controlsInfo.end()) {<br>
+ const auto &values = testPatternsInfo->second.values();<br>
+ ASSERT(!values.empty());<br>
+ for (const auto &value : values) {<br>
+ switch (value.get<int32_t>()) {<br>
+ case controls::draft::TestPatternModeOff:<br>
+ /*<br>
+ * ANDROID_SENSOR_TEST_PATTERN_MODE_OFF is<br>
+ * already in testPatternModes.<br>
+ */<br>
+ break;<br>
+<br>
+ case controls::draft::TestPatternModeSolidColor:<br>
+ testPatternModes.push_back(<br>
+ ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);<br>
+ break;<br>
+<br>
+ case controls::draft::TestPatternModeColorBars:<br>
+ testPatternModes.push_back(<br>
+ ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS);<br>
+ break;<br>
+<br>
+ case controls::draft::TestPatternModeColorBarsFadeToGray:<br>
+ testPatternModes.push_back(<br>
+ ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY);<br>
+ break;<br>
+<br>
+ case controls::draft::TestPatternModePn9:<br>
+ testPatternModes.push_back(<br>
+ ANDROID_SENSOR_TEST_PATTERN_MODE_PN9);<br>
+ break;<br>
+<br>
+ case controls::draft::TestPatternModeCustom1:<br>
+ /* We don't support this yet. */<br>
+ break;<br>
+<br>
+ default:<br>
+ LOG(HAL, Error) << "Unknown test pattern mode: "<br>
+ << value.get<int32_t>();<br>
+ continue;<br>
+ }<br>
+ }<br>
+ }<br>
+ staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,<br>
+ testPatternModes);<br>
+<br>
+ uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;<br>
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,<br>
+ timestampSource);<br>
+<br>
+ if (maxFrameDurationNsec > 0)<br>
+ staticMetadata_->addEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,<br>
+ maxFrameDurationNsec);<br>
+<br>
+ /* Statistics static metadata. */<br>
+ uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;<br>
+ staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,<br>
+ faceDetectMode);<br>
+<br>
+ int32_t maxFaceCount = 0;<br>
+ staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,<br>
+ maxFaceCount);<br>
+<br>
+ {<br>
+ std::vector<uint8_t> data;<br>
+ data.reserve(2);<br>
+ const auto &infoMap = controlsInfo.find(&controls::draft::LensShadingMapMode);<br>
+ if (infoMap != controlsInfo.end()) {<br>
+ for (const auto &value : infoMap->second.values())<br>
+ data.push_back(value.get<int32_t>());<br>
+ } else {<br>
+ data.push_back(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF);<br>
+ }<br>
+ staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,<br>
+ data);<br>
+ }<br>
+<br>
+ /* Sync static metadata. */<br>
+ int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;<br>
+ staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, maxLatency);<br>
+<br>
+ /* Flash static metadata. */<br>
+ char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;<br>
+ staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,<br>
+ flashAvailable);<br>
+<br>
+ /* Lens static metadata. */<br>
+ std::vector<float> lensApertures = {<br>
+ 2.53 / 100,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,<br>
+ lensApertures);<br>
+<br>
+ uint8_t lensFacing;<br>
+ switch (facing_) {<br>
+ default:<br>
+ case CAMERA_FACING_FRONT:<br>
+ lensFacing = ANDROID_LENS_FACING_FRONT;<br>
+ break;<br>
+ case CAMERA_FACING_BACK:<br>
+ lensFacing = ANDROID_LENS_FACING_BACK;<br>
+ break;<br>
+ case CAMERA_FACING_EXTERNAL:<br>
+ lensFacing = ANDROID_LENS_FACING_EXTERNAL;<br>
+ break;<br>
+ }<br>
+ staticMetadata_->addEntry(ANDROID_LENS_FACING, lensFacing);<br>
+<br>
+ std::vector<float> lensFocalLengths = {<br>
+ 1,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,<br>
+ lensFocalLengths);<br>
+<br>
+ std::vector<uint8_t> opticalStabilizations = {<br>
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,<br>
+ opticalStabilizations);<br>
+<br>
+ float hypeFocalDistance = 0;<br>
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,<br>
+ hypeFocalDistance);<br>
+<br>
+ float minFocusDistance = 0;<br>
+ staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,<br>
+ minFocusDistance);<br>
+<br>
+ /* Noise reduction modes. */<br>
+ {<br>
+ std::vector<uint8_t> data;<br>
+ data.reserve(5);<br>
+ const auto &infoMap = controlsInfo.find(&controls::draft::NoiseReductionMode);<br>
+ if (infoMap != controlsInfo.end()) {<br>
+ for (const auto &value : infoMap->second.values())<br>
+ data.push_back(value.get<int32_t>());<br>
+ } else {<br>
+ data.push_back(ANDROID_NOISE_REDUCTION_MODE_OFF);<br>
+ }<br>
+ staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,<br>
+ data);<br>
+ }<br>
+<br>
+ /* Scaler static metadata. */<br>
+<br>
+ /*<br>
+ * \todo The digital zoom factor is a property that depends on the<br>
+ * desired output configuration and the sensor frame size input to the<br>
+ * ISP. This information is not available to the Android HAL, not at<br>
+ * initialization time at least.<br>
+ *<br>
+ * As a workaround rely on pipeline handlers initializing the<br>
+ * ScalerCrop control with the camera default configuration and use the<br>
+ * maximum and minimum crop rectangles to calculate the digital zoom<br>
+ * factor.<br>
+ */<br>
+ float maxZoom = 1.0f;<br>
+ const auto scalerCrop = controlsInfo.find(&controls::ScalerCrop);<br>
+ if (scalerCrop != controlsInfo.end()) {<br>
+ Rectangle min = scalerCrop->second.min().get<Rectangle>();<br>
+ Rectangle max = scalerCrop->second.max().get<Rectangle>();<br>
+ maxZoom = std::min(1.0f * max.width / min.width,<br>
+ 1.0f * max.height / min.height);<br>
+ }<br>
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,<br>
+ maxZoom);<br>
+<br>
+ std::vector<uint32_t> availableStreamConfigurations;<br>
+ availableStreamConfigurations.reserve(streamConfigurations_.size() * 4);<br>
+ for (const auto &entry : streamConfigurations_) {<br>
+ availableStreamConfigurations.push_back(entry.androidFormat);<br>
+ availableStreamConfigurations.push_back(entry.resolution.width);<br>
+ availableStreamConfigurations.push_back(entry.resolution.height);<br>
+ availableStreamConfigurations.push_back(<br>
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);<br>
+ }<br>
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,<br>
+ availableStreamConfigurations);<br>
+<br>
+ std::vector<int64_t> availableStallDurations = {<br>
+ ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,<br>
+ availableStallDurations);<br>
+<br>
+ /* Use the minimum frame duration for all the YUV/RGB formats. */<br>
+ if (minFrameDurationNsec > 0) {<br>
+ std::vector<int64_t> minFrameDurations;<br>
+ minFrameDurations.reserve(streamConfigurations_.size() * 4);<br>
+ for (const auto &entry : streamConfigurations_) {<br>
+ minFrameDurations.push_back(entry.androidFormat);<br>
+ minFrameDurations.push_back(entry.resolution.width);<br>
+ minFrameDurations.push_back(entry.resolution.height);<br>
+ minFrameDurations.push_back(minFrameDurationNsec);<br>
+ }<br>
+ staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,<br>
+ minFrameDurations);<br>
+ }<br>
+<br>
+ uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;<br>
+ staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, croppingType);<br>
+<br>
+ /* Info static metadata. */<br>
+ uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;<br>
+ staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,<br>
+ supportedHWLevel);<br>
+<br>
+ /* Request static metadata. */<br>
+ int32_t partialResultCount = 1;<br>
+ staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,<br>
+ partialResultCount);<br>
+<br>
+ {<br>
+ /* Default the value to 2 if not reported by the camera. */<br>
+ uint8_t maxPipelineDepth = 2;<br>
+ const auto &infoMap = controlsInfo.find(&controls::draft::PipelineDepth);<br>
+ if (infoMap != controlsInfo.end())<br>
+ maxPipelineDepth = infoMap->second.max().get<int32_t>();<br>
+ staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,<br>
+ maxPipelineDepth);<br>
+ }<br>
+<br>
+ /* LIMITED does not support reprocessing. */<br>
+ uint32_t maxNumInputStreams = 0;<br>
+ staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,<br>
+ maxNumInputStreams);<br>
+<br>
+ std::vector<uint8_t> availableCapabilities = {<br>
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,<br>
+ };<br>
+<br>
+ /* Report if camera supports RAW. */<br>
+ bool rawStreamAvailable = false;<br>
+ std::unique_ptr<CameraConfiguration> cameraConfig =<br>
+ camera_->generateConfiguration({ StreamRole::Raw });<br>
+ if (cameraConfig && !cameraConfig->empty()) {<br>
+ const PixelFormatInfo &info =<br>
+ PixelFormatInfo::info(cameraConfig->at(0).pixelFormat);<br>
+ /* Only advertise RAW support if RAW16 is possible. */<br>
+ if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW &&<br>
+ info.bitsPerPixel == 16) {<br>
+ rawStreamAvailable = true;<br>
+ availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);<br>
+ }<br>
+ }<br>
+<br>
+ /* Number of { RAW, YUV, JPEG } supported output streams */<br>
+ int32_t numOutStreams[] = { rawStreamAvailable, 2, 1 };<br>
+ staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,<br>
+ numOutStreams);<br>
+<br>
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,<br>
+ availableCapabilities);<br>
+<br>
+ std::vector<int32_t> availableCharacteristicsKeys = {<br>
+ ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,<br>
+ ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,<br>
+ ANDROID_CONTROL_AE_AVAILABLE_MODES,<br>
+ ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,<br>
+ ANDROID_CONTROL_AE_COMPENSATION_RANGE,<br>
+ ANDROID_CONTROL_AE_COMPENSATION_STEP,<br>
+ ANDROID_CONTROL_AE_LOCK_AVAILABLE,<br>
+ ANDROID_CONTROL_AF_AVAILABLE_MODES,<br>
+ ANDROID_CONTROL_AVAILABLE_EFFECTS,<br>
+ ANDROID_CONTROL_AVAILABLE_MODES,<br>
+ ANDROID_CONTROL_AVAILABLE_SCENE_MODES,<br>
+ ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,<br>
+ ANDROID_CONTROL_AWB_AVAILABLE_MODES,<br>
+ ANDROID_CONTROL_AWB_LOCK_AVAILABLE,<br>
+ ANDROID_CONTROL_MAX_REGIONS,<br>
+ ANDROID_CONTROL_SCENE_MODE_OVERRIDES,<br>
+ ANDROID_FLASH_INFO_AVAILABLE,<br>
+ ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,<br>
+ ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,<br>
+ ANDROID_JPEG_MAX_SIZE,<br>
+ ANDROID_LENS_FACING,<br>
+ ANDROID_LENS_INFO_AVAILABLE_APERTURES,<br>
+ ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,<br>
+ ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,<br>
+ ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,<br>
+ ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,<br>
+ ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,<br>
+ ANDROID_REQUEST_AVAILABLE_CAPABILITIES,<br>
+ ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,<br>
+ ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,<br>
+ ANDROID_REQUEST_PARTIAL_RESULT_COUNT,<br>
+ ANDROID_REQUEST_PIPELINE_MAX_DEPTH,<br>
+ ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,<br>
+ ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,<br>
+ ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,<br>
+ ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,<br>
+ ANDROID_SCALER_CROPPING_TYPE,<br>
+ ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,<br>
+ ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,<br>
+ ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,<br>
+ ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,<br>
+ ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,<br>
+ ANDROID_SENSOR_INFO_PHYSICAL_SIZE,<br>
+ ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,<br>
+ ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,<br>
+ ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,<br>
+ ANDROID_SENSOR_ORIENTATION,<br>
+ ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,<br>
+ ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,<br>
+ ANDROID_SYNC_MAX_LATENCY,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,<br>
+ availableCharacteristicsKeys);<br>
+<br>
+ std::vector<int32_t> availableRequestKeys = {<br>
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE,<br>
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE,<br>
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,<br>
+ ANDROID_CONTROL_AE_LOCK,<br>
+ ANDROID_CONTROL_AE_MODE,<br>
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,<br>
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE,<br>
+ ANDROID_CONTROL_AF_MODE,<br>
+ ANDROID_CONTROL_AF_TRIGGER,<br>
+ ANDROID_CONTROL_AWB_LOCK,<br>
+ ANDROID_CONTROL_AWB_MODE,<br>
+ ANDROID_CONTROL_CAPTURE_INTENT,<br>
+ ANDROID_CONTROL_EFFECT_MODE,<br>
+ ANDROID_CONTROL_MODE,<br>
+ ANDROID_CONTROL_SCENE_MODE,<br>
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,<br>
+ ANDROID_FLASH_MODE,<br>
+ ANDROID_JPEG_ORIENTATION,<br>
+ ANDROID_JPEG_QUALITY,<br>
+ ANDROID_JPEG_THUMBNAIL_QUALITY,<br>
+ ANDROID_JPEG_THUMBNAIL_SIZE,<br>
+ ANDROID_LENS_APERTURE,<br>
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE,<br>
+ ANDROID_NOISE_REDUCTION_MODE,<br>
+ ANDROID_SCALER_CROP_REGION,<br>
+ ANDROID_STATISTICS_FACE_DETECT_MODE<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,<br>
+ availableRequestKeys);<br>
+<br>
+ std::vector<int32_t> availableResultKeys = {<br>
+ ANDROID_COLOR_CORRECTION_ABERRATION_MODE,<br>
+ ANDROID_CONTROL_AE_ANTIBANDING_MODE,<br>
+ ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,<br>
+ ANDROID_CONTROL_AE_LOCK,<br>
+ ANDROID_CONTROL_AE_MODE,<br>
+ ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,<br>
+ ANDROID_CONTROL_AE_STATE,<br>
+ ANDROID_CONTROL_AE_TARGET_FPS_RANGE,<br>
+ ANDROID_CONTROL_AF_MODE,<br>
+ ANDROID_CONTROL_AF_STATE,<br>
+ ANDROID_CONTROL_AF_TRIGGER,<br>
+ ANDROID_CONTROL_AWB_LOCK,<br>
+ ANDROID_CONTROL_AWB_MODE,<br>
+ ANDROID_CONTROL_AWB_STATE,<br>
+ ANDROID_CONTROL_CAPTURE_INTENT,<br>
+ ANDROID_CONTROL_EFFECT_MODE,<br>
+ ANDROID_CONTROL_MODE,<br>
+ ANDROID_CONTROL_SCENE_MODE,<br>
+ ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,<br>
+ ANDROID_FLASH_MODE,<br>
+ ANDROID_FLASH_STATE,<br>
+ ANDROID_JPEG_GPS_COORDINATES,<br>
+ ANDROID_JPEG_GPS_PROCESSING_METHOD,<br>
+ ANDROID_JPEG_GPS_TIMESTAMP,<br>
+ ANDROID_JPEG_ORIENTATION,<br>
+ ANDROID_JPEG_QUALITY,<br>
+ ANDROID_JPEG_SIZE,<br>
+ ANDROID_JPEG_THUMBNAIL_QUALITY,<br>
+ ANDROID_JPEG_THUMBNAIL_SIZE,<br>
+ ANDROID_LENS_APERTURE,<br>
+ ANDROID_LENS_FOCAL_LENGTH,<br>
+ ANDROID_LENS_OPTICAL_STABILIZATION_MODE,<br>
+ ANDROID_LENS_STATE,<br>
+ ANDROID_NOISE_REDUCTION_MODE,<br>
+ ANDROID_REQUEST_PIPELINE_DEPTH,<br>
+ ANDROID_SCALER_CROP_REGION,<br>
+ ANDROID_SENSOR_EXPOSURE_TIME,<br>
+ ANDROID_SENSOR_FRAME_DURATION,<br>
+ ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,<br>
+ ANDROID_SENSOR_TEST_PATTERN_MODE,<br>
+ ANDROID_SENSOR_TIMESTAMP,<br>
+ ANDROID_STATISTICS_FACE_DETECT_MODE,<br>
+ ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,<br>
+ ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,<br>
+ ANDROID_STATISTICS_SCENE_FLICKER,<br>
+ };<br>
+ staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,<br>
+ availableResultKeys);<br>
+<br>
+ if (!staticMetadata_->isValid()) {<br>
+ LOG(HAL, Error) << "Failed to construct static metadata";<br>
+ staticMetadata_.reset();<br>
+ return -EINVAL;<br>
+ }<br>
+<br>
+ if (staticMetadata_->resized()) {<br>
+ auto [entryCount, dataCount] = staticMetadata_->usage();<br>
+ LOG(HAL, Info)<br>
+ << "Static metadata resized: " << entryCount<br>
+ << " entries and " << dataCount << " bytes used";<br>
+ }<br>
+<br>
+ return 0;<br>
+}<br>
+<br>
+/* Translate Android format code to libcamera pixel format. */<br>
+PixelFormat CameraCapabilities::toPixelFormat(int format) const<br>
+{<br>
+ auto it = formatsMap_.find(format);<br>
+ if (it == formatsMap_.end()) {<br>
+ LOG(HAL, Error) << "Requested format " << utils::hex(format)<br>
+ << " not supported";<br>
+ return PixelFormat();<br>
+ }<br>
+<br>
+ return it->second;<br>
+}<br>
+<br>
+std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplatePreview() const<br>
+{<br>
+ /*<br>
+ * \todo Keep this in sync with the actual number of entries.<br>
+ * Currently: 21 entries, 36 bytes<br>
+ */<br>
+ auto requestTemplate = std::make_unique<CameraMetadata>(21, 36);<br>
+ if (!requestTemplate->isValid()) {<br>
+ return nullptr;<br>
+ }<br>
+<br>
+ /* Get the FPS range registered in the static metadata. */<br>
+ camera_metadata_ro_entry_t entry;<br>
+ bool found = staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,<br>
+ &entry);<br>
+ if (!found) {<br>
+ LOG(HAL, Error) << "Cannot create capture template without FPS range";<br>
+ return nullptr;<br>
+ }<br>
+<br>
+ /*<br>
+ * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata<br>
+ * has been assembled as {{min, max}, {max, max}}.<br>
+ */<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,<br>
+ entry.data.i32, 2);<br>
+<br>
+ uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE, aeMode);<br>
+<br>
+ int32_t aeExposureCompensation = 0;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,<br>
+ aeExposureCompensation);<br>
+<br>
+ uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,<br>
+ aePrecaptureTrigger);<br>
+<br>
+ uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK, aeLock);<br>
+<br>
+ uint8_t aeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE,<br>
+ aeAntibandingMode);<br>
+<br>
+ uint8_t afMode = ANDROID_CONTROL_AF_MODE_OFF;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_AF_MODE, afMode);<br>
+<br>
+ uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER, afTrigger);<br>
+<br>
+ uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE, awbMode);<br>
+<br>
+ uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK, awbLock);<br>
+<br>
+ uint8_t flashMode = ANDROID_FLASH_MODE_OFF;<br>
+ requestTemplate->addEntry(ANDROID_FLASH_MODE, flashMode);<br>
+<br>
+ uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;<br>
+ requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,<br>
+ faceDetectMode);<br>
+<br>
+ uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF;<br>
+ requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE,<br>
+ noiseReduction);<br>
+<br>
+ uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;<br>
+ requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,<br>
+ aberrationMode);<br>
+<br>
+ uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_MODE, controlMode);<br>
+<br>
+ float lensAperture = 2.53 / 100;<br>
+ requestTemplate->addEntry(ANDROID_LENS_APERTURE, lensAperture);<br>
+<br>
+ uint8_t opticalStabilization = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;<br>
+ requestTemplate->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,<br>
+ opticalStabilization);<br>
+<br>
+ uint8_t captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;<br>
+ requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,<br>
+ captureIntent);<br>
+<br>
+ return requestTemplate;<br>
+}<br>
+<br>
+std::unique_ptr<CameraMetadata> CameraCapabilities::requestTemplateVideo() const<br>
+{<br>
+ std::unique_ptr<CameraMetadata> previewTemplate = requestTemplatePreview();<br>
+ if (!previewTemplate)<br>
+ return nullptr;<br>
+<br>
+ /*<br>
+ * The video template requires a fixed FPS range. Everything else<br>
+ * stays the same as the preview template.<br>
+ */<br>
+ camera_metadata_ro_entry_t entry;<br>
+ staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,<br>
+ &entry);<br>
+<br>
+ /*<br>
+ * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata<br>
+ * has been assembled as {{min, max}, {max, max}}.<br>
+ */<br>
+ previewTemplate->updateEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,<br>
+ entry.data.i32 + 2, 2);<br>
+<br>
+ return previewTemplate;<br>
+}<br>
diff --git a/src/android/camera_capabilities.h b/src/android/camera_capabilities.h<br>
new file mode 100644<br>
index 000000000000..f511607bbd90<br>
--- /dev/null<br>
+++ b/src/android/camera_capabilities.h<br>
@@ -0,0 +1,65 @@<br>
+/* SPDX-License-Identifier: LGPL-2.1-or-later */<br>
+/*<br>
+ * Copyright (C) 2021, Google Inc.<br>
+ *<br>
+ * camera_capabilities.h - Camera static properties manager<br>
+ */<br>
+#ifndef __ANDROID_CAMERA_CAPABILITIES_H__<br>
+#define __ANDROID_CAMERA_CAPABILITIES_H__<br>
+<br>
+#include <map><br>
+#include <memory><br>
+#include <vector><br>
+<br>
+#include <libcamera/camera.h><br>
+#include <libcamera/class.h><br>
+#include <libcamera/formats.h><br>
+#include <libcamera/geometry.h><br>
+<br>
+#include "camera_metadata.h"<br>
+<br>
+class CameraCapabilities<br>
+{<br>
+public:<br>
+ CameraCapabilities() = default;<br>
+<br>
+ int initialize(std::shared_ptr<libcamera::Camera> camera,<br>
+ int orientation, int facing);<br>
+<br>
+ CameraMetadata *staticMetadata() const { return staticMetadata_.get(); }<br>
+ libcamera::PixelFormat toPixelFormat(int format) const;<br>
+ unsigned int maxJpegBufferSize() const { return maxJpegBufferSize_; }<br>
+<br>
+ std::unique_ptr<CameraMetadata> requestTemplatePreview() const;<br>
+ std::unique_ptr<CameraMetadata> requestTemplateVideo() const;<br>
+<br>
+private:<br>
+ LIBCAMERA_DISABLE_COPY_AND_MOVE(CameraCapabilities)<br>
+<br>
+ struct Camera3StreamConfiguration {<br>
+ libcamera::Size resolution;<br>
+ int androidFormat;<br>
+ };<br>
+<br>
+ std::vector<libcamera::Size><br>
+ getYUVResolutions(libcamera::CameraConfiguration *cameraConfig,<br>
+ const libcamera::PixelFormat &pixelFormat,<br>
+ const std::vector<libcamera::Size> &resolutions);<br>
+ std::vector<libcamera::Size><br>
+ getRawResolutions(const libcamera::PixelFormat &pixelFormat);<br>
+ int initializeStreamConfigurations();<br>
+<br>
+ int initializeStaticMetadata();<br>
+<br>
+ std::shared_ptr<libcamera::Camera> camera_;<br>
+<br>
+ int facing_;<br>
+ int orientation_;<br>
+<br>
+ std::vector<Camera3StreamConfiguration> streamConfigurations_;<br>
+ std::map<int, libcamera::PixelFormat> formatsMap_;<br>
+ std::unique_ptr<CameraMetadata> staticMetadata_;<br>
+ unsigned int maxJpegBufferSize_;<br>
+};<br>
+<br>
+#endif /* __ANDROID_CAMERA_CAPABILITIES_H__ */<br>
diff --git a/src/android/camera_device.cpp b/src/android/camera_device.cpp<br>
index 8c71fd0675d3..4bd125d7020a 100644<br>
--- a/src/android/camera_device.cpp<br>
+++ b/src/android/camera_device.cpp<br>
@@ -10,11 +10,8 @@<br>
#include "camera_ops.h"<br>
#include "post_processor.h"<br>
<br>
-#include <array><br>
-#include <cmath><br>
#include <fstream><br>
#include <sys/mman.h><br>
-#include <tuple><br>
#include <unistd.h><br>
#include <vector><br>
<br>
@@ -23,7 +20,6 @@<br>
#include <libcamera/formats.h><br>
#include <libcamera/property_ids.h><br>
<br>
-#include "libcamera/internal/formats.h"<br>
#include "libcamera/internal/log.h"<br>
#include "libcamera/internal/thread.h"<br>
#include "libcamera/internal/utils.h"<br>
@@ -36,94 +32,6 @@ LOG_DECLARE_CATEGORY(HAL)<br>
<br>
namespace {<br>
<br>
-/*<br>
- * \var camera3Resolutions<br>
- * \brief The list of image resolutions defined as mandatory to be supported by<br>
- * the Android Camera3 specification<br>
- */<br>
-const std::vector<Size> camera3Resolutions = {<br>
- { 320, 240 },<br>
- { 640, 480 },<br>
- { 1280, 720 },<br>
- { 1920, 1080 }<br>
-};<br>
-<br>
-/*<br>
- * \struct Camera3Format<br>
- * \brief Data associated with an Android format identifier<br>
- * \var libcameraFormats List of libcamera pixel formats compatible with the<br>
- * Android format<br>
- * \var name The human-readable representation of the Android format code<br>
- */<br>
-struct Camera3Format {<br>
- std::vector<PixelFormat> libcameraFormats;<br>
- bool mandatory;<br>
- const char *name;<br>
-};<br>
-<br>
-/*<br>
- * \var camera3FormatsMap<br>
- * \brief Associate Android format code with ancillary data<br>
- */<br>
-const std::map<int, const Camera3Format> camera3FormatsMap = {<br>
- {<br>
- HAL_PIXEL_FORMAT_BLOB, {<br>
- { formats::MJPEG },<br>
- true,<br>
- "BLOB"<br>
- }<br>
- }, {<br>
- HAL_PIXEL_FORMAT_YCbCr_420_888, {<br>
- { formats::NV12, formats::NV21 },<br>
- true,<br>
- "YCbCr_420_888"<br>
- }<br>
- }, {<br>
- /*<br>
- * \todo Translate IMPLEMENTATION_DEFINED inspecting the gralloc<br>
- * usage flag. For now, copy the YCbCr_420 configuration.<br>
- */<br>
- HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, {<br>
- { formats::NV12, formats::NV21 },<br>
- true,<br>
- "IMPLEMENTATION_DEFINED"<br>
- }<br>
- }, {<br>
- HAL_PIXEL_FORMAT_RAW10, {<br>
- {<br>
- formats::SBGGR10_CSI2P,<br>
- formats::SGBRG10_CSI2P,<br>
- formats::SGRBG10_CSI2P,<br>
- formats::SRGGB10_CSI2P<br>
- },<br>
- false,<br>
- "RAW10"<br>
- }<br>
- }, {<br>
- HAL_PIXEL_FORMAT_RAW12, {<br>
- {<br>
- formats::SBGGR12_CSI2P,<br>
- formats::SGBRG12_CSI2P,<br>
- formats::SGRBG12_CSI2P,<br>
- formats::SRGGB12_CSI2P<br>
- },<br>
- false,<br>
- "RAW12"<br>
- }<br>
- }, {<br>
- HAL_PIXEL_FORMAT_RAW16, {<br>
- {<br>
- formats::SBGGR16,<br>
- formats::SGBRG16,<br>
- formats::SGRBG16,<br>
- formats::SRGGB16<br>
- },<br>
- false,<br>
- "RAW16"<br>
- }<br>
- },<br>
-};<br>
-<br>
/*<br>
* \struct Camera3StreamConfig<br>
* \brief Data to store StreamConfiguration associated with camera3_stream(s)<br>
@@ -512,242 +420,7 @@ int CameraDevice::initialize(const CameraConfigData *cameraConfigData)<br>
orientation_ = 0;<br>
}<br>
<br>
- /* Acquire the camera and initialize available stream configurations. */<br>
- int ret = camera_->acquire();<br>
- if (ret) {<br>
- LOG(HAL, Error) << "Failed to temporarily acquire the camera";<br>
- return ret;<br>
- }<br>
-<br>
- ret = initializeStreamConfigurations();<br>
- camera_->release();<br>
- return ret;<br>
-}<br>
-<br>
-std::vector<Size> CameraDevice::getYUVResolutions(CameraConfiguration *cameraConfig,<br>
- const PixelFormat &pixelFormat,<br>
- const std::vector<Size> &resolutions)<br>
-{<br>
- std::vector<Size> supportedResolutions;<br>
-<br>
- StreamConfiguration &cfg = cameraConfig->at(0);<br>
- for (const Size &res : resolutions) {<br>
- cfg.pixelFormat = pixelFormat;<br>
- cfg.size = res;<br>
-<br>
- CameraConfiguration::Status status = cameraConfig->validate();<br>
- if (status != CameraConfiguration::Valid) {<br>
- LOG(HAL, Debug) << cfg.toString() << " not supported";<br>
- continue;<br>
- }<br>
-<br>
- LOG(HAL, Debug) << cfg.toString() << " supported";<br>
-<br>
- supportedResolutions.push_back(res);<br>
- }<br>
-<br>
- return supportedResolutions;<br>
-}<br>
-<br>
-std::vector<Size> CameraDevice::getRawResolutions(const libcamera::PixelFormat &pixelFormat)<br>
-{<br>
- std::unique_ptr<CameraConfiguration> cameraConfig =<br>
- camera_->generateConfiguration({ StreamRole::Raw });<br>
- StreamConfiguration &cfg = cameraConfig->at(0);<br>
- const StreamFormats &formats = cfg.formats();<br>
- std::vector<Size> supportedResolutions = formats.sizes(pixelFormat);<br>
-<br>
- return supportedResolutions;<br>
-}<br>
-<br>
-/*<br>
- * Initialize the format conversion map to translate from Android format<br>
- * identifier to libcamera pixel formats and fill in the list of supported<br>
- * stream configurations to be reported to the Android camera framework through<br>
- * the static stream configuration metadata.<br>
- */<br>
-int CameraDevice::initializeStreamConfigurations()<br>
-{<br>
- /*<br>
- * Get the maximum output resolutions<br>
- * \todo Get this from the camera properties once defined<br>
- */<br>
- std::unique_ptr<CameraConfiguration> cameraConfig =<br>
- camera_->generateConfiguration({ StillCapture });<br>
- if (!cameraConfig) {<br>
- LOG(HAL, Error) << "Failed to get maximum resolution";<br>
- return -EINVAL;<br>
- }<br>
- StreamConfiguration &cfg = cameraConfig->at(0);<br>
-<br>
- /*<br>
- * \todo JPEG - Adjust the maximum available resolution by taking the<br>
- * JPEG encoder requirements into account (alignment and aspect ratio).<br>
- */<br>
- const Size maxRes = cfg.size;<br>
- LOG(HAL, Debug) << "Maximum supported resolution: " << maxRes.toString();<br>
-<br>
- /*<br>
- * Build the list of supported image resolutions.<br>
- *<br>
- * The resolutions listed in camera3Resolution are mandatory to be<br>
- * supported, up to the camera maximum resolution.<br>
- *<br>
- * Augment the list by adding resolutions calculated from the camera<br>
- * maximum one.<br>
- */<br>
- std::vector<Size> cameraResolutions;<br>
- std::copy_if(camera3Resolutions.begin(), camera3Resolutions.end(),<br>
- std::back_inserter(cameraResolutions),<br>
- [&](const Size &res) { return res < maxRes; });<br>
-<br>
- /*<br>
- * The Camera3 specification suggests adding 1/2 and 1/4 of the maximum<br>
- * resolution.<br>
- */<br>
- for (unsigned int divider = 2;; divider <<= 1) {<br>
- Size derivedSize{<br>
- maxRes.width / divider,<br>
- maxRes.height / divider,<br>
- };<br>
-<br>
- if (derivedSize.width < 320 ||<br>
- derivedSize.height < 240)<br>
- break;<br>
-<br>
- cameraResolutions.push_back(derivedSize);<br>
- }<br>
- cameraResolutions.push_back(maxRes);<br>
-<br>
- /* Remove duplicated entries from the list of supported resolutions. */<br>
- std::sort(cameraResolutions.begin(), cameraResolutions.end());<br>
- auto last = std::unique(cameraResolutions.begin(), cameraResolutions.end());<br>
- cameraResolutions.erase(last, cameraResolutions.end());<br>
-<br>
- /*<br>
- * Build the list of supported camera formats.<br>
- *<br>
- * To each Android format a list of compatible libcamera formats is<br>
- * associated. The first libcamera format that tests successful is added<br>
- * to the format translation map used when configuring the streams.<br>
- * It is then tested against the list of supported camera resolutions to<br>
- * build the stream configuration map reported through the camera static<br>
- * metadata.<br>
- */<br>
- Size maxJpegSize;<br>
- for (const auto &format : camera3FormatsMap) {<br>
- int androidFormat = format.first;<br>
- const Camera3Format &camera3Format = format.second;<br>
- const std::vector<PixelFormat> &libcameraFormats =<br>
- camera3Format.libcameraFormats;<br>
-<br>
- LOG(HAL, Debug) << "Trying to map Android format "<br>
- << camera3Format.name;<br>
-<br>
- /*<br>
- * JPEG is always supported, either produced directly by the<br>
- * camera, or encoded in the HAL.<br>
- */<br>
- if (androidFormat == HAL_PIXEL_FORMAT_BLOB) {<br>
- formatsMap_[androidFormat] = formats::MJPEG;<br>
- LOG(HAL, Debug) << "Mapped Android format "<br>
- << camera3Format.name << " to "<br>
- << formats::MJPEG.toString()<br>
- << " (fixed mapping)";<br>
- continue;<br>
- }<br>
-<br>
- /*<br>
- * Test the libcamera formats that can produce images<br>
- * compatible with the format defined by Android.<br>
- */<br>
- PixelFormat mappedFormat;<br>
- for (const PixelFormat &pixelFormat : libcameraFormats) {<br>
-<br>
- LOG(HAL, Debug) << "Testing " << pixelFormat.toString();<br>
-<br>
- /*<br>
- * The stream configuration size can be adjusted,<br>
- * not the pixel format.<br>
- *<br>
- * \todo This could be simplified once all pipeline<br>
- * handlers will report the StreamFormats list of<br>
- * supported formats.<br>
- */<br>
- cfg.pixelFormat = pixelFormat;<br>
-<br>
- CameraConfiguration::Status status = cameraConfig->validate();<br>
- if (status != CameraConfiguration::Invalid &&<br>
- cfg.pixelFormat == pixelFormat) {<br>
- mappedFormat = pixelFormat;<br>
- break;<br>
- }<br>
- }<br>
-<br>
- if (!mappedFormat.isValid()) {<br>
- /* If the format is not mandatory, skip it. */<br>
- if (!camera3Format.mandatory)<br>
- continue;<br>
-<br>
- LOG(HAL, Error)<br>
- << "Failed to map mandatory Android format "<br>
- << camera3Format.name << " ("<br>
- << utils::hex(androidFormat) << "): aborting";<br>
- return -EINVAL;<br>
- }<br>
-<br>
- /*<br>
- * Record the mapping and then proceed to generate the<br>
- * stream configurations map, by testing the image resolutions.<br>
- */<br>
- formatsMap_[androidFormat] = mappedFormat;<br>
- LOG(HAL, Debug) << "Mapped Android format "<br>
- << camera3Format.name << " to "<br>
- << mappedFormat.toString();<br>
-<br>
- std::vector<Size> resolutions;<br>
- const PixelFormatInfo &info = PixelFormatInfo::info(mappedFormat);<br>
- if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW)<br>
- resolutions = getRawResolutions(mappedFormat);<br>
- else<br>
- resolutions = getYUVResolutions(cameraConfig.get(),<br>
- mappedFormat,<br>
- cameraResolutions);<br>
-<br>
- for (const Size &res : resolutions) {<br>
- streamConfigurations_.push_back({ res, androidFormat });<br>
-<br>
- /*<br>
- * If the format is HAL_PIXEL_FORMAT_YCbCr_420_888<br>
- * from which JPEG is produced, add an entry for<br>
- * the JPEG stream.<br>
- *<br>
- * \todo Wire the JPEG encoder to query the supported<br>
- * sizes provided a list of formats it can encode.<br>
- *<br>
- * \todo Support JPEG streams produced by the Camera<br>
- * natively.<br>
- */<br>
- if (androidFormat == HAL_PIXEL_FORMAT_YCbCr_420_888) {<br>
- streamConfigurations_.push_back(<br>
- { res, HAL_PIXEL_FORMAT_BLOB });<br>
- maxJpegSize = std::max(maxJpegSize, res);<br>
- }<br>
- }<br>
-<br>
- /*<br>
- * \todo Calculate the maximum JPEG buffer size by asking the<br>
- * encoder giving the maximum frame size required.<br>
- */<br>
- maxJpegBufferSize_ = maxJpegSize.width * maxJpegSize.height * 1.5;<br>
- }<br>
-<br>
- LOG(HAL, Debug) << "Collected stream configuration map: ";<br>
- for (const auto &entry : streamConfigurations_)<br>
- LOG(HAL, Debug) << "{ " << entry.resolution.toString() << " - "<br>
- << utils::hex(entry.androidFormat) << " }";<br>
-<br>
- return 0;<br>
+ return capabilities_.initialize(camera_, orientation_, facing_);<br>
}<br>
<br>
/*<br>
@@ -817,802 +490,19 @@ void CameraDevice::stop()<br>
state_ = State::Stopped;<br>
}<br>
<br>
-void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)<br>
+unsigned int CameraDevice::maxJpegBufferSize() const<br>
{<br>
- callbacks_ = callbacks;<br>
+ return capabilities_.maxJpegBufferSize();<br>
}<br>
<br>
-/*<br>
- * Return static information for the camera.<br>
- */<br>
-const camera_metadata_t *CameraDevice::getStaticMetadata()<br>
-{<br>
- if (staticMetadata_)<br>
- return staticMetadata_->get();<br>
-<br>
- staticMetadata_ = std::make_unique<CameraMetadata>(64, 1024);<br>
- if (!staticMetadata_->isValid()) {<br>
- LOG(HAL, Error) << "Failed to allocate static metadata";<br>
- staticMetadata_.reset();<br>
- return nullptr;<br>
- }<br>
-<br>
- const ControlInfoMap &controlsInfo = camera_->controls();<br>
- const ControlList &properties = camera_->properties();<br>
-<br>
- /* Color correction static metadata. */<br>
- {<br>
- std::vector<uint8_t> data;<br>
- data.reserve(3);<br>
- const auto &infoMap = controlsInfo.find(&controls::draft::ColorCorrectionAberrationMode);<br>
- if (infoMap != controlsInfo.end()) {<br>
- for (const auto &value : infoMap->second.values())<br>
- data.push_back(value.get<int32_t>());<br>
- } else {<br>
- data.push_back(ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF);<br>
- }<br>
- staticMetadata_->addEntry(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,<br>
- data);<br>
- }<br>
-<br>
- /* Control static metadata. */<br>
- std::vector<uint8_t> aeAvailableAntiBandingModes = {<br>
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,<br>
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ,<br>
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ,<br>
- ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,<br>
- aeAvailableAntiBandingModes);<br>
-<br>
- std::vector<uint8_t> aeAvailableModes = {<br>
- ANDROID_CONTROL_AE_MODE_ON,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_MODES,<br>
- aeAvailableModes);<br>
-<br>
- int64_t minFrameDurationNsec = -1;<br>
- int64_t maxFrameDurationNsec = -1;<br>
- const auto frameDurationsInfo = controlsInfo.find(&controls::FrameDurationLimits);<br>
- if (frameDurationsInfo != controlsInfo.end()) {<br>
- minFrameDurationNsec = frameDurationsInfo->second.min().get<int64_t>() * 1000;<br>
- maxFrameDurationNsec = frameDurationsInfo->second.max().get<int64_t>() * 1000;<br>
-<br>
- /*<br>
- * Adjust the minimum frame duration to comply with Android<br>
- * requirements. The camera service mandates all preview/record<br>
- * streams to have a minimum frame duration < 33,366 milliseconds<br>
- * (see MAX_PREVIEW_RECORD_DURATION_NS in the camera service<br>
- * implementation).<br>
- *<br>
- * If we're close enough (+ 500 useconds) to that value, round<br>
- * the minimum frame duration of the camera to an accepted<br>
- * value.<br>
- */<br>
- static constexpr int64_t MAX_PREVIEW_RECORD_DURATION_NS = 1e9 / 29.97;<br>
- if (minFrameDurationNsec > MAX_PREVIEW_RECORD_DURATION_NS &&<br>
- minFrameDurationNsec < MAX_PREVIEW_RECORD_DURATION_NS + 500000)<br>
- minFrameDurationNsec = MAX_PREVIEW_RECORD_DURATION_NS - 1000;<br>
-<br>
- /*<br>
- * The AE routine frame rate limits are computed using the frame<br>
- * duration limits, as libcamera clips the AE routine to the<br>
- * frame durations.<br>
- */<br>
- int32_t maxFps = std::round(1e9 / minFrameDurationNsec);<br>
- int32_t minFps = std::round(1e9 / maxFrameDurationNsec);<br>
- minFps = std::max(1, minFps);<br>
-<br>
- /*<br>
- * Force rounding errors so that we have the proper frame<br>
- * durations for when we reuse these variables later<br>
- */<br>
- minFrameDurationNsec = 1e9 / maxFps;<br>
- maxFrameDurationNsec = 1e9 / minFps;<br>
-<br>
- /*<br>
- * Register to the camera service {min, max} and {max, max}<br>
- * intervals as requested by the metadata documentation.<br>
- */<br>
- int32_t availableAeFpsTarget[] = {<br>
- minFps, maxFps, maxFps, maxFps<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,<br>
- availableAeFpsTarget);<br>
- }<br>
-<br>
- std::vector<int32_t> aeCompensationRange = {<br>
- 0, 0,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_RANGE,<br>
- aeCompensationRange);<br>
-<br>
- const camera_metadata_rational_t aeCompensationStep[] = {<br>
- { 0, 1 }<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_COMPENSATION_STEP,<br>
- aeCompensationStep);<br>
-<br>
- std::vector<uint8_t> availableAfModes = {<br>
- ANDROID_CONTROL_AF_MODE_OFF,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AF_AVAILABLE_MODES,<br>
- availableAfModes);<br>
-<br>
- std::vector<uint8_t> availableEffects = {<br>
- ANDROID_CONTROL_EFFECT_MODE_OFF,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_EFFECTS,<br>
- availableEffects);<br>
-<br>
- std::vector<uint8_t> availableSceneModes = {<br>
- ANDROID_CONTROL_SCENE_MODE_DISABLED,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,<br>
- availableSceneModes);<br>
-<br>
- std::vector<uint8_t> availableStabilizationModes = {<br>
- ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,<br>
- availableStabilizationModes);<br>
-<br>
- /*<br>
- * \todo Inspect the Camera capabilities to report the available<br>
- * AWB modes. Default to AUTO as CTS tests require it.<br>
- */<br>
- std::vector<uint8_t> availableAwbModes = {<br>
- ANDROID_CONTROL_AWB_MODE_AUTO,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AWB_AVAILABLE_MODES,<br>
- availableAwbModes);<br>
-<br>
- std::vector<int32_t> availableMaxRegions = {<br>
- 0, 0, 0,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_MAX_REGIONS,<br>
- availableMaxRegions);<br>
-<br>
- std::vector<uint8_t> sceneModesOverride = {<br>
- ANDROID_CONTROL_AE_MODE_ON,<br>
- ANDROID_CONTROL_AWB_MODE_AUTO,<br>
- ANDROID_CONTROL_AF_MODE_OFF,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,<br>
- sceneModesOverride);<br>
-<br>
- uint8_t aeLockAvailable = ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AE_LOCK_AVAILABLE,<br>
- aeLockAvailable);<br>
-<br>
- uint8_t awbLockAvailable = ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,<br>
- awbLockAvailable);<br>
-<br>
- char availableControlModes = ANDROID_CONTROL_MODE_AUTO;<br>
- staticMetadata_->addEntry(ANDROID_CONTROL_AVAILABLE_MODES,<br>
- availableControlModes);<br>
-<br>
- /* JPEG static metadata. */<br>
-<br>
- /*<br>
- * Create the list of supported thumbnail sizes by inspecting the<br>
- * available JPEG resolutions collected in streamConfigurations_ and<br>
- * generate one entry for each aspect ratio.<br>
- *<br>
- * The JPEG thumbnailer can freely scale, so pick an arbitrary<br>
- * (160, 160) size as the bounding rectangle, which is then cropped to<br>
- * the different supported aspect ratios.<br>
- */<br>
- constexpr Size maxJpegThumbnail(160, 160);<br>
- std::vector<Size> thumbnailSizes;<br>
- thumbnailSizes.push_back({ 0, 0 });<br>
- for (const auto &entry : streamConfigurations_) {<br>
- if (entry.androidFormat != HAL_PIXEL_FORMAT_BLOB)<br>
- continue;<br>
-<br>
- Size thumbnailSize = maxJpegThumbnail<br>
- .boundedToAspectRatio({ entry.resolution.width,<br>
- entry.resolution.height });<br>
- thumbnailSizes.push_back(thumbnailSize);<br>
- }<br>
-<br>
- std::sort(thumbnailSizes.begin(), thumbnailSizes.end());<br>
- auto last = std::unique(thumbnailSizes.begin(), thumbnailSizes.end());<br>
- thumbnailSizes.erase(last, thumbnailSizes.end());<br>
-<br>
- /* Transform sizes in to a list of integers that can be consumed. */<br>
- std::vector<int32_t> thumbnailEntries;<br>
- thumbnailEntries.reserve(thumbnailSizes.size() * 2);<br>
- for (const auto &size : thumbnailSizes) {<br>
- thumbnailEntries.push_back(size.width);<br>
- thumbnailEntries.push_back(size.height);<br>
- }<br>
- staticMetadata_->addEntry(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,<br>
- thumbnailEntries);<br>
-<br>
- staticMetadata_->addEntry(ANDROID_JPEG_MAX_SIZE, maxJpegBufferSize_);<br>
-<br>
- /* Sensor static metadata. */<br>
- std::array<int32_t, 2> pixelArraySize;<br>
- {<br>
- const Size &size = properties.get(properties::PixelArraySize);<br>
- pixelArraySize[0] = size.width;<br>
- pixelArraySize[1] = size.height;<br>
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,<br>
- pixelArraySize);<br>
- }<br>
-<br>
- if (properties.contains(properties::UnitCellSize)) {<br>
- const Size &cellSize = properties.get<Size>(properties::UnitCellSize);<br>
- std::array<float, 2> physicalSize{<br>
- cellSize.width * pixelArraySize[0] / 1e6f,<br>
- cellSize.height * pixelArraySize[1] / 1e6f<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,<br>
- physicalSize);<br>
- }<br>
-<br>
- {<br>
- const Span<const Rectangle> &rects =<br>
- properties.get(properties::PixelArrayActiveAreas);<br>
- std::vector<int32_t> data{<br>
- static_cast<int32_t>(rects[0].x),<br>
- static_cast<int32_t>(rects[0].y),<br>
- static_cast<int32_t>(rects[0].width),<br>
- static_cast<int32_t>(rects[0].height),<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,<br>
- data);<br>
- }<br>
-<br>
- int32_t sensitivityRange[] = {<br>
- 32, 2400,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,<br>
- sensitivityRange);<br>
-<br>
- /* Report the color filter arrangement if the camera reports it. */<br>
- if (properties.contains(properties::draft::ColorFilterArrangement)) {<br>
- uint8_t filterArr = properties.get(properties::draft::ColorFilterArrangement);<br>
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,<br>
- filterArr);<br>
- }<br>
-<br>
- const auto &exposureInfo = controlsInfo.find(&controls::ExposureTime);<br>
- if (exposureInfo != controlsInfo.end()) {<br>
- int64_t exposureTimeRange[2] = {<br>
- exposureInfo->second.min().get<int32_t>() * 1000LL,<br>
- exposureInfo->second.max().get<int32_t>() * 1000LL,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,<br>
- exposureTimeRange, 2);<br>
- }<br>
-<br>
- staticMetadata_->addEntry(ANDROID_SENSOR_ORIENTATION, orientation_);<br>
-<br>
- std::vector<int32_t> testPatternModes = {<br>
- ANDROID_SENSOR_TEST_PATTERN_MODE_OFF<br>
- };<br>
- const auto &testPatternsInfo =<br>
- controlsInfo.find(&controls::draft::TestPatternMode);<br>
- if (testPatternsInfo != controlsInfo.end()) {<br>
- const auto &values = testPatternsInfo->second.values();<br>
- ASSERT(!values.empty());<br>
- for (const auto &value : values) {<br>
- switch (value.get<int32_t>()) {<br>
- case controls::draft::TestPatternModeOff:<br>
- /*<br>
- * ANDROID_SENSOR_TEST_PATTERN_MODE_OFF is<br>
- * already in testPatternModes.<br>
- */<br>
- break;<br>
-<br>
- case controls::draft::TestPatternModeSolidColor:<br>
- testPatternModes.push_back(<br>
- ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR);<br>
- break;<br>
-<br>
- case controls::draft::TestPatternModeColorBars:<br>
- testPatternModes.push_back(<br>
- ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS);<br>
- break;<br>
-<br>
- case controls::draft::TestPatternModeColorBarsFadeToGray:<br>
- testPatternModes.push_back(<br>
- ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY);<br>
- break;<br>
-<br>
- case controls::draft::TestPatternModePn9:<br>
- testPatternModes.push_back(<br>
- ANDROID_SENSOR_TEST_PATTERN_MODE_PN9);<br>
- break;<br>
-<br>
- case controls::draft::TestPatternModeCustom1:<br>
- /* We don't support this yet. */<br>
- break;<br>
-<br>
- default:<br>
- LOG(HAL, Error) << "Unknown test pattern mode: "<br>
- << value.get<int32_t>();<br>
- continue;<br>
- }<br>
- }<br>
- }<br>
- staticMetadata_->addEntry(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,<br>
- testPatternModes);<br>
-<br>
- uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;<br>
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,<br>
- timestampSource);<br>
-<br>
- if (maxFrameDurationNsec > 0)<br>
- staticMetadata_->addEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,<br>
- maxFrameDurationNsec);<br>
-<br>
- /* Statistics static metadata. */<br>
- uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;<br>
- staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,<br>
- faceDetectMode);<br>
-<br>
- int32_t maxFaceCount = 0;<br>
- staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,<br>
- maxFaceCount);<br>
-<br>
- {<br>
- std::vector<uint8_t> data;<br>
- data.reserve(2);<br>
- const auto &infoMap = controlsInfo.find(&controls::draft::LensShadingMapMode);<br>
- if (infoMap != controlsInfo.end()) {<br>
- for (const auto &value : infoMap->second.values())<br>
- data.push_back(value.get<int32_t>());<br>
- } else {<br>
- data.push_back(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF);<br>
- }<br>
- staticMetadata_->addEntry(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,<br>
- data);<br>
- }<br>
-<br>
- /* Sync static metadata. */<br>
- int32_t maxLatency = ANDROID_SYNC_MAX_LATENCY_UNKNOWN;<br>
- staticMetadata_->addEntry(ANDROID_SYNC_MAX_LATENCY, maxLatency);<br>
-<br>
- /* Flash static metadata. */<br>
- char flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;<br>
- staticMetadata_->addEntry(ANDROID_FLASH_INFO_AVAILABLE,<br>
- flashAvailable);<br>
-<br>
- /* Lens static metadata. */<br>
- std::vector<float> lensApertures = {<br>
- 2.53 / 100,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_APERTURES,<br>
- lensApertures);<br>
-<br>
- uint8_t lensFacing;<br>
- switch (facing_) {<br>
- default:<br>
- case CAMERA_FACING_FRONT:<br>
- lensFacing = ANDROID_LENS_FACING_FRONT;<br>
- break;<br>
- case CAMERA_FACING_BACK:<br>
- lensFacing = ANDROID_LENS_FACING_BACK;<br>
- break;<br>
- case CAMERA_FACING_EXTERNAL:<br>
- lensFacing = ANDROID_LENS_FACING_EXTERNAL;<br>
- break;<br>
- }<br>
- staticMetadata_->addEntry(ANDROID_LENS_FACING, lensFacing);<br>
-<br>
- std::vector<float> lensFocalLengths = {<br>
- 1,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,<br>
- lensFocalLengths);<br>
-<br>
- std::vector<uint8_t> opticalStabilizations = {<br>
- ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,<br>
- opticalStabilizations);<br>
-<br>
- float hypeFocalDistance = 0;<br>
- staticMetadata_->addEntry(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,<br>
- hypeFocalDistance);<br>
-<br>
- float minFocusDistance = 0;<br>
- staticMetadata_->addEntry(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,<br>
- minFocusDistance);<br>
-<br>
- /* Noise reduction modes. */<br>
- {<br>
- std::vector<uint8_t> data;<br>
- data.reserve(5);<br>
- const auto &infoMap = controlsInfo.find(&controls::draft::NoiseReductionMode);<br>
- if (infoMap != controlsInfo.end()) {<br>
- for (const auto &value : infoMap->second.values())<br>
- data.push_back(value.get<int32_t>());<br>
- } else {<br>
- data.push_back(ANDROID_NOISE_REDUCTION_MODE_OFF);<br>
- }<br>
- staticMetadata_->addEntry(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,<br>
- data);<br>
- }<br>
-<br>
- /* Scaler static metadata. */<br>
-<br>
- /*<br>
- * \todo The digital zoom factor is a property that depends on the<br>
- * desired output configuration and the sensor frame size input to the<br>
- * ISP. This information is not available to the Android HAL, not at<br>
- * initialization time at least.<br>
- *<br>
- * As a workaround rely on pipeline handlers initializing the<br>
- * ScalerCrop control with the camera default configuration and use the<br>
- * maximum and minimum crop rectangles to calculate the digital zoom<br>
- * factor.<br>
- */<br>
- float maxZoom = 1.0f;<br>
- const auto scalerCrop = controlsInfo.find(&controls::ScalerCrop);<br>
- if (scalerCrop != controlsInfo.end()) {<br>
- Rectangle min = scalerCrop->second.min().get<Rectangle>();<br>
- Rectangle max = scalerCrop->second.max().get<Rectangle>();<br>
- maxZoom = std::min(1.0f * max.width / min.width,<br>
- 1.0f * max.height / min.height);<br>
- }<br>
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,<br>
- maxZoom);<br>
-<br>
- std::vector<uint32_t> availableStreamConfigurations;<br>
- availableStreamConfigurations.reserve(streamConfigurations_.size() * 4);<br>
- for (const auto &entry : streamConfigurations_) {<br>
- availableStreamConfigurations.push_back(entry.androidFormat);<br>
- availableStreamConfigurations.push_back(entry.resolution.width);<br>
- availableStreamConfigurations.push_back(entry.resolution.height);<br>
- availableStreamConfigurations.push_back(<br>
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);<br>
- }<br>
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,<br>
- availableStreamConfigurations);<br>
-<br>
- std::vector<int64_t> availableStallDurations = {<br>
- ANDROID_SCALER_AVAILABLE_FORMATS_BLOB, 2560, 1920, 33333333,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,<br>
- availableStallDurations);<br>
-<br>
- /* Use the minimum frame duration for all the YUV/RGB formats. */<br>
- if (minFrameDurationNsec > 0) {<br>
- std::vector<int64_t> minFrameDurations;<br>
- minFrameDurations.reserve(streamConfigurations_.size() * 4);<br>
- for (const auto &entry : streamConfigurations_) {<br>
- minFrameDurations.push_back(entry.androidFormat);<br>
- minFrameDurations.push_back(entry.resolution.width);<br>
- minFrameDurations.push_back(entry.resolution.height);<br>
- minFrameDurations.push_back(minFrameDurationNsec);<br>
- }<br>
- staticMetadata_->addEntry(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,<br>
- minFrameDurations);<br>
- }<br>
-<br>
- uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;<br>
- staticMetadata_->addEntry(ANDROID_SCALER_CROPPING_TYPE, croppingType);<br>
-<br>
- /* Info static metadata. */<br>
- uint8_t supportedHWLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED;<br>
- staticMetadata_->addEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,<br>
- supportedHWLevel);<br>
-<br>
- /* Request static metadata. */<br>
- int32_t partialResultCount = 1;<br>
- staticMetadata_->addEntry(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,<br>
- partialResultCount);<br>
-<br>
- {<br>
- /* Default the value to 2 if not reported by the camera. */<br>
- uint8_t maxPipelineDepth = 2;<br>
- const auto &infoMap = controlsInfo.find(&controls::draft::PipelineDepth);<br>
- if (infoMap != controlsInfo.end())<br>
- maxPipelineDepth = infoMap->second.max().get<int32_t>();<br>
- staticMetadata_->addEntry(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,<br>
- maxPipelineDepth);<br>
- }<br>
-<br>
- /* LIMITED does not support reprocessing. */<br>
- uint32_t maxNumInputStreams = 0;<br>
- staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,<br>
- maxNumInputStreams);<br>
-<br>
- std::vector<uint8_t> availableCapabilities = {<br>
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE,<br>
- };<br>
-<br>
- /* Report if camera supports RAW. */<br>
- bool rawStreamAvailable = false;<br>
- std::unique_ptr<CameraConfiguration> cameraConfig =<br>
- camera_->generateConfiguration({ StreamRole::Raw });<br>
- if (cameraConfig && !cameraConfig->empty()) {<br>
- const PixelFormatInfo &info =<br>
- PixelFormatInfo::info(cameraConfig->at(0).pixelFormat);<br>
- /* Only advertise RAW support if RAW16 is possible. */<br>
- if (info.colourEncoding == PixelFormatInfo::ColourEncodingRAW &&<br>
- info.bitsPerPixel == 16) {<br>
- rawStreamAvailable = true;<br>
- availableCapabilities.push_back(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);<br>
- }<br>
- }<br>
-<br>
- /* Number of { RAW, YUV, JPEG } supported output streams */<br>
- int32_t numOutStreams[] = { rawStreamAvailable, 2, 1 };<br>
- staticMetadata_->addEntry(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,<br>
- numOutStreams);<br>
-<br>
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,<br>
- availableCapabilities);<br>
-<br>
- std::vector<int32_t> availableCharacteristicsKeys = {<br>
- ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,<br>
- ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,<br>
- ANDROID_CONTROL_AE_AVAILABLE_MODES,<br>
- ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,<br>
- ANDROID_CONTROL_AE_COMPENSATION_RANGE,<br>
- ANDROID_CONTROL_AE_COMPENSATION_STEP,<br>
- ANDROID_CONTROL_AE_LOCK_AVAILABLE,<br>
- ANDROID_CONTROL_AF_AVAILABLE_MODES,<br>
- ANDROID_CONTROL_AVAILABLE_EFFECTS,<br>
- ANDROID_CONTROL_AVAILABLE_MODES,<br>
- ANDROID_CONTROL_AVAILABLE_SCENE_MODES,<br>
- ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,<br>
- ANDROID_CONTROL_AWB_AVAILABLE_MODES,<br>
- ANDROID_CONTROL_AWB_LOCK_AVAILABLE,<br>
- ANDROID_CONTROL_MAX_REGIONS,<br>
- ANDROID_CONTROL_SCENE_MODE_OVERRIDES,<br>
- ANDROID_FLASH_INFO_AVAILABLE,<br>
- ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,<br>
- ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,<br>
- ANDROID_JPEG_MAX_SIZE,<br>
- ANDROID_LENS_FACING,<br>
- ANDROID_LENS_INFO_AVAILABLE_APERTURES,<br>
- ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,<br>
- ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,<br>
- ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,<br>
- ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,<br>
- ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,<br>
- ANDROID_REQUEST_AVAILABLE_CAPABILITIES,<br>
- ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,<br>
- ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,<br>
- ANDROID_REQUEST_PARTIAL_RESULT_COUNT,<br>
- ANDROID_REQUEST_PIPELINE_MAX_DEPTH,<br>
- ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,<br>
- ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,<br>
- ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,<br>
- ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,<br>
- ANDROID_SCALER_CROPPING_TYPE,<br>
- ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,<br>
- ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,<br>
- ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,<br>
- ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,<br>
- ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,<br>
- ANDROID_SENSOR_INFO_PHYSICAL_SIZE,<br>
- ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,<br>
- ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,<br>
- ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,<br>
- ANDROID_SENSOR_ORIENTATION,<br>
- ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,<br>
- ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,<br>
- ANDROID_SYNC_MAX_LATENCY,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,<br>
- availableCharacteristicsKeys);<br>
-<br>
- std::vector<int32_t> availableRequestKeys = {<br>
- ANDROID_COLOR_CORRECTION_ABERRATION_MODE,<br>
- ANDROID_CONTROL_AE_ANTIBANDING_MODE,<br>
- ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,<br>
- ANDROID_CONTROL_AE_LOCK,<br>
- ANDROID_CONTROL_AE_MODE,<br>
- ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,<br>
- ANDROID_CONTROL_AE_TARGET_FPS_RANGE,<br>
- ANDROID_CONTROL_AF_MODE,<br>
- ANDROID_CONTROL_AF_TRIGGER,<br>
- ANDROID_CONTROL_AWB_LOCK,<br>
- ANDROID_CONTROL_AWB_MODE,<br>
- ANDROID_CONTROL_CAPTURE_INTENT,<br>
- ANDROID_CONTROL_EFFECT_MODE,<br>
- ANDROID_CONTROL_MODE,<br>
- ANDROID_CONTROL_SCENE_MODE,<br>
- ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,<br>
- ANDROID_FLASH_MODE,<br>
- ANDROID_JPEG_ORIENTATION,<br>
- ANDROID_JPEG_QUALITY,<br>
- ANDROID_JPEG_THUMBNAIL_QUALITY,<br>
- ANDROID_JPEG_THUMBNAIL_SIZE,<br>
- ANDROID_LENS_APERTURE,<br>
- ANDROID_LENS_OPTICAL_STABILIZATION_MODE,<br>
- ANDROID_NOISE_REDUCTION_MODE,<br>
- ANDROID_SCALER_CROP_REGION,<br>
- ANDROID_STATISTICS_FACE_DETECT_MODE<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,<br>
- availableRequestKeys);<br>
-<br>
- std::vector<int32_t> availableResultKeys = {<br>
- ANDROID_COLOR_CORRECTION_ABERRATION_MODE,<br>
- ANDROID_CONTROL_AE_ANTIBANDING_MODE,<br>
- ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,<br>
- ANDROID_CONTROL_AE_LOCK,<br>
- ANDROID_CONTROL_AE_MODE,<br>
- ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,<br>
- ANDROID_CONTROL_AE_STATE,<br>
- ANDROID_CONTROL_AE_TARGET_FPS_RANGE,<br>
- ANDROID_CONTROL_AF_MODE,<br>
- ANDROID_CONTROL_AF_STATE,<br>
- ANDROID_CONTROL_AF_TRIGGER,<br>
- ANDROID_CONTROL_AWB_LOCK,<br>
- ANDROID_CONTROL_AWB_MODE,<br>
- ANDROID_CONTROL_AWB_STATE,<br>
- ANDROID_CONTROL_CAPTURE_INTENT,<br>
- ANDROID_CONTROL_EFFECT_MODE,<br>
- ANDROID_CONTROL_MODE,<br>
- ANDROID_CONTROL_SCENE_MODE,<br>
- ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,<br>
- ANDROID_FLASH_MODE,<br>
- ANDROID_FLASH_STATE,<br>
- ANDROID_JPEG_GPS_COORDINATES,<br>
- ANDROID_JPEG_GPS_PROCESSING_METHOD,<br>
- ANDROID_JPEG_GPS_TIMESTAMP,<br>
- ANDROID_JPEG_ORIENTATION,<br>
- ANDROID_JPEG_QUALITY,<br>
- ANDROID_JPEG_SIZE,<br>
- ANDROID_JPEG_THUMBNAIL_QUALITY,<br>
- ANDROID_JPEG_THUMBNAIL_SIZE,<br>
- ANDROID_LENS_APERTURE,<br>
- ANDROID_LENS_FOCAL_LENGTH,<br>
- ANDROID_LENS_OPTICAL_STABILIZATION_MODE,<br>
- ANDROID_LENS_STATE,<br>
- ANDROID_NOISE_REDUCTION_MODE,<br>
- ANDROID_REQUEST_PIPELINE_DEPTH,<br>
- ANDROID_SCALER_CROP_REGION,<br>
- ANDROID_SENSOR_EXPOSURE_TIME,<br>
- ANDROID_SENSOR_FRAME_DURATION,<br>
- ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,<br>
- ANDROID_SENSOR_TEST_PATTERN_MODE,<br>
- ANDROID_SENSOR_TIMESTAMP,<br>
- ANDROID_STATISTICS_FACE_DETECT_MODE,<br>
- ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,<br>
- ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,<br>
- ANDROID_STATISTICS_SCENE_FLICKER,<br>
- };<br>
- staticMetadata_->addEntry(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,<br>
- availableResultKeys);<br>
-<br>
- if (!staticMetadata_->isValid()) {<br>
- LOG(HAL, Error) << "Failed to construct static metadata";<br>
- staticMetadata_.reset();<br>
- return nullptr;<br>
- }<br>
-<br>
- if (staticMetadata_->resized()) {<br>
- auto [entryCount, dataCount] = staticMetadata_->usage();<br>
- LOG(HAL, Info)<br>
- << "Static metadata resized: " << entryCount<br>
- << " entries and " << dataCount << " bytes used";<br>
- }<br>
-<br>
- return staticMetadata_->get();<br>
-}<br>
-<br>
-std::unique_ptr<CameraMetadata> CameraDevice::requestTemplatePreview()<br>
+void CameraDevice::setCallbacks(const camera3_callback_ops_t *callbacks)<br>
{<br>
- /*<br>
- * \todo Keep this in sync with the actual number of entries.<br>
- * Currently: 20 entries, 35 bytes<br>
- */<br>
- auto requestTemplate = std::make_unique<CameraMetadata>(21, 36);<br>
- if (!requestTemplate->isValid()) {<br>
- return nullptr;<br>
- }<br>
-<br>
- /* Get the FPS range registered in the static metadata. */<br>
- camera_metadata_ro_entry_t entry;<br>
- bool found = staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,<br>
- &entry);<br>
- if (!found) {<br>
- LOG(HAL, Error) << "Cannot create capture template without FPS range";<br>
- return nullptr;<br>
- }<br>
-<br>
- /*<br>
- * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata<br>
- * has been assembled as {{min, max} {max, max}}.<br>
- */<br>
- requestTemplate->addEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,<br>
- entry.data.i32, 2);<br>
-<br>
- uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_AE_MODE, aeMode);<br>
-<br>
- int32_t aeExposureCompensation = 0;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,<br>
- aeExposureCompensation);<br>
-<br>
- uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,<br>
- aePrecaptureTrigger);<br>
-<br>
- uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_AE_LOCK, aeLock);<br>
-<br>
- uint8_t aeAntibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_AE_ANTIBANDING_MODE,<br>
- aeAntibandingMode);<br>
-<br>
- uint8_t afMode = ANDROID_CONTROL_AF_MODE_OFF;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_AF_MODE, afMode);<br>
-<br>
- uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_AF_TRIGGER, afTrigger);<br>
-<br>
- uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_AWB_MODE, awbMode);<br>
-<br>
- uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_AWB_LOCK, awbLock);<br>
-<br>
- uint8_t flashMode = ANDROID_FLASH_MODE_OFF;<br>
- requestTemplate->addEntry(ANDROID_FLASH_MODE, flashMode);<br>
-<br>
- uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;<br>
- requestTemplate->addEntry(ANDROID_STATISTICS_FACE_DETECT_MODE,<br>
- faceDetectMode);<br>
-<br>
- uint8_t noiseReduction = ANDROID_NOISE_REDUCTION_MODE_OFF;<br>
- requestTemplate->addEntry(ANDROID_NOISE_REDUCTION_MODE,<br>
- noiseReduction);<br>
-<br>
- uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;<br>
- requestTemplate->addEntry(ANDROID_COLOR_CORRECTION_ABERRATION_MODE,<br>
- aberrationMode);<br>
-<br>
- uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_MODE, controlMode);<br>
-<br>
- float lensAperture = 2.53 / 100;<br>
- requestTemplate->addEntry(ANDROID_LENS_APERTURE, lensAperture);<br>
-<br>
- uint8_t opticalStabilization = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;<br>
- requestTemplate->addEntry(ANDROID_LENS_OPTICAL_STABILIZATION_MODE,<br>
- opticalStabilization);<br>
-<br>
- uint8_t captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;<br>
- requestTemplate->addEntry(ANDROID_CONTROL_CAPTURE_INTENT,<br>
- captureIntent);<br>
-<br>
- return requestTemplate;<br>
+ callbacks_ = callbacks;<br>
}<br>
<br>
-std::unique_ptr<CameraMetadata> CameraDevice::requestTemplateVideo()<br>
+const camera_metadata_t *CameraDevice::getStaticMetadata()<br>
{<br>
- std::unique_ptr<CameraMetadata> previewTemplate = requestTemplatePreview();<br>
- if (!previewTemplate)<br>
- return nullptr;<br>
-<br>
- /*<br>
- * The video template requires a fixed FPS range. Everything else<br>
- * stays the same as the preview template.<br>
- */<br>
- camera_metadata_ro_entry_t entry;<br>
- staticMetadata_->getEntry(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,<br>
- &entry);<br>
-<br>
- /*<br>
- * Assume the AE_AVAILABLE_TARGET_FPS_RANGE static metadata<br>
- * has been assembled as {{min, max} {max, max}}.<br>
- */<br>
- previewTemplate->updateEntry(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,<br>
- entry.data.i32 + 2, 2);<br>
-<br>
- return previewTemplate;<br>
+ return capabilities_.staticMetadata()->get();<br>
}<br>
<br>
/*<br>
@@ -1630,7 +520,7 @@ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)<br>
switch (type) {<br>
case CAMERA3_TEMPLATE_PREVIEW:<br>
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;<br>
- requestTemplate = requestTemplatePreview();<br>
+ requestTemplate = capabilities_.requestTemplatePreview();<br>
break;<br>
case CAMERA3_TEMPLATE_STILL_CAPTURE:<br>
/*<br>
@@ -1638,15 +528,15 @@ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)<br>
* for the torch mode we currently do not support.<br>
*/<br>
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;<br>
- requestTemplate = requestTemplatePreview();<br>
+ requestTemplate = capabilities_.requestTemplatePreview();<br>
break;<br>
case CAMERA3_TEMPLATE_VIDEO_RECORD:<br>
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;<br>
- requestTemplate = requestTemplateVideo();<br>
+ requestTemplate = capabilities_.requestTemplateVideo();<br>
break;<br>
case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:<br>
captureIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;<br>
- requestTemplate = requestTemplateVideo();<br>
+ requestTemplate = capabilities_.requestTemplateVideo();<br>
break;<br>
/* \todo Implement templates generation for the remaining use cases. */<br>
case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:<br>
@@ -1668,19 +558,6 @@ const camera_metadata_t *CameraDevice::constructDefaultRequestSettings(int type)<br>
return requestTemplates_[type]->get();<br>
}<br>
<br>
-PixelFormat CameraDevice::toPixelFormat(int format) const<br>
-{<br>
- /* Translate Android format code to libcamera pixel format. */<br>
- auto it = formatsMap_.find(format);<br>
- if (it == formatsMap_.end()) {<br>
- LOG(HAL, Error) << "Requested format " << utils::hex(format)<br>
- << " not supported";<br>
- return PixelFormat();<br>
- }<br>
-<br>
- return it->second;<br>
-}<br>
-<br>
/*<br>
* Inspect the stream_list to produce a list of StreamConfiguration to<br>
* be use to configure the Camera.<br>
@@ -1727,7 +604,7 @@ int CameraDevice::configureStreams(camera3_stream_configuration_t *stream_list)<br>
camera3_stream_t *stream = stream_list->streams[i];<br>
Size size(stream->width, stream->height);<br>
<br>
- PixelFormat format = toPixelFormat(stream->format);<br>
+ PixelFormat format = capabilities_.toPixelFormat(stream->format);<br>
<br>
LOG(HAL, Info) << "Stream #" << i<br>
<< ", direction: " << stream->stream_type<br>
diff --git a/src/android/camera_device.h b/src/android/camera_device.h<br>
index 4aadb27c562c..090fe28a551e 100644<br>
--- a/src/android/camera_device.h<br>
+++ b/src/android/camera_device.h<br>
@@ -10,14 +10,12 @@<br>
#include <map><br>
#include <memory><br>
#include <mutex><br>
-#include <tuple><br>
#include <vector><br>
<br>
#include <hardware/camera3.h><br>
<br>
#include <libcamera/buffer.h><br>
#include <libcamera/camera.h><br>
-#include <libcamera/geometry.h><br>
#include <libcamera/request.h><br>
#include <libcamera/stream.h><br>
<br>
@@ -26,6 +24,7 @@<br>
#include "libcamera/internal/message.h"<br>
#include "libcamera/internal/thread.h"<br>
<br>
+#include "camera_capabilities.h"<br>
#include "camera_metadata.h"<br>
#include "camera_stream.h"<br>
#include "camera_worker.h"<br>
@@ -57,7 +56,7 @@ public:<br>
const std::string &model() const { return model_; }<br>
int facing() const { return facing_; }<br>
int orientation() const { return orientation_; }<br>
- unsigned int maxJpegBufferSize() const { return maxJpegBufferSize_; }<br>
+ unsigned int maxJpegBufferSize() const;<br>
<br>
void setCallbacks(const camera3_callback_ops_t *callbacks);<br>
const camera_metadata_t *getStaticMetadata();<br>
@@ -86,11 +85,6 @@ private:<br>
std::unique_ptr<CaptureRequest> request_;<br>
};<br>
<br>
- struct Camera3StreamConfiguration {<br>
- libcamera::Size resolution;<br>
- int androidFormat;<br>
- };<br>
-<br>
enum class State {<br>
Stopped,<br>
Flushing,<br>
@@ -99,22 +93,11 @@ private:<br>
<br>
void stop();<br>
<br>
- int initializeStreamConfigurations();<br>
- std::vector<libcamera::Size><br>
- getYUVResolutions(libcamera::CameraConfiguration *cameraConfig,<br>
- const libcamera::PixelFormat &pixelFormat,<br>
- const std::vector<libcamera::Size> &resolutions);<br>
- std::vector<libcamera::Size><br>
- getRawResolutions(const libcamera::PixelFormat &pixelFormat);<br>
-<br>
libcamera::FrameBuffer *createFrameBuffer(const buffer_handle_t camera3buffer);<br>
void abortRequest(camera3_capture_request_t *request);<br>
void notifyShutter(uint32_t frameNumber, uint64_t timestamp);<br>
void notifyError(uint32_t frameNumber, camera3_stream_t *stream,<br>
camera3_error_msg_code code);<br>
- std::unique_ptr<CameraMetadata> requestTemplatePreview();<br>
- std::unique_ptr<CameraMetadata> requestTemplateVideo();<br>
- libcamera::PixelFormat toPixelFormat(int format) const;<br>
int processControls(Camera3RequestDescriptor *descriptor);<br>
std::unique_ptr<CameraMetadata> getResultMetadata(<br>
const Camera3RequestDescriptor &descriptor) const;<br>
@@ -129,13 +112,11 @@ private:<br>
<br>
std::shared_ptr<libcamera::Camera> camera_;<br>
std::unique_ptr<libcamera::CameraConfiguration> config_;<br>
+ CameraCapabilities capabilities_;<br>
<br>
- std::unique_ptr<CameraMetadata> staticMetadata_;<br>
std::map<unsigned int, std::unique_ptr<CameraMetadata>> requestTemplates_;<br>
const camera3_callback_ops_t *callbacks_;<br>
<br>
- std::vector<Camera3StreamConfiguration> streamConfigurations_;<br>
- std::map<int, libcamera::PixelFormat> formatsMap_;<br>
std::vector<CameraStream> streams_;<br>
<br>
libcamera::Mutex descriptorsMutex_; /* Protects descriptors_. */<br>
@@ -147,8 +128,6 @@ private:<br>
int facing_;<br>
int orientation_;<br>
<br>
- unsigned int maxJpegBufferSize_;<br>
-<br>
CameraMetadata lastSettings_;<br>
};<br>
<br>
diff --git a/src/android/meson.build b/src/android/meson.build<br>
index f27fd5316705..6270fb201338 100644<br>
--- a/src/android/meson.build<br>
+++ b/src/android/meson.build<br>
@@ -44,6 +44,7 @@ subdir('cros')<br>
<br>
android_hal_sources = files([<br>
'camera3_hal.cpp',<br>
+ 'camera_capabilities.cpp',<br>
'camera_device.cpp',<br>
'camera_hal_config.cpp',<br>
'camera_hal_manager.cpp',<br>
-- <br>
2.31.1<br>
<br>
</blockquote></div></div></div>