blob: 6f9b38f9cf3e682a7a8671904b3db459b887b769 [file] [log] [blame]
/*
* Copyright (C) 2022 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Autogenerated from camera metadata definitions in
* /system/media/camera/docs/metadata_definitions.xml
* *** DO NOT EDIT BY HAND ***
*/
package android.hardware.camera.metadata;
import android.hardware.camera.metadata.CameraMetadataSectionStart;
/**
* Main enumeration for defining camera metadata tags added in this revision
*
* <p>Partial documentation is included for each tag; for complete documentation, reference
* '/system/media/camera/docs/docs.html' in the corresponding Android source tree.</p>
*/
@VintfStability
@Backing(type="int")
enum CameraMetadataTag {
/**
* android.colorCorrection.mode [dynamic, enum, public]
*
* <p>The mode control selects how the image data is converted from the
* sensor's native color into linear sRGB color.</p>
*/
ANDROID_COLOR_CORRECTION_MODE = CameraMetadataSectionStart.ANDROID_COLOR_CORRECTION_START,
/**
* android.colorCorrection.transform [dynamic, rational[], public]
*
* <p>A color transform matrix to use to transform
* from sensor RGB color space to output linear sRGB color space.</p>
*/
ANDROID_COLOR_CORRECTION_TRANSFORM,
/**
* android.colorCorrection.gains [dynamic, float[], public]
*
* <p>Gains applying to Bayer raw color channels for
* white-balance.</p>
*/
ANDROID_COLOR_CORRECTION_GAINS,
/**
* android.colorCorrection.aberrationMode [dynamic, enum, public]
*
* <p>Mode of operation for the chromatic aberration correction algorithm.</p>
*/
ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
/**
* android.colorCorrection.availableAberrationModes [static, byte[], public]
*
* <p>List of aberration correction modes for ANDROID_COLOR_CORRECTION_ABERRATION_MODE that are
* supported by this camera device.</p>
*
* @see ANDROID_COLOR_CORRECTION_ABERRATION_MODE
*/
ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
/**
* android.control.aeAntibandingMode [dynamic, enum, public]
*
* <p>The desired setting for the camera device's auto-exposure
* algorithm's antibanding compensation.</p>
*/
ANDROID_CONTROL_AE_ANTIBANDING_MODE = CameraMetadataSectionStart.ANDROID_CONTROL_START,
/**
* android.control.aeExposureCompensation [dynamic, int32, public]
*
* <p>Adjustment to auto-exposure (AE) target image
* brightness.</p>
*/
ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
/**
* android.control.aeLock [dynamic, enum, public]
*
* <p>Whether auto-exposure (AE) is currently locked to its latest
* calculated values.</p>
*/
ANDROID_CONTROL_AE_LOCK,
/**
* android.control.aeMode [dynamic, enum, public]
*
* <p>The desired mode for the camera device's
* auto-exposure routine.</p>
*/
ANDROID_CONTROL_AE_MODE,
/**
* android.control.aeRegions [dynamic, int32[], public]
*
* <p>List of metering areas to use for auto-exposure adjustment.</p>
*/
ANDROID_CONTROL_AE_REGIONS,
/**
* android.control.aeTargetFpsRange [dynamic, int32[], public]
*
* <p>Range over which the auto-exposure routine can
* adjust the capture frame rate to maintain good
* exposure.</p>
*/
ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
/**
* android.control.aePrecaptureTrigger [dynamic, enum, public]
*
* <p>Whether the camera device will trigger a precapture
* metering sequence when it processes this request.</p>
*/
ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
/**
* android.control.afMode [dynamic, enum, public]
*
* <p>Whether auto-focus (AF) is currently enabled, and what
* mode it is set to.</p>
*/
ANDROID_CONTROL_AF_MODE,
/**
* android.control.afRegions [dynamic, int32[], public]
*
* <p>List of metering areas to use for auto-focus.</p>
*/
ANDROID_CONTROL_AF_REGIONS,
/**
* android.control.afTrigger [dynamic, enum, public]
*
* <p>Whether the camera device will trigger autofocus for this request.</p>
*/
ANDROID_CONTROL_AF_TRIGGER,
/**
* android.control.awbLock [dynamic, enum, public]
*
* <p>Whether auto-white balance (AWB) is currently locked to its
* latest calculated values.</p>
*/
ANDROID_CONTROL_AWB_LOCK,
/**
* android.control.awbMode [dynamic, enum, public]
*
* <p>Whether auto-white balance (AWB) is currently setting the color
* transform fields, and what its illumination target
* is.</p>
*/
ANDROID_CONTROL_AWB_MODE,
/**
* android.control.awbRegions [dynamic, int32[], public]
*
* <p>List of metering areas to use for auto-white-balance illuminant
* estimation.</p>
*/
ANDROID_CONTROL_AWB_REGIONS,
/**
* android.control.captureIntent [dynamic, enum, public]
*
* <p>Information to the camera device 3A (auto-exposure,
* auto-focus, auto-white balance) routines about the purpose
* of this capture, to help the camera device to decide optimal 3A
* strategy.</p>
*/
ANDROID_CONTROL_CAPTURE_INTENT,
/**
* android.control.effectMode [dynamic, enum, public]
*
* <p>A special color effect to apply.</p>
*/
ANDROID_CONTROL_EFFECT_MODE,
/**
* android.control.mode [dynamic, enum, public]
*
* <p>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control
* routines.</p>
*/
ANDROID_CONTROL_MODE,
/**
* android.control.sceneMode [dynamic, enum, public]
*
* <p>Control for which scene mode is currently active.</p>
*/
ANDROID_CONTROL_SCENE_MODE,
/**
* android.control.videoStabilizationMode [dynamic, enum, public]
*
* <p>Whether video stabilization is
* active.</p>
*/
ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
/**
* android.control.aeAvailableAntibandingModes [static, byte[], public]
*
* <p>List of auto-exposure antibanding modes for ANDROID_CONTROL_AE_ANTIBANDING_MODE that are
* supported by this camera device.</p>
*
* @see ANDROID_CONTROL_AE_ANTIBANDING_MODE
*/
ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
/**
* android.control.aeAvailableModes [static, byte[], public]
*
* <p>List of auto-exposure modes for ANDROID_CONTROL_AE_MODE that are supported by this camera
* device.</p>
*
* @see ANDROID_CONTROL_AE_MODE
*/
ANDROID_CONTROL_AE_AVAILABLE_MODES,
/**
* android.control.aeAvailableTargetFpsRanges [static, int32[], public]
*
* <p>List of frame rate ranges for ANDROID_CONTROL_AE_TARGET_FPS_RANGE supported by
* this camera device.</p>
*
* @see ANDROID_CONTROL_AE_TARGET_FPS_RANGE
*/
ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
/**
* android.control.aeCompensationRange [static, int32[], public]
*
* <p>Maximum and minimum exposure compensation values for
* ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, in counts of ANDROID_CONTROL_AE_COMPENSATION_STEP,
* that are supported by this camera device.</p>
*
* @see ANDROID_CONTROL_AE_COMPENSATION_STEP
* @see ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION
*/
ANDROID_CONTROL_AE_COMPENSATION_RANGE,
/**
* android.control.aeCompensationStep [static, rational, public]
*
* <p>Smallest step by which the exposure compensation
* can be changed.</p>
*/
ANDROID_CONTROL_AE_COMPENSATION_STEP,
/**
* android.control.afAvailableModes [static, byte[], public]
*
* <p>List of auto-focus (AF) modes for ANDROID_CONTROL_AF_MODE that are
* supported by this camera device.</p>
*
* @see ANDROID_CONTROL_AF_MODE
*/
ANDROID_CONTROL_AF_AVAILABLE_MODES,
/**
* android.control.availableEffects [static, byte[], public]
*
* <p>List of color effects for ANDROID_CONTROL_EFFECT_MODE that are supported by this camera
* device.</p>
*
* @see ANDROID_CONTROL_EFFECT_MODE
*/
ANDROID_CONTROL_AVAILABLE_EFFECTS,
/**
* android.control.availableSceneModes [static, byte[], public]
*
* <p>List of scene modes for ANDROID_CONTROL_SCENE_MODE that are supported by this camera
* device.</p>
*
* @see ANDROID_CONTROL_SCENE_MODE
*/
ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
/**
* android.control.availableVideoStabilizationModes [static, byte[], public]
*
* <p>List of video stabilization modes for ANDROID_CONTROL_VIDEO_STABILIZATION_MODE
* that are supported by this camera device.</p>
*
* @see ANDROID_CONTROL_VIDEO_STABILIZATION_MODE
*/
ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
/**
* android.control.awbAvailableModes [static, byte[], public]
*
* <p>List of auto-white-balance modes for ANDROID_CONTROL_AWB_MODE that are supported by this
* camera device.</p>
*
* @see ANDROID_CONTROL_AWB_MODE
*/
ANDROID_CONTROL_AWB_AVAILABLE_MODES,
/**
* android.control.maxRegions [static, int32[], ndk_public]
*
* <p>List of the maximum number of regions that can be used for metering in
* auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF);
* this corresponds to the maximum number of elements in
* ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AWB_REGIONS,
* and ANDROID_CONTROL_AF_REGIONS.</p>
*
* @see ANDROID_CONTROL_AE_REGIONS
* @see ANDROID_CONTROL_AF_REGIONS
* @see ANDROID_CONTROL_AWB_REGIONS
*/
ANDROID_CONTROL_MAX_REGIONS,
/**
* android.control.sceneModeOverrides [static, byte[], system]
*
* <p>Ordered list of auto-exposure, auto-white balance, and auto-focus
* settings to use with each available scene mode.</p>
*/
ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
/**
* android.control.aePrecaptureId [dynamic, int32, system]
*
* <p>The ID sent with the latest
* CAMERA2_TRIGGER_PRECAPTURE_METERING call</p>
*/
ANDROID_CONTROL_AE_PRECAPTURE_ID,
/**
* android.control.aeState [dynamic, enum, public]
*
* <p>Current state of the auto-exposure (AE) algorithm.</p>
*/
ANDROID_CONTROL_AE_STATE,
/**
* android.control.afState [dynamic, enum, public]
*
* <p>Current state of auto-focus (AF) algorithm.</p>
*/
ANDROID_CONTROL_AF_STATE,
/**
* android.control.afTriggerId [dynamic, int32, system]
*
* <p>The ID sent with the latest
* CAMERA2_TRIGGER_AUTOFOCUS call</p>
*/
ANDROID_CONTROL_AF_TRIGGER_ID,
/**
* android.control.awbState [dynamic, enum, public]
*
* <p>Current state of auto-white balance (AWB) algorithm.</p>
*/
ANDROID_CONTROL_AWB_STATE,
/**
* android.control.availableHighSpeedVideoConfigurations [static, int32[], hidden]
*
* <p>List of available high speed video size, fps range and max batch size configurations
* supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max).</p>
*/
ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
/**
* android.control.aeLockAvailable [static, enum, public]
*
* <p>Whether the camera device supports ANDROID_CONTROL_AE_LOCK</p>
*
* @see ANDROID_CONTROL_AE_LOCK
*/
ANDROID_CONTROL_AE_LOCK_AVAILABLE,
/**
* android.control.awbLockAvailable [static, enum, public]
*
* <p>Whether the camera device supports ANDROID_CONTROL_AWB_LOCK</p>
*
* @see ANDROID_CONTROL_AWB_LOCK
*/
ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
/**
* android.control.availableModes [static, byte[], public]
*
* <p>List of control modes for ANDROID_CONTROL_MODE that are supported by this camera
* device.</p>
*
* @see ANDROID_CONTROL_MODE
*/
ANDROID_CONTROL_AVAILABLE_MODES,
/**
* android.control.postRawSensitivityBoostRange [static, int32[], public]
*
* <p>Range of boosts for ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST supported
* by this camera device.</p>
*
* @see ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST
*/
ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
/**
* android.control.postRawSensitivityBoost [dynamic, int32, public]
*
* <p>The amount of additional sensitivity boost applied to output images
* after RAW sensor data is captured.</p>
*/
ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
/**
* android.control.enableZsl [dynamic, enum, public]
*
* <p>Allow camera device to enable zero-shutter-lag mode for requests with
* ANDROID_CONTROL_CAPTURE_INTENT == STILL_CAPTURE.</p>
*
* @see ANDROID_CONTROL_CAPTURE_INTENT
*/
ANDROID_CONTROL_ENABLE_ZSL,
/**
* android.control.afSceneChange [dynamic, enum, public]
*
* <p>Whether a significant scene change is detected within the currently-set AF
* region(s).</p>
*/
ANDROID_CONTROL_AF_SCENE_CHANGE,
/**
* android.control.availableExtendedSceneModeMaxSizes [static, int32[], ndk_public]
*
* <p>The list of extended scene modes for ANDROID_CONTROL_EXTENDED_SCENE_MODE that are supported
* by this camera device, and each extended scene mode's maximum streaming (non-stall) size
* with effect.</p>
*
* @see ANDROID_CONTROL_EXTENDED_SCENE_MODE
*/
ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES,
/**
* android.control.availableExtendedSceneModeZoomRatioRanges [static, float[], ndk_public]
*
* <p>The ranges of supported zoom ratio for non-DISABLED ANDROID_CONTROL_EXTENDED_SCENE_MODE.</p>
*
* @see ANDROID_CONTROL_EXTENDED_SCENE_MODE
*/
ANDROID_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_ZOOM_RATIO_RANGES,
/**
* android.control.extendedSceneMode [dynamic, enum, public]
*
* <p>Whether extended scene mode is enabled for a particular capture request.</p>
*/
ANDROID_CONTROL_EXTENDED_SCENE_MODE,
/**
* android.control.zoomRatioRange [static, float[], public]
*
* <p>Minimum and maximum zoom ratios supported by this camera device.</p>
*/
ANDROID_CONTROL_ZOOM_RATIO_RANGE,
/**
* android.control.zoomRatio [dynamic, float, public]
*
* <p>The desired zoom ratio</p>
*/
ANDROID_CONTROL_ZOOM_RATIO,
/**
* android.control.availableHighSpeedVideoConfigurationsMaximumResolution [static, int32[], hidden]
*
* <p>List of available high speed video size, fps range and max batch size configurations
* supported by the camera device, in the format of
* (width, height, fps_min, fps_max, batch_size_max),
* when ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS_MAXIMUM_RESOLUTION,
/**
* android.control.settingsOverride [dynamic, enum, public]
*
* <p>The desired CaptureRequest settings override with which certain keys are
* applied earlier so that they can take effect sooner.</p>
*/
ANDROID_CONTROL_SETTINGS_OVERRIDE = 65588,
/**
* android.control.availableSettingsOverrides [static, int32[], public]
*
* <p>List of available settings overrides supported by the camera device that can
* be used to speed up certain controls.</p>
*/
ANDROID_CONTROL_AVAILABLE_SETTINGS_OVERRIDES,
/**
* android.control.settingsOverridingFrameNumber [dynamic, int32, system]
*
* <p>The frame number of the newer request overriding this capture.</p>
*/
ANDROID_CONTROL_SETTINGS_OVERRIDING_FRAME_NUMBER,
/**
* android.control.autoframing [dynamic, enum, public]
*
* <p>Automatic crop, pan and zoom to keep objects in the center of the frame.</p>
*/
ANDROID_CONTROL_AUTOFRAMING,
/**
* android.control.autoframingAvailable [static, enum, public]
*
* <p>Whether the camera device supports ANDROID_CONTROL_AUTOFRAMING.</p>
*
* @see ANDROID_CONTROL_AUTOFRAMING
*/
ANDROID_CONTROL_AUTOFRAMING_AVAILABLE,
/**
* android.control.autoframingState [dynamic, enum, public]
*
* <p>Current state of auto-framing.</p>
*/
ANDROID_CONTROL_AUTOFRAMING_STATE,
/**
* android.control.lowLightBoostInfoLuminanceRange [static, float[], public]
*
* <p>The operating luminance range of low light boost measured in lux (lx).</p>
*/
ANDROID_CONTROL_LOW_LIGHT_BOOST_INFO_LUMINANCE_RANGE,
/**
* android.control.lowLightBoostState [dynamic, enum, public]
*
* <p>Current state of the low light boost AE mode.</p>
*/
ANDROID_CONTROL_LOW_LIGHT_BOOST_STATE,
/**
* android.demosaic.mode [controls, enum, system]
*
* <p>Controls the quality of the demosaicing
* processing.</p>
*/
ANDROID_DEMOSAIC_MODE = CameraMetadataSectionStart.ANDROID_DEMOSAIC_START,
/**
* android.edge.mode [dynamic, enum, public]
*
* <p>Operation mode for edge
* enhancement.</p>
*/
ANDROID_EDGE_MODE = CameraMetadataSectionStart.ANDROID_EDGE_START,
/**
* android.edge.strength [controls, byte, system]
*
* <p>Control the amount of edge enhancement
* applied to the images</p>
*/
ANDROID_EDGE_STRENGTH,
/**
* android.edge.availableEdgeModes [static, byte[], public]
*
* <p>List of edge enhancement modes for ANDROID_EDGE_MODE that are supported by this camera
* device.</p>
*
* @see ANDROID_EDGE_MODE
*/
ANDROID_EDGE_AVAILABLE_EDGE_MODES,
/**
* android.flash.firingPower [dynamic, byte, system]
*
* <p>Power for flash firing/torch</p>
*/
ANDROID_FLASH_FIRING_POWER = CameraMetadataSectionStart.ANDROID_FLASH_START,
/**
* android.flash.firingTime [dynamic, int64, system]
*
* <p>Firing time of flash relative to start of
* exposure</p>
*/
ANDROID_FLASH_FIRING_TIME,
/**
* android.flash.mode [dynamic, enum, public]
*
* <p>The desired mode for for the camera device's flash control.</p>
*/
ANDROID_FLASH_MODE,
/**
* android.flash.colorTemperature [static, byte, system]
*
* <p>The x,y whitepoint of the
* flash</p>
*/
ANDROID_FLASH_COLOR_TEMPERATURE,
/**
* android.flash.maxEnergy [static, byte, system]
*
* <p>Max energy output of the flash for a full
* power single flash</p>
*/
ANDROID_FLASH_MAX_ENERGY,
/**
* android.flash.state [dynamic, enum, public]
*
* <p>Current state of the flash
* unit.</p>
*/
ANDROID_FLASH_STATE,
/**
* android.flash.strengthLevel [dynamic, int32, public]
*
* <p>Flash strength level to be used when manual flash control is active.</p>
*/
ANDROID_FLASH_STRENGTH_LEVEL,
/**
* android.flash.singleStrengthMaxLevel [static, int32, public]
*
* <p>Maximum flash brightness level for manual flash control in SINGLE mode.</p>
*/
ANDROID_FLASH_SINGLE_STRENGTH_MAX_LEVEL,
/**
* android.flash.singleStrengthDefaultLevel [static, int32, public]
*
* <p>Default flash brightness level for manual flash control in SINGLE mode.</p>
*/
ANDROID_FLASH_SINGLE_STRENGTH_DEFAULT_LEVEL,
/**
* android.flash.torchStrengthMaxLevel [static, int32, public]
*
* <p>Maximum flash brightness level for manual flash control in TORCH mode</p>
*/
ANDROID_FLASH_TORCH_STRENGTH_MAX_LEVEL,
/**
* android.flash.torchStrengthDefaultLevel [static, int32, public]
*
* <p>Default flash brightness level for manual flash control in TORCH mode</p>
*/
ANDROID_FLASH_TORCH_STRENGTH_DEFAULT_LEVEL,
/**
* android.flash.info.available [static, enum, public]
*
* <p>Whether this camera device has a
* flash unit.</p>
*/
ANDROID_FLASH_INFO_AVAILABLE = CameraMetadataSectionStart.ANDROID_FLASH_INFO_START,
/**
* android.flash.info.chargeDuration [static, int64, system]
*
* <p>Time taken before flash can fire
* again</p>
*/
ANDROID_FLASH_INFO_CHARGE_DURATION,
/**
* android.flash.info.strengthMaximumLevel [static, int32, public]
*
* <p>Maximum flashlight brightness level.</p>
*/
ANDROID_FLASH_INFO_STRENGTH_MAXIMUM_LEVEL,
/**
* android.flash.info.strengthDefaultLevel [static, int32, public]
*
* <p>Default flashlight brightness level to be set via
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraManager.html#turnOnTorchWithStrengthLevel">CameraManager#turnOnTorchWithStrengthLevel</a>.</p>
*/
ANDROID_FLASH_INFO_STRENGTH_DEFAULT_LEVEL,
/**
* android.hotPixel.mode [dynamic, enum, public]
*
* <p>Operational mode for hot pixel correction.</p>
*/
ANDROID_HOT_PIXEL_MODE = CameraMetadataSectionStart.ANDROID_HOT_PIXEL_START,
/**
* android.hotPixel.availableHotPixelModes [static, byte[], public]
*
* <p>List of hot pixel correction modes for ANDROID_HOT_PIXEL_MODE that are supported by this
* camera device.</p>
*
* @see ANDROID_HOT_PIXEL_MODE
*/
ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
/**
* android.jpeg.gpsCoordinates [dynamic, double[], ndk_public]
*
* <p>GPS coordinates to include in output JPEG
* EXIF.</p>
*/
ANDROID_JPEG_GPS_COORDINATES = CameraMetadataSectionStart.ANDROID_JPEG_START,
/**
* android.jpeg.gpsProcessingMethod [dynamic, byte, ndk_public]
*
* <p>32 characters describing GPS algorithm to
* include in EXIF.</p>
*/
ANDROID_JPEG_GPS_PROCESSING_METHOD,
/**
* android.jpeg.gpsTimestamp [dynamic, int64, ndk_public]
*
* <p>Time GPS fix was made to include in
* EXIF.</p>
*/
ANDROID_JPEG_GPS_TIMESTAMP,
/**
* android.jpeg.orientation [dynamic, int32, public]
*
* <p>The orientation for a JPEG image.</p>
*/
ANDROID_JPEG_ORIENTATION,
/**
* android.jpeg.quality [dynamic, byte, public]
*
* <p>Compression quality of the final JPEG
* image.</p>
*/
ANDROID_JPEG_QUALITY,
/**
* android.jpeg.thumbnailQuality [dynamic, byte, public]
*
* <p>Compression quality of JPEG
* thumbnail.</p>
*/
ANDROID_JPEG_THUMBNAIL_QUALITY,
/**
* android.jpeg.thumbnailSize [dynamic, int32[], public]
*
* <p>Resolution of embedded JPEG thumbnail.</p>
*/
ANDROID_JPEG_THUMBNAIL_SIZE,
/**
* android.jpeg.availableThumbnailSizes [static, int32[], public]
*
* <p>List of JPEG thumbnail sizes for ANDROID_JPEG_THUMBNAIL_SIZE supported by this
* camera device.</p>
*
* @see ANDROID_JPEG_THUMBNAIL_SIZE
*/
ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
/**
* android.jpeg.maxSize [static, int32, system]
*
* <p>Maximum size in bytes for the compressed
* JPEG buffer, in default sensor pixel mode (see ANDROID_SENSOR_PIXEL_MODE)</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_JPEG_MAX_SIZE,
/**
* android.jpeg.size [dynamic, int32, system]
*
* <p>The size of the compressed JPEG image, in
* bytes</p>
*/
ANDROID_JPEG_SIZE,
/**
* android.lens.aperture [dynamic, float, public]
*
* <p>The desired lens aperture size, as a ratio of lens focal length to the
* effective aperture diameter.</p>
*/
ANDROID_LENS_APERTURE = CameraMetadataSectionStart.ANDROID_LENS_START,
/**
* android.lens.filterDensity [dynamic, float, public]
*
* <p>The desired setting for the lens neutral density filter(s).</p>
*/
ANDROID_LENS_FILTER_DENSITY,
/**
* android.lens.focalLength [dynamic, float, public]
*
* <p>The desired lens focal length; used for optical zoom.</p>
*/
ANDROID_LENS_FOCAL_LENGTH,
/**
* android.lens.focusDistance [dynamic, float, public]
*
* <p>Desired distance to plane of sharpest focus,
* measured from frontmost surface of the lens.</p>
*/
ANDROID_LENS_FOCUS_DISTANCE,
/**
* android.lens.opticalStabilizationMode [dynamic, enum, public]
*
* <p>Sets whether the camera device uses optical image stabilization (OIS)
* when capturing images.</p>
*/
ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
/**
* android.lens.facing [static, enum, public]
*
* <p>Direction the camera faces relative to
* device screen.</p>
*/
ANDROID_LENS_FACING,
/**
* android.lens.poseRotation [dynamic, float[], public]
*
* <p>The orientation of the camera relative to the sensor
* coordinate system.</p>
*/
ANDROID_LENS_POSE_ROTATION,
/**
* android.lens.poseTranslation [dynamic, float[], public]
*
* <p>Position of the camera optical center.</p>
*/
ANDROID_LENS_POSE_TRANSLATION,
/**
* android.lens.focusRange [dynamic, float[], public]
*
* <p>The range of scene distances that are in
* sharp focus (depth of field).</p>
*/
ANDROID_LENS_FOCUS_RANGE,
/**
* android.lens.state [dynamic, enum, public]
*
* <p>Current lens status.</p>
*/
ANDROID_LENS_STATE,
/**
* android.lens.intrinsicCalibration [dynamic, float[], public]
*
* <p>The parameters for this camera device's intrinsic
* calibration.</p>
*/
ANDROID_LENS_INTRINSIC_CALIBRATION,
/**
* android.lens.radialDistortion [dynamic, float[], public]
*
* <p>The correction coefficients to correct for this camera device's
* radial and tangential lens distortion.</p>
*/
ANDROID_LENS_RADIAL_DISTORTION,
/**
* android.lens.poseReference [static, enum, public]
*
* <p>The origin for ANDROID_LENS_POSE_TRANSLATION, and the accuracy of
* ANDROID_LENS_POSE_TRANSLATION and ANDROID_LENS_POSE_ROTATION.</p>
*
* @see ANDROID_LENS_POSE_ROTATION
* @see ANDROID_LENS_POSE_TRANSLATION
*/
ANDROID_LENS_POSE_REFERENCE,
/**
* android.lens.distortion [dynamic, float[], public]
*
* <p>The correction coefficients to correct for this camera device's
* radial and tangential lens distortion.</p>
* <p>Replaces the deprecated ANDROID_LENS_RADIAL_DISTORTION field, which was
* inconsistently defined.</p>
*
* @see ANDROID_LENS_RADIAL_DISTORTION
*/
ANDROID_LENS_DISTORTION,
/**
* android.lens.distortionMaximumResolution [static, float[], public]
*
* <p>The correction coefficients to correct for this camera device's
* radial and tangential lens distortion for a
* CaptureRequest with ANDROID_SENSOR_PIXEL_MODE set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_LENS_DISTORTION_MAXIMUM_RESOLUTION,
/**
* android.lens.intrinsicCalibrationMaximumResolution [static, float[], public]
*
* <p>The parameters for this camera device's intrinsic
* calibration when ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_LENS_INTRINSIC_CALIBRATION_MAXIMUM_RESOLUTION,
/**
* android.lens.info.availableApertures [static, float[], public]
*
* <p>List of aperture size values for ANDROID_LENS_APERTURE that are
* supported by this camera device.</p>
*
* @see ANDROID_LENS_APERTURE
*/
ANDROID_LENS_INFO_AVAILABLE_APERTURES = CameraMetadataSectionStart.ANDROID_LENS_INFO_START,
/**
* android.lens.info.availableFilterDensities [static, float[], public]
*
* <p>List of neutral density filter values for
* ANDROID_LENS_FILTER_DENSITY that are supported by this camera device.</p>
*
* @see ANDROID_LENS_FILTER_DENSITY
*/
ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
/**
* android.lens.info.availableFocalLengths [static, float[], public]
*
* <p>List of focal lengths for ANDROID_LENS_FOCAL_LENGTH that are supported by this camera
* device.</p>
*
* @see ANDROID_LENS_FOCAL_LENGTH
*/
ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
/**
* android.lens.info.availableOpticalStabilization [static, byte[], public]
*
* <p>List of optical image stabilization (OIS) modes for
* ANDROID_LENS_OPTICAL_STABILIZATION_MODE that are supported by this camera device.</p>
*
* @see ANDROID_LENS_OPTICAL_STABILIZATION_MODE
*/
ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
/**
* android.lens.info.hyperfocalDistance [static, float, public]
*
* <p>Hyperfocal distance for this lens.</p>
*/
ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
/**
* android.lens.info.minimumFocusDistance [static, float, public]
*
* <p>Shortest distance from frontmost surface
* of the lens that can be brought into sharp focus.</p>
*/
ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
/**
* android.lens.info.shadingMapSize [static, int32[], ndk_public]
*
* <p>Dimensions of lens shading map.</p>
*/
ANDROID_LENS_INFO_SHADING_MAP_SIZE,
/**
* android.lens.info.focusDistanceCalibration [static, enum, public]
*
* <p>The lens focus distance calibration quality.</p>
*/
ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
/**
* android.noiseReduction.mode [dynamic, enum, public]
*
* <p>Mode of operation for the noise reduction algorithm.</p>
*/
ANDROID_NOISE_REDUCTION_MODE = CameraMetadataSectionStart.ANDROID_NOISE_REDUCTION_START,
/**
* android.noiseReduction.strength [controls, byte, system]
*
* <p>Control the amount of noise reduction
* applied to the images</p>
*/
ANDROID_NOISE_REDUCTION_STRENGTH,
/**
* android.noiseReduction.availableNoiseReductionModes [static, byte[], public]
*
* <p>List of noise reduction modes for ANDROID_NOISE_REDUCTION_MODE that are supported
* by this camera device.</p>
*
* @see ANDROID_NOISE_REDUCTION_MODE
*/
ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
/**
* android.quirks.meteringCropRegion [static, byte, system]
*
* <p>If set to 1, the camera service does not
* scale 'normalized' coordinates with respect to the crop
* region. This applies to metering input (a{e,f,wb}Region
* and output (face rectangles).</p>
*/
ANDROID_QUIRKS_METERING_CROP_REGION = CameraMetadataSectionStart.ANDROID_QUIRKS_START,
/**
* android.quirks.triggerAfWithAuto [static, byte, system]
*
* <p>If set to 1, then the camera service always
* switches to FOCUS_MODE_AUTO before issuing a AF
* trigger.</p>
*/
ANDROID_QUIRKS_TRIGGER_AF_WITH_AUTO,
/**
* android.quirks.useZslFormat [static, byte, system]
*
* <p>If set to 1, the camera service uses
* CAMERA2_PIXEL_FORMAT_ZSL instead of
* HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero
* shutter lag stream</p>
*/
ANDROID_QUIRKS_USE_ZSL_FORMAT,
/**
* android.quirks.usePartialResult [static, byte, hidden]
*
* <p>If set to 1, the HAL will always split result
* metadata for a single capture into multiple buffers,
* returned using multiple process_capture_result calls.</p>
*/
ANDROID_QUIRKS_USE_PARTIAL_RESULT,
/**
* android.quirks.partialResult [dynamic, enum, hidden]
*
* <p>Whether a result given to the framework is the
* final one for the capture, or only a partial that contains a
* subset of the full set of dynamic metadata
* values.</p>
*/
ANDROID_QUIRKS_PARTIAL_RESULT,
/**
* android.request.frameCount [dynamic, int32, hidden]
*
* <p>A frame counter set by the framework. This value monotonically
* increases with every new result (that is, each new result has a unique
* frameCount value).</p>
*/
ANDROID_REQUEST_FRAME_COUNT = CameraMetadataSectionStart.ANDROID_REQUEST_START,
/**
* android.request.id [dynamic, int32, hidden]
*
* <p>An application-specified ID for the current
* request. Must be maintained unchanged in output
* frame</p>
*/
ANDROID_REQUEST_ID,
/**
* android.request.inputStreams [controls, int32[], system]
*
* <p>List which camera reprocess stream is used
* for the source of reprocessing data.</p>
*/
ANDROID_REQUEST_INPUT_STREAMS,
/**
* android.request.metadataMode [dynamic, enum, system]
*
* <p>How much metadata to produce on
* output</p>
*/
ANDROID_REQUEST_METADATA_MODE,
/**
* android.request.outputStreams [dynamic, int32[], system]
*
* <p>Lists which camera output streams image data
* from this capture must be sent to</p>
*/
ANDROID_REQUEST_OUTPUT_STREAMS,
/**
* android.request.type [controls, enum, system]
*
* <p>The type of the request; either CAPTURE or
* REPROCESS. For legacy HAL3, this tag is redundant.</p>
*/
ANDROID_REQUEST_TYPE,
/**
* android.request.maxNumOutputStreams [static, int32[], ndk_public]
*
* <p>The maximum numbers of different types of output streams
* that can be configured and used simultaneously by a camera device.</p>
*/
ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
/**
* android.request.maxNumReprocessStreams [static, int32[], system]
*
* <p>How many reprocessing streams of any type
* can be allocated at the same time.</p>
*/
ANDROID_REQUEST_MAX_NUM_REPROCESS_STREAMS,
/**
* android.request.maxNumInputStreams [static, int32, java_public]
*
* <p>The maximum numbers of any type of input streams
* that can be configured and used simultaneously by a camera device.</p>
*/
ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
/**
* android.request.pipelineDepth [dynamic, byte, public]
*
* <p>Specifies the number of pipeline stages the frame went
* through from when it was exposed to when the final completed result
* was available to the framework.</p>
*/
ANDROID_REQUEST_PIPELINE_DEPTH,
/**
* android.request.pipelineMaxDepth [static, byte, public]
*
* <p>Specifies the number of maximum pipeline stages a frame
* has to go through from when it's exposed to when it's available
* to the framework.</p>
*/
ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
/**
* android.request.partialResultCount [static, int32, public]
*
* <p>Defines how many sub-components
* a result will be composed of.</p>
*/
ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
/**
* android.request.availableCapabilities [static, enum[], public]
*
* <p>List of capabilities that this camera device
* advertises as fully supporting.</p>
*/
ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
/**
* android.request.availableRequestKeys [static, int32[], ndk_public]
*
* <p>A list of all keys that the camera device has available
* to use with {@link ACaptureRequest }.</p>
*/
ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
/**
* android.request.availableResultKeys [static, int32[], ndk_public]
*
* <p>A list of all keys that the camera device has available to use with {@link ACameraCaptureSession_captureCallback_result }.</p>
*/
ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
/**
* android.request.availableCharacteristicsKeys [static, int32[], ndk_public]
*
* <p>A list of all keys that the camera device has available to use with {@link ACameraManager_getCameraCharacteristics }.</p>
*/
ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
/**
* android.request.availableSessionKeys [static, int32[], ndk_public]
*
* <p>A subset of the available request keys that the camera device
* can pass as part of the capture session initialization.</p>
*/
ANDROID_REQUEST_AVAILABLE_SESSION_KEYS,
/**
* android.request.availablePhysicalCameraRequestKeys [static, int32[], ndk_public]
*
* <p>A subset of the available request keys that can be overridden for
* physical devices backing a logical multi-camera.</p>
*/
ANDROID_REQUEST_AVAILABLE_PHYSICAL_CAMERA_REQUEST_KEYS,
/**
* android.request.characteristicKeysNeedingPermission [static, int32[], hidden]
*
* <p>A list of camera characteristics keys that are only available
* in case the camera client has camera permission.</p>
*/
ANDROID_REQUEST_CHARACTERISTIC_KEYS_NEEDING_PERMISSION,
/**
* android.request.availableDynamicRangeProfilesMap [static, enum[], ndk_public]
*
* <p>A map of all available 10-bit dynamic range profiles along with their
* capture request constraints.</p>
*/
ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_MAP,
/**
* android.request.recommendedTenBitDynamicRangeProfile [static, int64, java_public]
*
* <p>Recommended 10-bit dynamic range profile.</p>
*/
ANDROID_REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE,
/**
* android.request.availableColorSpaceProfilesMap [static, enum[], ndk_public]
*
* <p>A list of all possible color space profiles supported by a camera device.</p>
*/
ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP,
/**
* android.scaler.cropRegion [dynamic, int32[], public]
*
* <p>The desired region of the sensor to read out for this capture.</p>
*/
ANDROID_SCALER_CROP_REGION = CameraMetadataSectionStart.ANDROID_SCALER_START,
/**
* android.scaler.availableFormats [static, enum[], hidden]
*
* <p>The list of image formats that are supported by this
* camera device for output streams.</p>
*/
ANDROID_SCALER_AVAILABLE_FORMATS,
/**
* android.scaler.availableJpegMinDurations [static, int64[], hidden]
*
* <p>The minimum frame duration that is supported
* for each resolution in ANDROID_SCALER_AVAILABLE_JPEG_SIZES.</p>
*
* @see ANDROID_SCALER_AVAILABLE_JPEG_SIZES
*/
ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS,
/**
* android.scaler.availableJpegSizes [static, int32[], hidden]
*
* <p>The JPEG resolutions that are supported by this camera device.</p>
*/
ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
/**
* android.scaler.availableMaxDigitalZoom [static, float, public]
*
* <p>The maximum ratio between both active area width
* and crop region width, and active area height and
* crop region height, for ANDROID_SCALER_CROP_REGION.</p>
*
* @see ANDROID_SCALER_CROP_REGION
*/
ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
/**
* android.scaler.availableProcessedMinDurations [static, int64[], hidden]
*
* <p>For each available processed output size (defined in
* ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES), this property lists the
* minimum supportable frame duration for that size.</p>
*
* @see ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES
*/
ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS,
/**
* android.scaler.availableProcessedSizes [static, int32[], hidden]
*
* <p>The resolutions available for use with
* processed output streams, such as YV12, NV12, and
* platform opaque YUV/RGB streams to the GPU or video
* encoders.</p>
*/
ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
/**
* android.scaler.availableRawMinDurations [static, int64[], system]
*
* <p>For each available raw output size (defined in
* ANDROID_SCALER_AVAILABLE_RAW_SIZES), this property lists the minimum
* supportable frame duration for that size.</p>
*
* @see ANDROID_SCALER_AVAILABLE_RAW_SIZES
*/
ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS,
/**
* android.scaler.availableRawSizes [static, int32[], system]
*
* <p>The resolutions available for use with raw
* sensor output streams, listed as width,
* height</p>
*/
ANDROID_SCALER_AVAILABLE_RAW_SIZES,
/**
* android.scaler.availableInputOutputFormatsMap [static, int32, hidden]
*
* <p>The mapping of image formats that are supported by this
* camera device for input streams, to their corresponding output formats.</p>
*/
ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
/**
* android.scaler.availableStreamConfigurations [static, enum[], ndk_public]
*
* <p>The available stream configurations that this
* camera device supports
* (i.e. format, width, height, output/input stream).</p>
*/
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
/**
* android.scaler.availableMinFrameDurations [static, int64[], ndk_public]
*
* <p>This lists the minimum frame duration for each
* format/size combination.</p>
*/
ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
/**
* android.scaler.availableStallDurations [static, int64[], ndk_public]
*
* <p>This lists the maximum stall duration for each
* output format/size combination.</p>
*/
ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
/**
* android.scaler.croppingType [static, enum, public]
*
* <p>The crop type that this camera device supports.</p>
*/
ANDROID_SCALER_CROPPING_TYPE,
/**
* android.scaler.availableRecommendedStreamConfigurations [static, enum[], ndk_public]
*
* <p>Recommended stream configurations for common client use cases.</p>
*/
ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS,
/**
* android.scaler.availableRecommendedInputOutputFormatsMap [static, int32, ndk_public]
*
* <p>Recommended mappings of image formats that are supported by this
* camera device for input streams, to their corresponding output formats.</p>
*/
ANDROID_SCALER_AVAILABLE_RECOMMENDED_INPUT_OUTPUT_FORMATS_MAP,
/**
* android.scaler.availableRotateAndCropModes [static, byte[], public]
*
* <p>List of rotate-and-crop modes for ANDROID_SCALER_ROTATE_AND_CROP that are supported by this camera device.</p>
*
* @see ANDROID_SCALER_ROTATE_AND_CROP
*/
ANDROID_SCALER_AVAILABLE_ROTATE_AND_CROP_MODES,
/**
* android.scaler.rotateAndCrop [dynamic, enum, public]
*
* <p>Whether a rotation-and-crop operation is applied to processed
* outputs from the camera.</p>
*/
ANDROID_SCALER_ROTATE_AND_CROP,
/**
* android.scaler.defaultSecureImageSize [static, int32[], public]
*
* <p>Default YUV/PRIVATE size to use for requesting secure image buffers.</p>
*/
ANDROID_SCALER_DEFAULT_SECURE_IMAGE_SIZE,
/**
* android.scaler.physicalCameraMultiResolutionStreamConfigurations [static, enum[], ndk_public]
*
* <p>The available multi-resolution stream configurations that this
* physical camera device supports
* (i.e. format, width, height, output/input stream).</p>
*/
ANDROID_SCALER_PHYSICAL_CAMERA_MULTI_RESOLUTION_STREAM_CONFIGURATIONS,
/**
* android.scaler.availableStreamConfigurationsMaximumResolution [static, enum[], ndk_public]
*
* <p>The available stream configurations that this
* camera device supports (i.e. format, width, height, output/input stream) for a
* CaptureRequest with ANDROID_SENSOR_PIXEL_MODE set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
/**
* android.scaler.availableMinFrameDurationsMaximumResolution [static, int64[], ndk_public]
*
* <p>This lists the minimum frame duration for each
* format/size combination when the camera device is sent a CaptureRequest with
* ANDROID_SENSOR_PIXEL_MODE set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
/**
* android.scaler.availableStallDurationsMaximumResolution [static, int64[], ndk_public]
*
* <p>This lists the maximum stall duration for each
* output format/size combination when CaptureRequests are submitted with
* ANDROID_SENSOR_PIXEL_MODE set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a></p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_SCALER_AVAILABLE_STALL_DURATIONS_MAXIMUM_RESOLUTION,
/**
* android.scaler.availableInputOutputFormatsMapMaximumResolution [static, int32, hidden]
*
* <p>The mapping of image formats that are supported by this
* camera device for input streams, to their corresponding output formats, when
* ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP_MAXIMUM_RESOLUTION,
/**
* android.scaler.multiResolutionStreamSupported [static, enum, ndk_public]
*
* <p>Whether the camera device supports multi-resolution input or output streams</p>
*/
ANDROID_SCALER_MULTI_RESOLUTION_STREAM_SUPPORTED,
/**
* android.scaler.availableStreamUseCases [static, enum[], public]
*
* <p>The stream use cases supported by this camera device.</p>
*/
ANDROID_SCALER_AVAILABLE_STREAM_USE_CASES = 851994,
/**
* android.scaler.rawCropRegion [dynamic, int32[], public]
*
* <p>The region of the sensor that corresponds to the RAW read out for this
* capture when the stream use case of a RAW stream is set to CROPPED_RAW.</p>
*/
ANDROID_SCALER_RAW_CROP_REGION,
/**
* android.sensor.exposureTime [dynamic, int64, public]
*
* <p>Duration each pixel is exposed to
* light.</p>
*/
ANDROID_SENSOR_EXPOSURE_TIME = CameraMetadataSectionStart.ANDROID_SENSOR_START,
/**
* android.sensor.frameDuration [dynamic, int64, public]
*
* <p>Duration from start of frame readout to
* start of next frame readout.</p>
*/
ANDROID_SENSOR_FRAME_DURATION,
/**
* android.sensor.sensitivity [dynamic, int32, public]
*
* <p>The amount of gain applied to sensor data
* before processing.</p>
*/
ANDROID_SENSOR_SENSITIVITY,
/**
* android.sensor.referenceIlluminant1 [static, enum, public]
*
* <p>The standard reference illuminant used as the scene light source when
* calculating the ANDROID_SENSOR_COLOR_TRANSFORM1,
* ANDROID_SENSOR_CALIBRATION_TRANSFORM1, and
* ANDROID_SENSOR_FORWARD_MATRIX1 matrices.</p>
*
* @see ANDROID_SENSOR_CALIBRATION_TRANSFORM1
* @see ANDROID_SENSOR_COLOR_TRANSFORM1
* @see ANDROID_SENSOR_FORWARD_MATRIX1
*/
ANDROID_SENSOR_REFERENCE_ILLUMINANT1,
/**
* android.sensor.referenceIlluminant2 [static, byte, public]
*
* <p>The standard reference illuminant used as the scene light source when
* calculating the ANDROID_SENSOR_COLOR_TRANSFORM2,
* ANDROID_SENSOR_CALIBRATION_TRANSFORM2, and
* ANDROID_SENSOR_FORWARD_MATRIX2 matrices.</p>
*
* @see ANDROID_SENSOR_CALIBRATION_TRANSFORM2
* @see ANDROID_SENSOR_COLOR_TRANSFORM2
* @see ANDROID_SENSOR_FORWARD_MATRIX2
*/
ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
/**
* android.sensor.calibrationTransform1 [static, rational[], public]
*
* <p>A per-device calibration transform matrix that maps from the
* reference sensor colorspace to the actual device sensor colorspace.</p>
*/
ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
/**
* android.sensor.calibrationTransform2 [static, rational[], public]
*
* <p>A per-device calibration transform matrix that maps from the
* reference sensor colorspace to the actual device sensor colorspace
* (this is the colorspace of the raw buffer data).</p>
*/
ANDROID_SENSOR_CALIBRATION_TRANSFORM2,
/**
* android.sensor.colorTransform1 [static, rational[], public]
*
* <p>A matrix that transforms color values from CIE XYZ color space to
* reference sensor color space.</p>
*/
ANDROID_SENSOR_COLOR_TRANSFORM1,
/**
* android.sensor.colorTransform2 [static, rational[], public]
*
* <p>A matrix that transforms color values from CIE XYZ color space to
* reference sensor color space.</p>
*/
ANDROID_SENSOR_COLOR_TRANSFORM2,
/**
* android.sensor.forwardMatrix1 [static, rational[], public]
*
* <p>A matrix that transforms white balanced camera colors from the reference
* sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
*/
ANDROID_SENSOR_FORWARD_MATRIX1,
/**
* android.sensor.forwardMatrix2 [static, rational[], public]
*
* <p>A matrix that transforms white balanced camera colors from the reference
* sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint.</p>
*/
ANDROID_SENSOR_FORWARD_MATRIX2,
/**
* android.sensor.baseGainFactor [static, rational, system]
*
* <p>Gain factor from electrons to raw units when
* ISO=100</p>
*/
ANDROID_SENSOR_BASE_GAIN_FACTOR,
/**
* android.sensor.blackLevelPattern [static, int32[], public]
*
* <p>A fixed black level offset for each of the color filter arrangement
* (CFA) mosaic channels.</p>
*/
ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
/**
* android.sensor.maxAnalogSensitivity [static, int32, public]
*
* <p>Maximum sensitivity that is implemented
* purely through analog gain.</p>
*/
ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
/**
* android.sensor.orientation [static, int32, public]
*
* <p>Clockwise angle through which the output image needs to be rotated to be
* upright on the device screen in its native orientation.</p>
*/
ANDROID_SENSOR_ORIENTATION,
/**
* android.sensor.profileHueSatMapDimensions [static, int32[], system]
*
* <p>The number of input samples for each dimension of
* ANDROID_SENSOR_PROFILE_HUE_SAT_MAP.</p>
*
* @see ANDROID_SENSOR_PROFILE_HUE_SAT_MAP
*/
ANDROID_SENSOR_PROFILE_HUE_SAT_MAP_DIMENSIONS,
/**
* android.sensor.timestamp [dynamic, int64, public]
*
* <p>Time at start of exposure of first
* row of the image sensor active array, in nanoseconds.</p>
*/
ANDROID_SENSOR_TIMESTAMP,
/**
* android.sensor.temperature [dynamic, float, system]
*
* <p>The temperature of the sensor, sampled at the time
* exposure began for this frame.</p>
* <p>The thermal diode being queried should be inside the sensor PCB, or
* somewhere close to it.</p>
*/
ANDROID_SENSOR_TEMPERATURE,
/**
* android.sensor.neutralColorPoint [dynamic, rational[], public]
*
* <p>The estimated camera neutral color in the native sensor colorspace at
* the time of capture.</p>
*/
ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
/**
* android.sensor.noiseProfile [dynamic, double[], public]
*
* <p>Noise model coefficients for each CFA mosaic channel.</p>
*/
ANDROID_SENSOR_NOISE_PROFILE,
/**
* android.sensor.profileHueSatMap [dynamic, float[], system]
*
* <p>A mapping containing a hue shift, saturation scale, and value scale
* for each pixel.</p>
*/
ANDROID_SENSOR_PROFILE_HUE_SAT_MAP,
/**
* android.sensor.profileToneCurve [dynamic, float[], system]
*
* <p>A list of x,y samples defining a tone-mapping curve for gamma adjustment.</p>
*/
ANDROID_SENSOR_PROFILE_TONE_CURVE,
/**
* android.sensor.greenSplit [dynamic, float, public]
*
* <p>The worst-case divergence between Bayer green channels.</p>
*/
ANDROID_SENSOR_GREEN_SPLIT,
/**
* android.sensor.testPatternData [dynamic, int32[], public]
*
* <p>A pixel <code>[R, G_even, G_odd, B]</code> that supplies the test pattern
* when ANDROID_SENSOR_TEST_PATTERN_MODE is SOLID_COLOR.</p>
*
* @see ANDROID_SENSOR_TEST_PATTERN_MODE
*/
ANDROID_SENSOR_TEST_PATTERN_DATA,
/**
* android.sensor.testPatternMode [dynamic, enum, public]
*
* <p>When enabled, the sensor sends a test pattern instead of
* doing a real exposure from the camera.</p>
*/
ANDROID_SENSOR_TEST_PATTERN_MODE,
/**
* android.sensor.availableTestPatternModes [static, int32[], public]
*
* <p>List of sensor test pattern modes for ANDROID_SENSOR_TEST_PATTERN_MODE
* supported by this camera device.</p>
*
* @see ANDROID_SENSOR_TEST_PATTERN_MODE
*/
ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
/**
* android.sensor.rollingShutterSkew [dynamic, int64, public]
*
* <p>Duration between the start of exposure for the first row of the image sensor,
* and the start of exposure for one past the last row of the image sensor.</p>
*/
ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
/**
* android.sensor.opticalBlackRegions [static, int32[], public]
*
* <p>List of disjoint rectangles indicating the sensor
* optically shielded black pixel regions.</p>
*/
ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
/**
* android.sensor.dynamicBlackLevel [dynamic, float[], public]
*
* <p>A per-frame dynamic black level offset for each of the color filter
* arrangement (CFA) mosaic channels.</p>
*/
ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL,
/**
* android.sensor.dynamicWhiteLevel [dynamic, int32, public]
*
* <p>Maximum raw value output by sensor for this frame.</p>
*/
ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
/**
* android.sensor.opaqueRawSize [static, int32[], system]
*
* <p>Size in bytes for all the listed opaque RAW buffer sizes</p>
*/
ANDROID_SENSOR_OPAQUE_RAW_SIZE,
/**
* android.sensor.opaqueRawSizeMaximumResolution [static, int32[], system]
*
* <p>Size in bytes for all the listed opaque RAW buffer sizes when
* ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_SENSOR_OPAQUE_RAW_SIZE_MAXIMUM_RESOLUTION,
/**
* android.sensor.pixelMode [dynamic, enum, public]
*
* <p>Switches sensor pixel mode between maximum resolution mode and default mode.</p>
*/
ANDROID_SENSOR_PIXEL_MODE,
/**
* android.sensor.rawBinningFactorUsed [dynamic, enum, public]
*
* <p>Whether <code>RAW</code> images requested have their bayer pattern as described by
* ANDROID_SENSOR_INFO_BINNING_FACTOR.</p>
*
* @see ANDROID_SENSOR_INFO_BINNING_FACTOR
*/
ANDROID_SENSOR_RAW_BINNING_FACTOR_USED,
/**
* android.sensor.info.activeArraySize [static, int32[], public]
*
* <p>The area of the image sensor which corresponds to active pixels after any geometric
* distortion correction has been applied.</p>
*/
ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE = CameraMetadataSectionStart.ANDROID_SENSOR_INFO_START,
/**
* android.sensor.info.sensitivityRange [static, int32[], public]
*
* <p>Range of sensitivities for ANDROID_SENSOR_SENSITIVITY supported by this
* camera device.</p>
*
* @see ANDROID_SENSOR_SENSITIVITY
*/
ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
/**
* android.sensor.info.colorFilterArrangement [static, enum, public]
*
* <p>The arrangement of color filters on sensor;
* represents the colors in the top-left 2x2 section of
* the sensor, in reading order, for a Bayer camera, or the
* light spectrum it captures for MONOCHROME camera.</p>
*/
ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
/**
* android.sensor.info.exposureTimeRange [static, int64[], public]
*
* <p>The range of image exposure times for ANDROID_SENSOR_EXPOSURE_TIME supported
* by this camera device.</p>
*
* @see ANDROID_SENSOR_EXPOSURE_TIME
*/
ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
/**
* android.sensor.info.maxFrameDuration [static, int64, public]
*
* <p>The maximum possible frame duration (minimum frame rate) for
* ANDROID_SENSOR_FRAME_DURATION that is supported this camera device.</p>
*
* @see ANDROID_SENSOR_FRAME_DURATION
*/
ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
/**
* android.sensor.info.physicalSize [static, float[], public]
*
* <p>The physical dimensions of the full pixel
* array.</p>
*/
ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
/**
* android.sensor.info.pixelArraySize [static, int32[], public]
*
* <p>Dimensions of the full pixel array, possibly
* including black calibration pixels.</p>
*/
ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
/**
* android.sensor.info.whiteLevel [static, int32, public]
*
* <p>Maximum raw value output by sensor.</p>
*/
ANDROID_SENSOR_INFO_WHITE_LEVEL,
/**
* android.sensor.info.timestampSource [static, enum, public]
*
* <p>The time base source for sensor capture start timestamps.</p>
*/
ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
/**
* android.sensor.info.lensShadingApplied [static, enum, public]
*
* <p>Whether the RAW images output from this camera device are subject to
* lens shading correction.</p>
*/
ANDROID_SENSOR_INFO_LENS_SHADING_APPLIED,
/**
* android.sensor.info.preCorrectionActiveArraySize [static, int32[], public]
*
* <p>The area of the image sensor which corresponds to active pixels prior to the
* application of any geometric distortion correction.</p>
*/
ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
/**
* android.sensor.info.activeArraySizeMaximumResolution [static, int32[], public]
*
* <p>The area of the image sensor which corresponds to active pixels after any geometric
* distortion correction has been applied, when the sensor runs in maximum resolution mode.</p>
*/
ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
/**
* android.sensor.info.pixelArraySizeMaximumResolution [static, int32[], public]
*
* <p>Dimensions of the full pixel array, possibly
* including black calibration pixels, when the sensor runs in maximum resolution mode.
* Analogous to ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, when ANDROID_SENSOR_PIXEL_MODE is
* set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE_MAXIMUM_RESOLUTION,
/**
* android.sensor.info.preCorrectionActiveArraySizeMaximumResolution [static, int32[], public]
*
* <p>The area of the image sensor which corresponds to active pixels prior to the
* application of any geometric distortion correction, when the sensor runs in maximum
* resolution mode. This key must be used for crop / metering regions, only when
* ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE_MAXIMUM_RESOLUTION,
/**
* android.sensor.info.binningFactor [static, int32[], public]
*
* <p>Dimensions of the group of pixels which are under the same color filter.
* This specifies the width and height (pair of integers) of the group of pixels which fall
* under the same color filter for ULTRA_HIGH_RESOLUTION sensors.</p>
*/
ANDROID_SENSOR_INFO_BINNING_FACTOR,
/**
* android.shading.mode [dynamic, enum, public]
*
* <p>Quality of lens shading correction applied
* to the image data.</p>
*/
ANDROID_SHADING_MODE = CameraMetadataSectionStart.ANDROID_SHADING_START,
/**
* android.shading.strength [controls, byte, system]
*
* <p>Control the amount of shading correction
* applied to the images</p>
*/
ANDROID_SHADING_STRENGTH,
/**
* android.shading.availableModes [static, byte[], public]
*
* <p>List of lens shading modes for ANDROID_SHADING_MODE that are supported by this camera device.</p>
*
* @see ANDROID_SHADING_MODE
*/
ANDROID_SHADING_AVAILABLE_MODES,
/**
* android.statistics.faceDetectMode [dynamic, enum, public]
*
* <p>Operating mode for the face detector
* unit.</p>
*/
ANDROID_STATISTICS_FACE_DETECT_MODE = CameraMetadataSectionStart.ANDROID_STATISTICS_START,
/**
* android.statistics.histogramMode [dynamic, enum, system]
*
* <p>Operating mode for histogram
* generation</p>
*/
ANDROID_STATISTICS_HISTOGRAM_MODE,
/**
* android.statistics.sharpnessMapMode [dynamic, enum, system]
*
* <p>Operating mode for sharpness map
* generation</p>
*/
ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
/**
* android.statistics.hotPixelMapMode [dynamic, enum, public]
*
* <p>Operating mode for hot pixel map generation.</p>
*/
ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
/**
* android.statistics.faceIds [dynamic, int32[], ndk_public]
*
* <p>List of unique IDs for detected faces.</p>
*/
ANDROID_STATISTICS_FACE_IDS,
/**
* android.statistics.faceLandmarks [dynamic, int32[], ndk_public]
*
* <p>List of landmarks for detected
* faces.</p>
*/
ANDROID_STATISTICS_FACE_LANDMARKS,
/**
* android.statistics.faceRectangles [dynamic, int32[], ndk_public]
*
* <p>List of the bounding rectangles for detected
* faces.</p>
*/
ANDROID_STATISTICS_FACE_RECTANGLES,
/**
* android.statistics.faceScores [dynamic, byte[], ndk_public]
*
* <p>List of the face confidence scores for
* detected faces</p>
*/
ANDROID_STATISTICS_FACE_SCORES,
/**
* android.statistics.histogram [dynamic, int32[], system]
*
* <p>A 3-channel histogram based on the raw
* sensor data</p>
*/
ANDROID_STATISTICS_HISTOGRAM,
/**
* android.statistics.sharpnessMap [dynamic, int32[], system]
*
* <p>A 3-channel sharpness map, based on the raw
* sensor data</p>
*/
ANDROID_STATISTICS_SHARPNESS_MAP,
/**
* android.statistics.lensShadingCorrectionMap [dynamic, byte, java_public]
*
* <p>The shading map is a low-resolution floating-point map
* that lists the coefficients used to correct for vignetting, for each
* Bayer color channel.</p>
*/
ANDROID_STATISTICS_LENS_SHADING_CORRECTION_MAP,
/**
* android.statistics.lensShadingMap [dynamic, float[], ndk_public]
*
* <p>The shading map is a low-resolution floating-point map
* that lists the coefficients used to correct for vignetting and color shading,
* for each Bayer color channel of RAW image data.</p>
*/
ANDROID_STATISTICS_LENS_SHADING_MAP,
/**
* android.statistics.predictedColorGains [dynamic, float[], hidden]
*
* <p>The best-fit color channel gains calculated
* by the camera device's statistics units for the current output frame.</p>
*/
ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
/**
* android.statistics.predictedColorTransform [dynamic, rational[], hidden]
*
* <p>The best-fit color transform matrix estimate
* calculated by the camera device's statistics units for the current
* output frame.</p>
*/
ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
/**
* android.statistics.sceneFlicker [dynamic, enum, public]
*
* <p>The camera device estimated scene illumination lighting
* frequency.</p>
*/
ANDROID_STATISTICS_SCENE_FLICKER,
/**
* android.statistics.hotPixelMap [dynamic, int32[], public]
*
* <p>List of <code>(x, y)</code> coordinates of hot/defective pixels on the sensor.</p>
*/
ANDROID_STATISTICS_HOT_PIXEL_MAP,
/**
* android.statistics.lensShadingMapMode [dynamic, enum, public]
*
* <p>Whether the camera device will output the lens
* shading map in output result metadata.</p>
*/
ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
/**
* android.statistics.oisDataMode [dynamic, enum, public]
*
* <p>A control for selecting whether optical stabilization (OIS) position
* information is included in output result metadata.</p>
*/
ANDROID_STATISTICS_OIS_DATA_MODE,
/**
* android.statistics.oisTimestamps [dynamic, int64[], ndk_public]
*
* <p>An array of timestamps of OIS samples, in nanoseconds.</p>
*/
ANDROID_STATISTICS_OIS_TIMESTAMPS,
/**
* android.statistics.oisXShifts [dynamic, float[], ndk_public]
*
* <p>An array of shifts of OIS samples, in x direction.</p>
*/
ANDROID_STATISTICS_OIS_X_SHIFTS,
/**
* android.statistics.oisYShifts [dynamic, float[], ndk_public]
*
* <p>An array of shifts of OIS samples, in y direction.</p>
*/
ANDROID_STATISTICS_OIS_Y_SHIFTS,
/**
* android.statistics.lensIntrinsicTimestamps [dynamic, int64[], ndk_public]
*
* <p>An array of timestamps of lens intrinsics samples, in nanoseconds.</p>
*/
ANDROID_STATISTICS_LENS_INTRINSIC_TIMESTAMPS,
/**
* android.statistics.lensIntrinsicSamples [dynamic, float[], ndk_public]
*
* <p>An array of intra-frame lens intrinsics.</p>
*/
ANDROID_STATISTICS_LENS_INTRINSIC_SAMPLES,
/**
* android.statistics.info.availableFaceDetectModes [static, byte[], public]
*
* <p>List of face detection modes for ANDROID_STATISTICS_FACE_DETECT_MODE that are
* supported by this camera device.</p>
*
* @see ANDROID_STATISTICS_FACE_DETECT_MODE
*/
ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES = CameraMetadataSectionStart.ANDROID_STATISTICS_INFO_START,
/**
* android.statistics.info.histogramBucketCount [static, int32, system]
*
* <p>Number of histogram buckets
* supported</p>
*/
ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
/**
* android.statistics.info.maxFaceCount [static, int32, public]
*
* <p>The maximum number of simultaneously detectable
* faces.</p>
*/
ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
/**
* android.statistics.info.maxHistogramCount [static, int32, system]
*
* <p>Maximum value possible for a histogram
* bucket</p>
*/
ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
/**
* android.statistics.info.maxSharpnessMapValue [static, int32, system]
*
* <p>Maximum value possible for a sharpness map
* region.</p>
*/
ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
/**
* android.statistics.info.sharpnessMapSize [static, int32[], system]
*
* <p>Dimensions of the sharpness
* map</p>
*/
ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
/**
* android.statistics.info.availableHotPixelMapModes [static, byte[], public]
*
* <p>List of hot pixel map output modes for ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE that are
* supported by this camera device.</p>
*
* @see ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE
*/
ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
/**
* android.statistics.info.availableLensShadingMapModes [static, byte[], public]
*
* <p>List of lens shading map output modes for ANDROID_STATISTICS_LENS_SHADING_MAP_MODE that
* are supported by this camera device.</p>
*
* @see ANDROID_STATISTICS_LENS_SHADING_MAP_MODE
*/
ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
/**
* android.statistics.info.availableOisDataModes [static, byte[], public]
*
* <p>List of OIS data output modes for ANDROID_STATISTICS_OIS_DATA_MODE that
* are supported by this camera device.</p>
*
* @see ANDROID_STATISTICS_OIS_DATA_MODE
*/
ANDROID_STATISTICS_INFO_AVAILABLE_OIS_DATA_MODES,
/**
* android.tonemap.curveBlue [dynamic, float[], ndk_public]
*
* <p>Tonemapping / contrast / gamma curve for the blue
* channel, to use when ANDROID_TONEMAP_MODE is
* CONTRAST_CURVE.</p>
*
* @see ANDROID_TONEMAP_MODE
*/
ANDROID_TONEMAP_CURVE_BLUE = CameraMetadataSectionStart.ANDROID_TONEMAP_START,
/**
* android.tonemap.curveGreen [dynamic, float[], ndk_public]
*
* <p>Tonemapping / contrast / gamma curve for the green
* channel, to use when ANDROID_TONEMAP_MODE is
* CONTRAST_CURVE.</p>
*
* @see ANDROID_TONEMAP_MODE
*/
ANDROID_TONEMAP_CURVE_GREEN,
/**
* android.tonemap.curveRed [dynamic, float[], ndk_public]
*
* <p>Tonemapping / contrast / gamma curve for the red
* channel, to use when ANDROID_TONEMAP_MODE is
* CONTRAST_CURVE.</p>
*
* @see ANDROID_TONEMAP_MODE
*/
ANDROID_TONEMAP_CURVE_RED,
/**
* android.tonemap.mode [dynamic, enum, public]
*
* <p>High-level global contrast/gamma/tonemapping control.</p>
*/
ANDROID_TONEMAP_MODE,
/**
* android.tonemap.maxCurvePoints [static, int32, public]
*
* <p>Maximum number of supported points in the
* tonemap curve that can be used for ANDROID_TONEMAP_CURVE.</p>
*
* @see ANDROID_TONEMAP_CURVE
*/
ANDROID_TONEMAP_MAX_CURVE_POINTS,
/**
* android.tonemap.availableToneMapModes [static, byte[], public]
*
* <p>List of tonemapping modes for ANDROID_TONEMAP_MODE that are supported by this camera
* device.</p>
*
* @see ANDROID_TONEMAP_MODE
*/
ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
/**
* android.tonemap.gamma [dynamic, float, public]
*
* <p>Tonemapping curve to use when ANDROID_TONEMAP_MODE is
* GAMMA_VALUE</p>
*
* @see ANDROID_TONEMAP_MODE
*/
ANDROID_TONEMAP_GAMMA,
/**
* android.tonemap.presetCurve [dynamic, enum, public]
*
* <p>Tonemapping curve to use when ANDROID_TONEMAP_MODE is
* PRESET_CURVE</p>
*
* @see ANDROID_TONEMAP_MODE
*/
ANDROID_TONEMAP_PRESET_CURVE,
/**
* android.led.transmit [dynamic, enum, hidden]
*
* <p>This LED is nominally used to indicate to the user
* that the camera is powered on and may be streaming images back to the
* Application Processor. In certain rare circumstances, the OS may
* disable this when video is processed locally and not transmitted to
* any untrusted applications.</p>
* <p>In particular, the LED <em>must</em> always be on when the data could be
* transmitted off the device. The LED <em>should</em> always be on whenever
* data is stored locally on the device.</p>
* <p>The LED <em>may</em> be off if a trusted application is using the data that
* doesn't violate the above rules.</p>
*/
ANDROID_LED_TRANSMIT = CameraMetadataSectionStart.ANDROID_LED_START,
/**
* android.led.availableLeds [static, enum[], hidden]
*
* <p>A list of camera LEDs that are available on this system.</p>
*/
ANDROID_LED_AVAILABLE_LEDS,
/**
* android.info.supportedHardwareLevel [static, enum, public]
*
* <p>Generally classifies the overall set of the camera device functionality.</p>
*/
ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL = CameraMetadataSectionStart.ANDROID_INFO_START,
/**
* android.info.version [static, byte, public]
*
* <p>A short string for manufacturer version information about the camera device, such as
* ISP hardware, sensors, etc.</p>
*/
ANDROID_INFO_VERSION,
/**
* android.info.supportedBufferManagementVersion [static, enum, system]
*
* <p>The version of buffer management API this camera device supports and opts into.</p>
*/
ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION,
/**
* android.info.deviceStateOrientations [static, int64[], ndk_public]
*/
ANDROID_INFO_DEVICE_STATE_ORIENTATIONS,
/**
* android.blackLevel.lock [dynamic, enum, public]
*
* <p>Whether black-level compensation is locked
* to its current values, or is free to vary.</p>
*/
ANDROID_BLACK_LEVEL_LOCK = CameraMetadataSectionStart.ANDROID_BLACK_LEVEL_START,
/**
* android.sync.frameNumber [dynamic, enum, ndk_public]
*
* <p>The frame number corresponding to the last request
* with which the output result (metadata + buffers) has been fully
* synchronized.</p>
*/
ANDROID_SYNC_FRAME_NUMBER = CameraMetadataSectionStart.ANDROID_SYNC_START,
/**
* android.sync.maxLatency [static, enum, public]
*
* <p>The maximum number of frames that can occur after a request
* (different than the previous) has been submitted, and before the
* result's state becomes synchronized.</p>
*/
ANDROID_SYNC_MAX_LATENCY,
/**
* android.reprocess.effectiveExposureFactor [dynamic, float, java_public]
*
* <p>The amount of exposure time increase factor applied to the original output
* frame by the application processing before sending for reprocessing.</p>
*/
ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR = CameraMetadataSectionStart.ANDROID_REPROCESS_START,
/**
* android.reprocess.maxCaptureStall [static, int32, java_public]
*
* <p>The maximal camera capture pipeline stall (in unit of frame count) introduced by a
* reprocess capture request.</p>
*/
ANDROID_REPROCESS_MAX_CAPTURE_STALL,
/**
* android.depth.maxDepthSamples [static, int32, system]
*
* <p>Maximum number of points that a depth point cloud may contain.</p>
*/
ANDROID_DEPTH_MAX_DEPTH_SAMPLES = CameraMetadataSectionStart.ANDROID_DEPTH_START,
/**
* android.depth.availableDepthStreamConfigurations [static, enum[], ndk_public]
*
* <p>The available depth dataspace stream
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
*/
ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
/**
* android.depth.availableDepthMinFrameDurations [static, int64[], ndk_public]
*
* <p>This lists the minimum frame duration for each
* format/size combination for depth output formats.</p>
*/
ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
/**
* android.depth.availableDepthStallDurations [static, int64[], ndk_public]
*
* <p>This lists the maximum stall duration for each
* output format/size combination for depth streams.</p>
*/
ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
/**
* android.depth.depthIsExclusive [static, enum, public]
*
* <p>Indicates whether a capture request may target both a
* DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as
* YUV_420_888, JPEG, or RAW) simultaneously.</p>
*/
ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE,
/**
* android.depth.availableRecommendedDepthStreamConfigurations [static, int32[], ndk_public]
*
* <p>Recommended depth stream configurations for common client use cases.</p>
*/
ANDROID_DEPTH_AVAILABLE_RECOMMENDED_DEPTH_STREAM_CONFIGURATIONS,
/**
* android.depth.availableDynamicDepthStreamConfigurations [static, enum[], ndk_public]
*
* <p>The available dynamic depth dataspace stream
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
*/
ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS,
/**
* android.depth.availableDynamicDepthMinFrameDurations [static, int64[], ndk_public]
*
* <p>This lists the minimum frame duration for each
* format/size combination for dynamic depth output streams.</p>
*/
ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS,
/**
* android.depth.availableDynamicDepthStallDurations [static, int64[], ndk_public]
*
* <p>This lists the maximum stall duration for each
* output format/size combination for dynamic depth streams.</p>
*/
ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS,
/**
* android.depth.availableDepthStreamConfigurationsMaximumResolution [static, enum[], ndk_public]
*
* <p>The available depth dataspace stream
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream) when a CaptureRequest is submitted with
* ANDROID_SENSOR_PIXEL_MODE set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
/**
* android.depth.availableDepthMinFrameDurationsMaximumResolution [static, int64[], ndk_public]
*
* <p>This lists the minimum frame duration for each
* format/size combination for depth output formats when a CaptureRequest is submitted with
* ANDROID_SENSOR_PIXEL_MODE set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
/**
* android.depth.availableDepthStallDurationsMaximumResolution [static, int64[], ndk_public]
*
* <p>This lists the maximum stall duration for each
* output format/size combination for depth streams for CaptureRequests where
* ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
/**
* android.depth.availableDynamicDepthStreamConfigurationsMaximumResolution [static, enum[], ndk_public]
*
* <p>The available dynamic depth dataspace stream
* configurations that this camera device supports (i.e. format, width, height,
* output/input stream) for CaptureRequests where ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
/**
* android.depth.availableDynamicDepthMinFrameDurationsMaximumResolution [static, int64[], ndk_public]
*
* <p>This lists the minimum frame duration for each
* format/size combination for dynamic depth output streams for CaptureRequests where
* ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
/**
* android.depth.availableDynamicDepthStallDurationsMaximumResolution [static, int64[], ndk_public]
*
* <p>This lists the maximum stall duration for each
* output format/size combination for dynamic depth streams for CaptureRequests where
* ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_DEPTH_AVAILABLE_DYNAMIC_DEPTH_STALL_DURATIONS_MAXIMUM_RESOLUTION,
/**
* android.logicalMultiCamera.physicalIds [static, byte[], ndk_public]
*
* <p>String containing the ids of the underlying physical cameras.</p>
*/
ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS = CameraMetadataSectionStart.ANDROID_LOGICAL_MULTI_CAMERA_START,
/**
* android.logicalMultiCamera.sensorSyncType [static, enum, public]
*
* <p>The accuracy of frame timestamp synchronization between physical cameras</p>
*/
ANDROID_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE,
/**
* android.logicalMultiCamera.activePhysicalId [dynamic, byte, public]
*
* <p>String containing the ID of the underlying active physical camera.</p>
*/
ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID,
/**
* android.logicalMultiCamera.activePhysicalSensorCropRegion [dynamic, int32[], public]
*
* <p>The current region of the active physical sensor that will be read out for this
* capture.</p>
*/
ANDROID_LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_SENSOR_CROP_REGION,
/**
* android.distortionCorrection.mode [dynamic, enum, public]
*
* <p>Mode of operation for the lens distortion correction block.</p>
*/
ANDROID_DISTORTION_CORRECTION_MODE = CameraMetadataSectionStart.ANDROID_DISTORTION_CORRECTION_START,
/**
* android.distortionCorrection.availableModes [static, byte[], public]
*
* <p>List of distortion correction modes for ANDROID_DISTORTION_CORRECTION_MODE that are
* supported by this camera device.</p>
*
* @see ANDROID_DISTORTION_CORRECTION_MODE
*/
ANDROID_DISTORTION_CORRECTION_AVAILABLE_MODES,
/**
* android.heic.availableHeicStreamConfigurations [static, enum[], ndk_public]
*
* <p>The available HEIC (ISO/IEC 23008-12) stream
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
*/
ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS = CameraMetadataSectionStart.ANDROID_HEIC_START,
/**
* android.heic.availableHeicMinFrameDurations [static, int64[], ndk_public]
*
* <p>This lists the minimum frame duration for each
* format/size combination for HEIC output formats.</p>
*/
ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS,
/**
* android.heic.availableHeicStallDurations [static, int64[], ndk_public]
*
* <p>This lists the maximum stall duration for each
* output format/size combination for HEIC streams.</p>
*/
ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
/**
* android.heic.availableHeicStreamConfigurationsMaximumResolution [static, enum[], ndk_public]
*
* <p>The available HEIC (ISO/IEC 23008-12) stream
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
*/
ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
/**
* android.heic.availableHeicMinFrameDurationsMaximumResolution [static, int64[], ndk_public]
*
* <p>This lists the minimum frame duration for each
* format/size combination for HEIC output formats for CaptureRequests where
* ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
/**
* android.heic.availableHeicStallDurationsMaximumResolution [static, int64[], ndk_public]
*
* <p>This lists the maximum stall duration for each
* output format/size combination for HEIC streams for CaptureRequests where
* ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS_MAXIMUM_RESOLUTION,
/**
* android.heic.info.supported [static, enum, system]
*
* <p>Whether this camera device can support identical set of stream combinations
* involving HEIC image format, compared to the
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraDevice.html#legacy-level-guaranteed-configurations">table of combinations</a> involving JPEG image format required for the device's hardware
* level and capabilities.</p>
*/
ANDROID_HEIC_INFO_SUPPORTED = CameraMetadataSectionStart.ANDROID_HEIC_INFO_START,
/**
* android.heic.info.maxJpegAppSegmentsCount [static, byte, system]
*
* <p>The maximum number of Jpeg APP segments supported by the camera HAL device.</p>
*/
ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT,
/**
* android.automotive.location [static, enum, public]
*
* <p>Location of the cameras on the automotive devices.</p>
*/
ANDROID_AUTOMOTIVE_LOCATION = CameraMetadataSectionStart.ANDROID_AUTOMOTIVE_START,
/**
* android.automotive.lens.facing [static, enum[], public]
*
* <p>The direction of the camera faces relative to the vehicle body frame and the
* passenger seats.</p>
*/
ANDROID_AUTOMOTIVE_LENS_FACING = CameraMetadataSectionStart.ANDROID_AUTOMOTIVE_LENS_START,
/**
* android.jpegr.availableJpegRStreamConfigurations [static, enum[], ndk_public]
*
* <p>The available Jpeg/R stream
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
*/
ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS = CameraMetadataSectionStart.ANDROID_JPEGR_START,
/**
* android.jpegr.availableJpegRMinFrameDurations [static, int64[], ndk_public]
*
* <p>This lists the minimum frame duration for each
* format/size combination for Jpeg/R output formats.</p>
*/
ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS,
/**
* android.jpegr.availableJpegRStallDurations [static, int64[], ndk_public]
*
* <p>This lists the maximum stall duration for each
* output format/size combination for Jpeg/R streams.</p>
*/
ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS,
/**
* android.jpegr.availableJpegRStreamConfigurationsMaximumResolution [static, enum[], ndk_public]
*
* <p>The available Jpeg/R stream
* configurations that this camera device supports
* (i.e. format, width, height, output/input stream).</p>
*/
ANDROID_JPEGR_AVAILABLE_JPEG_R_STREAM_CONFIGURATIONS_MAXIMUM_RESOLUTION,
/**
* android.jpegr.availableJpegRMinFrameDurationsMaximumResolution [static, int64[], ndk_public]
*
* <p>This lists the minimum frame duration for each
* format/size combination for Jpeg/R output formats for CaptureRequests where
* ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_JPEGR_AVAILABLE_JPEG_R_MIN_FRAME_DURATIONS_MAXIMUM_RESOLUTION,
/**
* android.jpegr.availableJpegRStallDurationsMaximumResolution [static, int64[], ndk_public]
*
* <p>This lists the maximum stall duration for each
* output format/size combination for Jpeg/R streams for CaptureRequests where
* ANDROID_SENSOR_PIXEL_MODE is set to
* <a href="https://developer.android.com/reference/android/hardware/camera2/CameraMetadata.html#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION">CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION</a>.</p>
*
* @see ANDROID_SENSOR_PIXEL_MODE
*/
ANDROID_JPEGR_AVAILABLE_JPEG_R_STALL_DURATIONS_MAXIMUM_RESOLUTION,
}