| <?xml version="1.0" encoding="utf-8"?> |
| <!-- Copyright (C) 2012 The Android Open Source Project |
| |
| Licensed under the Apache License, Version 2.0 (the "License"); |
| you may not use this file except in compliance with the License. |
| You may obtain a copy of the License at |
| |
| http://www.apache.org/licenses/LICENSE-2.0 |
| |
| Unless required by applicable law or agreed to in writing, software |
| distributed under the License is distributed on an "AS IS" BASIS, |
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| See the License for the specific language governing permissions and |
| limitations under the License. |
| --> |
| <metadata xmlns="http://schemas.android.com/service/camera/metadata/" |
| xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" |
| xsi:schemaLocation="http://schemas.android.com/service/camera/metadata/ metadata_definitions.xsd"> |
| |
| <tags> |
| <tag id="BC"> |
| Needed for backwards compatibility with old Java API |
| </tag> |
| <tag id="V1"> |
| New features for the first camera2 API release (Android API level 21) |
| </tag> |
| <tag id="RAW"> |
| Needed for useful RAW image processing and DNG file support |
| </tag> |
| <tag id="HAL2"> |
| Entry is only used by camera device legacy HAL 2.x |
| </tag> |
| <tag id="FULL"> |
| Entry is required for full hardware level devices, and optional for other hardware levels |
| </tag> |
| <tag id="DEPTH"> |
| Entry is required for the depth capability. |
| </tag> |
| <tag id="REPROC"> |
| Entry is required for the YUV or PRIVATE reprocessing capability. |
| </tag> |
| <tag id="LOGICALCAMERA"> |
| Entry is required for logical multi-camera capability. |
| </tag> |
| <tag id="HEIC"> |
| Entry is required for devices with HEIC (High Efficiency Image Format) support. |
| </tag> |
| <tag id="FUTURE"> |
| Entry is under-specified and is not required for now. This is for book-keeping |
| purposes; do not implement or use it, as it may be revised in the future. |
| </tag> |
| </tags> |
| |
| <types> |
| <typedef name="pairFloatFloat"> |
| <language name="java">android.util.Pair<Float,Float></language> |
| </typedef> |
| <typedef name="pairDoubleDouble"> |
| <language name="java">android.util.Pair<Double,Double></language> |
| </typedef> |
| <typedef name="pairIntegerInteger"> |
| <language name="java">android.util.Pair<Integer,Integer></language> |
| </typedef> |
| <typedef name="rectangle"> |
| <language name="java">android.graphics.Rect</language> |
| </typedef> |
| <typedef name="size"> |
| <language name="java">android.util.Size</language> |
| </typedef> |
| <typedef name="string"> |
| <language name="java">String</language> |
| </typedef> |
| <typedef name="boolean"> |
| <language name="java">boolean</language> |
| </typedef> |
| <typedef name="imageFormat"> |
| <language name="java">int</language> |
| </typedef> |
| <typedef name="versionCode"> |
| <language name="java">int</language> |
| </typedef> |
| <typedef name="streamConfigurationMap"> |
| <language name="java">android.hardware.camera2.params.StreamConfigurationMap</language> |
| </typedef> |
| <typedef name="streamConfiguration"> |
| <language name="java">android.hardware.camera2.params.StreamConfiguration</language> |
| </typedef> |
| <typedef name="recommendedStreamConfiguration"> |
| <language |
| name="java">android.hardware.camera2.params.RecommendedStreamConfiguration</language> |
| </typedef> |
| <typedef name="streamConfigurationDuration"> |
| <language name="java">android.hardware.camera2.params.StreamConfigurationDuration</language> |
| </typedef> |
| <typedef name="face"> |
| <language name="java">android.hardware.camera2.params.Face</language> |
| </typedef> |
| <typedef name="meteringRectangle"> |
| <language name="java">android.hardware.camera2.params.MeteringRectangle</language> |
| </typedef> |
| <typedef name="rangeFloat"> |
| <language name="java">android.util.Range<Float></language> |
| </typedef> |
| <typedef name="rangeInt"> |
| <language name="java">android.util.Range<Integer></language> |
| </typedef> |
| <typedef name="rangeLong"> |
| <language name="java">android.util.Range<Long></language> |
| </typedef> |
| <typedef name="colorSpaceTransform"> |
| <language name="java">android.hardware.camera2.params.ColorSpaceTransform</language> |
| </typedef> |
| <typedef name="rggbChannelVector"> |
| <language name="java">android.hardware.camera2.params.RggbChannelVector</language> |
| </typedef> |
| <typedef name="blackLevelPattern"> |
| <language name="java">android.hardware.camera2.params.BlackLevelPattern</language> |
| </typedef> |
| <typedef name="enumList"> |
| <language name="java">int</language> |
| </typedef> |
| <typedef name="sizeF"> |
| <language name="java">android.util.SizeF</language> |
| </typedef> |
| <typedef name="point"> |
| <language name="java">android.graphics.Point</language> |
| </typedef> |
| <typedef name="pointF"> |
| <language name="java">android.graphics.PointF</language> |
| </typedef> |
| <typedef name="tonemapCurve"> |
| <language name="java">android.hardware.camera2.params.TonemapCurve</language> |
| </typedef> |
| <typedef name="lensShadingMap"> |
| <language name="java">android.hardware.camera2.params.LensShadingMap</language> |
| </typedef> |
| <typedef name="location"> |
| <language name="java">android.location.Location</language> |
| </typedef> |
| <typedef name="highSpeedVideoConfiguration"> |
| <language name="java">android.hardware.camera2.params.HighSpeedVideoConfiguration</language> |
| </typedef> |
| <typedef name="reprocessFormatsMap"> |
| <language name="java">android.hardware.camera2.params.ReprocessFormatsMap</language> |
| </typedef> |
| <typedef name="oisSample"> |
| <language name="java">android.hardware.camera2.params.OisSample</language> |
| </typedef> |
| <typedef name="mandatoryStreamCombination"> |
| <language name="java">android.hardware.camera2.params.MandatoryStreamCombination</language> |
| </typedef> |
| <typedef name="capability"> |
| <language name="java">android.hardware.camera2.params.Capability</language> |
| </typedef> |
| <typedef name="multiResolutionStreamConfigurationMap"> |
| <language name="java">android.hardware.camera2.params.MultiResolutionStreamConfigurationMap</language> |
| </typedef> |
| <typedef name="deviceStateSensorOrientationMap"> |
| <language name="java">android.hardware.camera2.params.DeviceStateSensorOrientationMap</language> |
| </typedef> |
| <typedef name="dynamicRangeProfiles"> |
| <language name="java">android.hardware.camera2.params.DynamicRangeProfiles</language> |
| </typedef> |
| <typedef name="colorSpaceProfiles"> |
| <language name="java">android.hardware.camera2.params.ColorSpaceProfiles</language> |
| </typedef> |
| <typedef name="lensIntrinsicsSample"> |
| <language name="java">android.hardware.camera2.params.LensIntrinsicsSample</language> |
| </typedef> |
| <typedef name="sharedSessionConfiguration"> |
| <language name="java">android.hardware.camera2.params.SharedSessionConfiguration</language> |
| </typedef> |
| </types> |
| |
| <namespace name="android"> |
| <section name="colorCorrection"> |
| <controls> |
| <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> |
| <enum> |
| <value>TRANSFORM_MATRIX |
| <notes>Use the android.colorCorrection.transform matrix |
| and android.colorCorrection.gains to do color conversion. |
| |
| All advanced white balance adjustments (not specified |
| by our white balance pipeline) must be disabled. |
| |
| If AWB is enabled with `android.control.awbMode != OFF`, then |
| TRANSFORM_MATRIX is ignored. The camera device will override |
| this value to either FAST or HIGH_QUALITY. |
| </notes> |
| </value> |
| <value>FAST |
| <notes>Color correction processing must not slow down |
| capture rate relative to sensor raw output. |
| |
| Advanced white balance adjustments above and beyond |
| the specified white balance pipeline may be applied. |
| |
| If AWB is enabled with `android.control.awbMode != OFF`, then |
| the camera device uses the last frame's AWB values |
| (or defaults if AWB has never been run). |
| </notes> |
| </value> |
| <value>HIGH_QUALITY |
| <notes>Color correction processing operates at improved |
| quality but the capture rate might be reduced (relative to sensor |
| raw output rate). |
| |
| Advanced white balance adjustments above and beyond |
| the specified white balance pipeline may be applied. |
| |
| If AWB is enabled with `android.control.awbMode != OFF`, then |
| the camera device uses the last frame's AWB values |
| (or defaults if AWB has never been run). |
| </notes> |
| </value> |
| <value hal_version="3.11" aconfig_flag="color_temperature">CCT |
| <notes>Use |
| android.colorCorrection.colorTemperature and |
| android.colorCorrection.colorTint to adjust the white balance based |
| on correlated color temperature. |
| |
| If AWB is enabled with `android.control.awbMode != OFF`, then |
| CCT is ignored. |
| </notes> |
| </value> |
| </enum> |
| |
| <description> |
| The mode control selects how the image data is converted from the |
| sensor's native color into linear sRGB color. |
| </description> |
| <range>Starting from API level 36, android.colorCorrection.availableModes |
| can be used to check the list of supported values. Prior to API level 36, |
| TRANSFORM_MATRIX, HIGH_QUALITY, and FAST are guaranteed to be available |
| as valid modes on devices that support this key.</range> |
| <details> |
| When auto-white balance (AWB) is enabled with android.control.awbMode, this |
| control is overridden by the AWB routine. When AWB is disabled, the |
| application controls how the color mapping is performed. |
| |
| We define the expected processing pipeline below. For consistency |
| across devices, this is always the case with TRANSFORM_MATRIX. |
| |
| When either FAST or HIGH_QUALITY is used, the camera device may |
| do additional processing but android.colorCorrection.gains and |
| android.colorCorrection.transform will still be provided by the |
| camera device (in the results) and be roughly correct. |
| |
| Switching to TRANSFORM_MATRIX and using the data provided from |
| FAST or HIGH_QUALITY will yield a picture with the same white point |
| as what was produced by the camera device in the earlier frame. |
| |
| The expected processing pipeline is as follows: |
| |
|  |
| |
| The white balance is encoded by two values, a 4-channel white-balance |
| gain vector (applied in the Bayer domain), and a 3x3 color transform |
| matrix (applied after demosaic). |
| |
| The 4-channel white-balance gains are defined as: |
| |
| android.colorCorrection.gains = [ R G_even G_odd B ] |
| |
| where `G_even` is the gain for green pixels on even rows of the |
| output, and `G_odd` is the gain for green pixels on the odd rows. |
| These may be identical for a given camera device implementation; if |
| the camera device does not support a separate gain for even/odd green |
| channels, it will use the `G_even` value, and write `G_odd` equal to |
| `G_even` in the output result metadata. |
| |
| The matrices for color transforms are defined as a 9-entry vector: |
| |
| android.colorCorrection.transform = [ I0 I1 I2 I3 I4 I5 I6 I7 I8 ] |
| |
| which define a transform from input sensor colors, `P_in = [ r g b ]`, |
| to output linear sRGB, `P_out = [ r' g' b' ]`, |
| |
| with colors as follows: |
| |
| r' = I0r + I1g + I2b |
| g' = I3r + I4g + I5b |
| b' = I6r + I7g + I8b |
| |
| Both the input and output value ranges must match. Overflow/underflow |
| values are clipped to fit within the range. |
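| |
| As a purely illustrative sketch (not part of this entry; the method and |
| variable names here are invented for the example), the per-pixel math above |
| can be written as: |
| |
|     // Illustrative sketch only: applies the documented 3x3 transform to a |
|     // demosaiced pixel in the normalized [0, 1] range. `I` holds |
|     // android.colorCorrection.transform flattened in row-major order. |
|     static float[] applyColorTransform(float[] I, float r, float g, float b) { |
|       float rOut = I[0] * r + I[1] * g + I[2] * b; |
|       float gOut = I[3] * r + I[4] * g + I[5] * b; |
|       float bOut = I[6] * r + I[7] * g + I[8] * b; |
|       // Overflow/underflow is clipped to fit the output range. |
|       return new float[] { |
|           Math.max(0.f, Math.min(1.f, rOut)), |
|           Math.max(0.f, Math.min(1.f, gOut)), |
|           Math.max(0.f, Math.min(1.f, bOut))}; |
|     } |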
| </details> |
| <hal_details> |
| HAL must support both FAST and HIGH_QUALITY if color correction control is available |
| on the camera device, but the underlying implementation can be the same for both modes. |
| That is, if the highest quality implementation on the camera device does not slow down |
| capture rate, then FAST and HIGH_QUALITY should generate the same output. |
| </hal_details> |
| </entry> |
| <entry name="transform" type="rational" visibility="public" |
| type_notes="3x3 rational matrix in row-major order" |
| container="array" typedef="colorSpaceTransform" hwlevel="full"> |
| <array> |
| <size>3</size> |
| <size>3</size> |
| </array> |
| <description>A color transform matrix to use to transform |
| from sensor RGB color space to output linear sRGB color space. |
| </description> |
| <units>Unitless scale factors</units> |
| <details>This matrix is either set by the camera device when the request |
| android.colorCorrection.mode is not TRANSFORM_MATRIX, or |
| directly by the application in the request when the |
| android.colorCorrection.mode is TRANSFORM_MATRIX. |
| |
| In the latter case, the camera device may round the matrix to account |
| for precision issues; the final rounded matrix should be reported back |
| in this matrix result metadata. The transform should keep the magnitude |
| of the output color values within `[0, 1.0]` (assuming input color |
| values are within the normalized range `[0, 1.0]`), or clipping may occur. |
| |
| The valid range of each matrix element varies across devices, but |
| values within [-1.5, 3.0] are guaranteed not to be clipped. |
| </details> |
| </entry> |
| <entry name="gains" type="float" visibility="public" |
| type_notes="A 1D array of floats for 4 color channel gains" |
| container="array" typedef="rggbChannelVector" hwlevel="full"> |
| <array> |
| <size>4</size> |
| </array> |
| <description>Gains applying to Bayer raw color channels for |
| white-balance.</description> |
| <units>Unitless gain factors</units> |
| <details> |
| These per-channel gains are either set by the camera device |
| when the request android.colorCorrection.mode is not |
| TRANSFORM_MATRIX, or directly by the application in the |
| request when the android.colorCorrection.mode is |
| TRANSFORM_MATRIX. |
| |
| The gains in the result metadata are the gains actually |
| applied by the camera device to the current frame. |
| |
| The valid range of gains varies across devices, but gains |
| between [1.0, 3.0] are guaranteed not to be clipped. Even if a given |
| device allows gains below 1.0, this is usually not recommended, because |
| it can create color artifacts. |
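| |
| For illustration, a minimal sketch of setting manual gains (assuming `builder` |
| is an existing CaptureRequest.Builder; the gain values are arbitrary, and a |
| full manual request would typically also set android.colorCorrection.transform): |
| |
|     // Sketch: manual white balance with arbitrary example gains. |
|     builder.set(CaptureRequest.COLOR_CORRECTION_MODE, |
|         CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX); |
|     builder.set(CaptureRequest.COLOR_CORRECTION_GAINS, |
|         new RggbChannelVector(2.0f, 1.0f, 1.0f, 1.8f));  // [R G_even G_odd B] |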
| </details> |
| <hal_details> |
| The 4-channel white-balance gains are defined in |
| the order of `[R G_even G_odd B]`, where `G_even` is the gain |
| for green pixels on even rows of the output, and `G_odd` |
| is the gain for green pixels on the odd rows. |
| |
| If a HAL does not support a separate gain for even/odd green |
| channels, it must use the `G_even` value, and write |
| `G_odd` equal to `G_even` in the output result metadata. |
| </hal_details> |
| </entry> |
| <entry name="aberrationMode" type="byte" visibility="public" enum="true" hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes> |
| No aberration correction is applied. |
| </notes> |
| </value> |
| <value>FAST |
| <notes> |
| Aberration correction will not slow down capture rate |
| relative to sensor raw output. |
| </notes> |
| </value> |
| <value>HIGH_QUALITY |
| <notes> |
| Aberration correction operates at improved quality but the capture rate might be |
| reduced (relative to sensor raw output rate). |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Mode of operation for the chromatic aberration correction algorithm. |
| </description> |
| <range>android.colorCorrection.availableAberrationModes</range> |
| <details> |
| Chromatic (color) aberration is caused by the fact that different wavelengths of light |
| cannot focus on the same point after exiting the lens. This metadata defines |
| the high-level control of the chromatic aberration correction algorithm, which aims to |
| minimize the chromatic artifacts that may occur along object boundaries in an |
| image. |
| |
| FAST/HIGH_QUALITY both mean that camera device-determined aberration |
| correction will be applied. HIGH_QUALITY mode indicates that the camera device will |
| use the highest-quality aberration correction algorithms, even if it slows down the |
| capture rate. FAST means the camera device will not slow down the capture rate when |
| applying aberration correction. |
| |
| LEGACY devices will always be in FAST mode. |
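| |
| For illustration, a minimal sketch of picking the best advertised mode |
| (assuming `chars` is the device's CameraCharacteristics and `builder` an |
| existing CaptureRequest.Builder): |
| |
|     // Sketch: prefer HIGH_QUALITY aberration correction when advertised. |
|     int[] modes = chars.get( |
|         CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES); |
|     int chosen = modes[0]; |
|     for (int m : modes) { |
|       if (m == CameraMetadata.COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) { |
|         chosen = m; |
|       } |
|     } |
|     builder.set(CaptureRequest.COLOR_CORRECTION_ABERRATION_MODE, chosen); |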
| </details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.colorCorrection.mode" kind="controls"> |
| </clone> |
| <clone entry="android.colorCorrection.transform" kind="controls"> |
| </clone> |
| <clone entry="android.colorCorrection.gains" kind="controls"> |
| </clone> |
| <clone entry="android.colorCorrection.aberrationMode" kind="controls"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="availableAberrationModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of aberration correction modes for android.colorCorrection.aberrationMode that are |
| supported by this camera device. |
| </description> |
| <range>Any value listed in android.colorCorrection.aberrationMode</range> |
| <details> |
| This key lists the valid modes for android.colorCorrection.aberrationMode. If no |
| aberration correction modes are available for a device, this list will solely include |
| OFF mode. All camera devices will support either OFF or FAST mode. |
| |
| Camera devices that support the MANUAL_POST_PROCESSING capability will always list |
| OFF mode. This includes all FULL level devices. |
| |
| LEGACY devices will always only support FAST mode. |
| </details> |
| <hal_details> |
| HAL must support both FAST and HIGH_QUALITY if chromatic aberration control is available |
| on the camera device, but the underlying implementation can be the same for both modes. |
| That is, if the highest quality implementation on the camera device does not slow down |
| capture rate, then FAST and HIGH_QUALITY will generate the same output. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| </static> |
| <controls> |
| <entry name="colorTemperature" type="int32" visibility="public" optional="true" |
| aconfig_flag="color_temperature" hal_version="3.11"> |
| <description> |
| Specifies the color temperature for CCT mode in Kelvin |
| to adjust the white balance of the image. |
| </description> |
| <units>Kelvin</units> |
| <range>android.colorCorrection.colorTemperatureRange</range> |
| <details> |
| Sets the color temperature in Kelvin units for when |
| android.colorCorrection.mode is CCT to adjust the |
| white balance of the image. |
| |
| If CCT mode is enabled without a requested color temperature, |
| a default value will be set by the camera device. The default value can be |
| retrieved by checking the corresponding capture result. Color temperatures |
| requested outside the advertised android.colorCorrection.colorTemperatureRange |
| will be clamped. |
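| |
| A minimal usage sketch follows; the CCT-related key and constant names here |
| are assumptions based on this definition (API level 36+) and should be |
| verified against the SDK. `builder` is an existing CaptureRequest.Builder: |
| |
|     // Hedged sketch: key/constant names are assumptions from this file. |
|     builder.set(CaptureRequest.COLOR_CORRECTION_MODE, |
|         CameraMetadata.COLOR_CORRECTION_MODE_CCT); |
|     builder.set(CaptureRequest.COLOR_CORRECTION_COLOR_TEMPERATURE, 5500); |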
| </details> |
| </entry> |
| <entry name="colorTint" type="int32" visibility="public" optional="true" |
| aconfig_flag="color_temperature" hal_version="3.11"> |
| <description> |
| Specifies the color tint for CCT mode to adjust the white |
| balance of the image. |
| </description> |
| <units>D_uv defined as the distance from the Planckian locus on the CIE 1931 xy |
| chromaticity diagram, with the range ±50 mapping to ±0.01 D_uv</units> |
| <range>The supported range, -50 to +50, corresponds to a D_uv distance |
| of ±0.01 below and above the Planckian locus. Some camera devices may have |
| limitations to achieving the full ±0.01 D_uv range at some color temperatures |
| (e.g., below 1500K). In these cases, the applied D_uv value may be clamped and |
| the actual color tint will be reported in the android.colorCorrection.colorTint |
| result.</range> |
| <details> |
| Sets the color tint for when android.colorCorrection.mode |
| is CCT to adjust the white balance of the image. |
| |
| If CCT mode is enabled without a requested color tint, |
| a default value will be set by the camera device. The default value can be |
| retrieved by checking the corresponding capture result. Color tints requested |
| outside the supported range will be clamped to the nearest limit (-50 or +50). |
| </details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.colorCorrection.colorTemperature" kind="controls"> |
| </clone> |
| <clone entry="android.colorCorrection.colorTint" kind="controls"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="colorTemperatureRange" type="int32" visibility="public" |
| optional="true" container="array" typedef="rangeInt" |
| aconfig_flag="color_temperature" hal_version="3.11"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>The range of supported color temperature values for |
| android.colorCorrection.colorTemperature.</description> |
| <range> |
| The minimum supported range will be [2856K,6500K]. The maximum supported |
| range will be [1000K,40000K]. |
| </range> |
| <details> |
| This key lists the valid range of color temperature values for |
| android.colorCorrection.colorTemperature supported by this camera device. |
| |
| This key will be null on devices that do not support CCT mode for |
| android.colorCorrection.mode. |
| </details> |
| </entry> |
| <entry name="availableModes" type="byte" visibility="public" |
| optional="true" type_notes="list of enums" container="array" typedef="enumList" |
| aconfig_flag="color_temperature" hal_version="3.11"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of color correction modes for android.colorCorrection.mode that are |
| supported by this camera device. |
| </description> |
| <range>Any value listed in android.colorCorrection.mode</range> |
| <details> |
| This key lists the valid modes for android.colorCorrection.mode. If no |
| color correction modes are available for a device, this key will be null. |
| |
| Camera devices that have a FULL hardware level will always include at least |
| FAST, HIGH_QUALITY, and TRANSFORM_MATRIX modes. |
| </details> |
| </entry> |
| </static> |
| </section> |
| <section name="control"> |
| <controls> |
| <entry name="aeAntibandingMode" type="byte" visibility="public" |
| enum="true" hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes> |
| The camera device will not adjust exposure duration to |
| avoid banding problems. |
| </notes> |
| </value> |
| <value>50HZ |
| <notes> |
| The camera device will adjust exposure duration to |
| avoid banding problems with 50Hz illumination sources. |
| </notes> |
| </value> |
| <value>60HZ |
| <notes> |
| The camera device will adjust exposure duration to |
| avoid banding problems with 60Hz illumination |
| sources. |
| </notes> |
| </value> |
| <value>AUTO |
| <notes> |
| The camera device will automatically adapt its |
| antibanding routine to the current illumination |
| condition. This is the default mode if AUTO is |
| available on the given camera device. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| The desired setting for the camera device's auto-exposure |
| algorithm's antibanding compensation. |
| </description> |
| <range> |
| android.control.aeAvailableAntibandingModes |
| </range> |
| <details> |
| Some kinds of lighting fixtures, such as some fluorescent |
| lights, flicker at the rate of the power supply frequency |
| (60Hz or 50Hz, depending on country). While this is |
| typically not noticeable to a person, it can be visible to |
| a camera device. If a camera sets its exposure time to the |
| wrong value, the flicker may become visible in the |
| viewfinder as flicker or in a final captured image, as a |
| set of variable-brightness bands across the image. |
| |
| Therefore, the auto-exposure routines of camera devices |
| include antibanding routines that ensure that the chosen |
| exposure value will not cause such banding. The choice of |
| exposure time depends on the rate of flicker, which the |
| camera device can detect automatically, or the expected |
| rate can be selected by the application using this |
| control. |
| |
| A given camera device may not support all of the possible |
| options for the antibanding mode. The |
| android.control.aeAvailableAntibandingModes key contains |
| the available modes for a given camera device. |
| |
| AUTO mode is the default if it is available on the given |
| camera device. When AUTO mode is not available, the |
| default will be either 50HZ or 60HZ, and both 50HZ |
| and 60HZ will be available. |
| |
| If manual exposure control is enabled (by setting |
| android.control.aeMode or android.control.mode to OFF), |
| then this setting has no effect, and the application must |
| ensure it selects exposure times that do not cause banding |
| issues. The android.statistics.sceneFlicker key can assist |
| the application in this. |
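| |
| For illustration, a minimal sketch of preferring AUTO when it is advertised |
| (assuming `chars` is the device's CameraCharacteristics and `builder` an |
| existing CaptureRequest.Builder): |
| |
|     // Sketch: select AUTO antibanding if listed, else keep the template default. |
|     for (int m : chars.get( |
|         CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES)) { |
|       if (m == CameraMetadata.CONTROL_AE_ANTIBANDING_MODE_AUTO) { |
|         builder.set(CaptureRequest.CONTROL_AE_ANTIBANDING_MODE, m); |
|       } |
|     } |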
| </details> |
| <hal_details> |
| For all capture request templates, this field must be set |
| to AUTO if AUTO mode is available. If AUTO is not available, |
| the default must be either 50HZ or 60HZ, and both 50HZ and |
| 60HZ must be available. |
| |
| If manual exposure control is enabled (by setting |
| android.control.aeMode or android.control.mode to OFF), |
| then the exposure values provided by the application must not be |
| adjusted for antibanding. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="aeExposureCompensation" type="int32" visibility="public" hwlevel="legacy"> |
| <description>Adjustment to auto-exposure (AE) target image |
| brightness.</description> |
| <units>Compensation steps</units> |
| <range>android.control.aeCompensationRange</range> |
| <details> |
| The adjustment is measured as a count of steps, with the |
| step size defined by android.control.aeCompensationStep and the |
| allowed range by android.control.aeCompensationRange. |
| |
| For example, if the exposure value (EV) step is 0.333, '6' |
| will mean an exposure compensation of +2 EV; -3 will mean an |
| exposure compensation of -1 EV. One EV represents a doubling |
| of image brightness. Note that this control will only be |
| effective if android.control.aeMode `!=` OFF. This control |
| will take effect even when android.control.aeLock `== true`. |
| |
| When the exposure compensation value is changed, the camera device |
| may take several frames to reach the newly requested exposure target. |
| During that time, the android.control.aeState field will be in the SEARCHING |
| state. Once the new exposure target is reached, android.control.aeState will |
| change from SEARCHING to either CONVERGED, LOCKED (if AE lock is enabled), or |
| FLASH_REQUIRED (if the scene is too dark for still capture). |
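| |
| For illustration, a sketch of requesting roughly +1 EV (assuming `chars` is |
| the device's CameraCharacteristics and `builder` an existing |
| CaptureRequest.Builder): |
| |
|     // Sketch: one EV corresponds to 1 / aeCompensationStep steps. |
|     Rational step = chars.get(CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP); |
|     int stepsPerEv = Math.round(1.0f / step.floatValue()); |
|     // The value should also be clamped to android.control.aeCompensationRange. |
|     builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, stepsPerEv); |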
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="aeLock" type="byte" visibility="public" enum="true" |
| typedef="boolean" hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes>Auto-exposure lock is disabled; the AE algorithm |
| is free to update its parameters.</notes></value> |
| <value>ON |
| <notes>Auto-exposure lock is enabled; the AE algorithm |
| must not update the exposure and sensitivity parameters |
| while the lock is active. |
| |
| android.control.aeExposureCompensation setting changes |
| will still take effect while auto-exposure is locked. |
| |
| Some rare LEGACY devices may not support |
| this, in which case the value will always be overridden to OFF. |
| </notes></value> |
| </enum> |
| <description>Whether auto-exposure (AE) is currently locked to its latest |
| calculated values.</description> |
| <details> |
| When set to `true` (ON), the AE algorithm is locked to its latest parameters, |
| and will not change exposure settings until the lock is set to `false` (OFF). |
| |
| Note that even when AE is locked, the flash may be fired if |
| the android.control.aeMode is ON_AUTO_FLASH / |
| ON_ALWAYS_FLASH / ON_AUTO_FLASH_REDEYE. |
| |
| When android.control.aeExposureCompensation is changed, even if the AE lock |
| is ON, the camera device will still adjust its exposure value. |
| |
| If AE precapture is triggered (see android.control.aePrecaptureTrigger) |
| when AE is already locked, the camera device will not change the exposure time |
| (android.sensor.exposureTime) and sensitivity (android.sensor.sensitivity) |
| parameters. The flash may be fired if the android.control.aeMode |
| is ON_AUTO_FLASH/ON_AUTO_FLASH_REDEYE and the scene is too dark. If the |
| android.control.aeMode is ON_ALWAYS_FLASH, the scene may become overexposed. |
| Similarly, AE precapture trigger CANCEL has no effect when AE is already locked. |
| |
| When an AE precapture sequence is triggered, AE unlock will not be able to unlock |
| the AE if AE is locked by the camera device internally during the precapture metering |
| sequence. In other words, submitting requests with AE unlock has no effect on an |
| ongoing precapture metering sequence. Otherwise, the precapture metering sequence |
| would never succeed in a sequence of preview requests where AE lock is always set |
| to `false`. |
| |
| Since the camera device has a pipeline of in-flight requests, the settings that |
| get locked do not necessarily correspond to the settings that were present in the |
| latest capture result received from the camera device, since additional captures |
| and AE updates may have occurred even before the result was sent out. If an |
| application is switching between automatic and manual control and wishes to eliminate |
| any flicker during the switch, the following procedure is recommended: |
| |
| 1. Starting in auto-AE mode: |
| 2. Lock AE |
| 3. Wait for the first result to be output that has the AE locked |
| 4. Copy exposure settings from that result into a request, set the request to manual AE |
| 5. Submit the capture request, proceed to run manual AE as desired. |
| |
| See android.control.aeState for AE lock related state transition details. |
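| |
| A minimal sketch of steps 4-5 of this procedure (assuming `builder` is the |
| Builder for the repeating request and `result` is the first TotalCaptureResult |
| reporting the AE lock): |
| |
|     // Sketch: copy the locked AE values into a manual-exposure request. |
|     builder.set(CaptureRequest.CONTROL_AE_MODE, |
|         CameraMetadata.CONTROL_AE_MODE_OFF); |
|     builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, |
|         result.get(CaptureResult.SENSOR_EXPOSURE_TIME)); |
|     builder.set(CaptureRequest.SENSOR_SENSITIVITY, |
|         result.get(CaptureResult.SENSOR_SENSITIVITY)); |
|     builder.set(CaptureRequest.SENSOR_FRAME_DURATION, |
|         result.get(CaptureResult.SENSOR_FRAME_DURATION)); |
|     // Submitting this request completes the flicker-free switch to manual AE. |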
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="aeMode" type="byte" visibility="public" enum="true" hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes> |
| The camera device's autoexposure routine is disabled. |
| |
| The application-selected android.sensor.exposureTime, |
| android.sensor.sensitivity and |
| android.sensor.frameDuration are used by the camera |
| device, along with android.flash.* fields, if there's |
| a flash unit for this camera device. |
| |
| Note that auto-white balance (AWB) and auto-focus (AF) |
| behavior is device dependent when AE is in OFF mode. |
| To have consistent behavior across different devices, |
| it is recommended to either set AWB and AF to OFF mode |
| or lock AWB and AF before setting AE to OFF. |
| See android.control.awbMode, android.control.afMode, |
| android.control.awbLock, and android.control.afTrigger |
| for more details. |
| |
| LEGACY devices do not support the OFF mode and will |
| override attempts to use this value to ON. |
| </notes> |
| </value> |
| <value>ON |
| <notes> |
| The camera device's autoexposure routine is active, |
| with no flash control. |
| |
| The application's values for |
| android.sensor.exposureTime, |
| android.sensor.sensitivity, and |
| android.sensor.frameDuration are ignored. The |
| application has control over the various |
| android.flash.* fields. |
| |
| If the device supports manual flash strength control, i.e., |
| if android.flash.singleStrengthMaxLevel and |
| android.flash.torchStrengthMaxLevel are greater than 1, then |
| the auto-exposure (AE) precapture metering sequence should be |
| triggered for the configured flash mode and strength to avoid |
| the image being incorrectly exposed at different |
| android.flash.strengthLevel. |
| </notes> |
| </value> |
| <value>ON_AUTO_FLASH |
| <notes> |
| Like ON, except that the camera device also controls |
| the camera's flash unit, firing it in low-light |
| conditions. |
| |
| The flash may be fired during a precapture sequence |
| (triggered by android.control.aePrecaptureTrigger) and |
| may be fired for captures for which the |
| android.control.captureIntent field is set to |
| STILL_CAPTURE |
| </notes> |
| </value> |
| <value>ON_ALWAYS_FLASH |
| <notes> |
| Like ON, except that the camera device also controls |
| the camera's flash unit, always firing it for still |
| captures. |
| |
| The flash may be fired during a precapture sequence |
| (triggered by android.control.aePrecaptureTrigger) and |
| will always be fired for captures for which the |
| android.control.captureIntent field is set to |
| STILL_CAPTURE |
| </notes> |
| </value> |
| <value>ON_AUTO_FLASH_REDEYE |
| <notes> |
| Like ON_AUTO_FLASH, but with automatic red eye |
| reduction. |
| |
| If deemed necessary by the camera device, a red eye |
| reduction flash will fire during the precapture |
| sequence. |
| </notes> |
| </value> |
| <value hal_version="3.3">ON_EXTERNAL_FLASH |
| <notes> |
| An external flash has been turned on. |
| |
| It informs the camera device that an external flash has been turned on, and that |
| metering (and continuous focus if active) should be quickly recalculated to account |
| for the external flash. Otherwise, this mode acts like ON. |
| |
| When the external flash is turned off, AE mode should be changed to one of the |
| other available AE modes. |
| |
| If the camera device supports AE external flash mode, android.control.aeState must |
| be FLASH_REQUIRED after the camera device finishes the AE scan and the scene is |
| too dark without flash. |
| </notes> |
| </value> |
| <value optional="true" hal_version="3.10" |
| aconfig_flag="camera_ae_mode_low_light_boost">ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY |
| <notes> |
| Like 'ON' but applies additional brightness boost in low light scenes. |
| |
| When the scene lighting conditions are within the range defined by |
| android.control.lowLightBoostInfoLuminanceRange this mode will apply additional |
| brightness boost. |
| |
| This mode will automatically adjust the intensity of low light boost applied |
| according to the scene lighting conditions. A darker scene will receive more boost |
| while a brighter scene will receive less boost. |
| |
| This mode can ignore the set target frame rate to allow more light to be captured, |
| which can result in choppier motion. The frame rate can extend lower than the |
| android.control.aeAvailableTargetFpsRanges, but will not go below 10 FPS. This mode |
| can also increase the sensor sensitivity gain, which can result in increased luma |
| and chroma noise. The sensor sensitivity gain can extend to higher values beyond |
| android.sensor.info.sensitivityRange. This mode may also apply additional |
| processing to recover details in dark and bright areas of the image, and noise |
| reduction at high sensitivity gain settings, to manage the trade-off between light |
| sensitivity and capture noise. |
| |
| This mode is restricted to two output surfaces. One output surface can be either |
| a SurfaceView or a TextureView, and the other can be either a MediaCodec or a |
| MediaRecorder. This mode cannot be used with a target FPS range higher than |
| 30 FPS. |
| |
| If the session configuration is not supported, the AE mode reported in the |
| CaptureResult will be 'ON' instead of 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY'. |
| |
| When this AE mode is enabled, the CaptureResult field |
| android.control.lowLightBoostState will indicate when low light boost is 'ACTIVE' |
| or 'INACTIVE'. By default android.control.lowLightBoostState will be 'INACTIVE'. |
| |
| The low light boost is 'ACTIVE' once the scene lux level drops below the upper |
| bound defined by android.control.lowLightBoostInfoLuminanceRange, and becomes |
| 'INACTIVE' once the scene lux level rises above that upper bound. |
| </notes> |
| </value> |
| </enum> |
| <description>The desired mode for the camera device's |
| auto-exposure routine.</description> |
| <range>android.control.aeAvailableModes</range> |
| <details> |
| This control is only effective if android.control.mode is |
| AUTO. |
| |
| When set to any of the ON modes, the camera device's |
| auto-exposure routine is enabled, overriding the |
| application's selected exposure time, sensor sensitivity, |
| and frame duration (android.sensor.exposureTime, |
| android.sensor.sensitivity, and |
| android.sensor.frameDuration). If android.control.aePriorityMode is |
| enabled, the relevant priority CaptureRequest settings will not be overridden. |
| See android.control.aePriorityMode for more details. If one of the FLASH modes |
| is selected, the camera device's flash unit controls are |
| also overridden. |
| |
| The FLASH modes are only available if the camera device |
| has a flash unit (android.flash.info.available is `true`). |
| |
| If flash TORCH mode is desired, this field must be set to |
| ON or OFF, and android.flash.mode set to TORCH. |
| |
| When set to any of the ON modes, the values chosen by the |
| camera device auto-exposure routine for the overridden |
| fields for a given capture will be available in its |
| CaptureResult. |
| |
| When android.control.aeMode is AE_MODE_ON and if the device |
| supports manual flash strength control, i.e., |
| if android.flash.singleStrengthMaxLevel and |
| android.flash.torchStrengthMaxLevel are greater than 1, then |
| the auto-exposure (AE) precapture metering sequence should be |
| triggered to avoid the image being incorrectly exposed at |
| different android.flash.strengthLevel. |
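| |
| For illustration, a minimal sketch of the TORCH combination described above |
| (assuming `builder` is an existing CaptureRequest.Builder on a device with a |
| flash unit): |
| |
|     // Sketch: keep AE running while holding the torch on. |
|     builder.set(CaptureRequest.CONTROL_AE_MODE, |
|         CameraMetadata.CONTROL_AE_MODE_ON); |
|     builder.set(CaptureRequest.FLASH_MODE, |
|         CameraMetadata.FLASH_MODE_TORCH); |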
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="aeRegions" type="int32" visibility="public" |
| optional="true" container="array" typedef="meteringRectangle"> |
| <array> |
| <size>5</size> |
| <size>area_count</size> |
| </array> |
| <description>List of metering areas to use for auto-exposure adjustment.</description> |
| <units>Pixel coordinates within android.sensor.info.activeArraySize or |
| android.sensor.info.preCorrectionActiveArraySize depending on |
| distortion correction capability and mode</units> |
| <range>Coordinates must be between `[(0,0), (width, height))` of |
| android.sensor.info.activeArraySize or android.sensor.info.preCorrectionActiveArraySize |
| depending on distortion correction capability and mode</range> |
| <details> |
| Not available if android.control.maxRegionsAe is 0. |
| Otherwise will always be present. |
| |
| The maximum number of regions supported by the device is determined by the value |
| of android.control.maxRegionsAe. |
| |
| For devices not supporting android.distortionCorrection.mode control, the coordinate |
| system always follows that of android.sensor.info.activeArraySize, with (0,0) being |
| the top-left pixel in the active pixel array, and |
| (android.sensor.info.activeArraySize.width - 1, |
| android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the |
| active pixel array. |
| |
| For devices supporting android.distortionCorrection.mode control, the coordinate |
| system depends on the mode being set. |
| When the distortion correction mode is OFF, the coordinate system follows |
| android.sensor.info.preCorrectionActiveArraySize, with |
| `(0, 0)` being the top-left pixel of the pre-correction active array, and |
| (android.sensor.info.preCorrectionActiveArraySize.width - 1, |
| android.sensor.info.preCorrectionActiveArraySize.height - 1) being the bottom-right |
| pixel in the pre-correction active pixel array. |
| When the distortion correction mode is not OFF, the coordinate system follows |
| android.sensor.info.activeArraySize, with |
| `(0, 0)` being the top-left pixel of the active array, and |
| (android.sensor.info.activeArraySize.width - 1, |
| android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the |
| active pixel array. |
| |
| The weight must be within `[0, 1000]`, and represents a weight |
| for every pixel in the area. This means that a large metering area |
| with the same weight as a smaller area will have more effect in |
| the metering result. Metering areas can partially overlap and the |
| camera device will add the weights in the overlap region. |
| |
| The weights are relative to weights of other exposure metering regions, so if only one |
| region is used, all non-zero weights will have the same effect. A region with 0 |
| weight is ignored. |
| |
| If all regions have 0 weight, then no specific metering area needs to be used by the |
| camera device. |
| |
| If the metering region is outside the used android.scaler.cropRegion returned in |
| capture result metadata, the camera device will ignore the sections outside the crop |
| region and output only the intersection rectangle as the metering region in the result |
| metadata. If the region is entirely outside the crop region, it will be ignored and |
| not reported in the result metadata. |
| |
| When setting the AE metering regions, the application must consider the additional |
| crop resulted from the aspect ratio differences between the preview stream and |
| android.scaler.cropRegion. For example, if the android.scaler.cropRegion is the full |
| active array size with 4:3 aspect ratio, and the preview stream is 16:9, |
| the boundary of AE regions will be [0, y_crop] and |
| [active_width, active_height - 2 * y_crop] rather than [0, 0] and |
| [active_width, active_height], where y_crop is the additional crop due to aspect ratio |
| mismatch. |
| |
| Starting from API level 30, the coordinate system of activeArraySize or |
| preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not |
| pre-zoom field of view. This means that the same aeRegions values at different |
| android.control.zoomRatio represent different parts of the scene. The aeRegions |
| coordinates are relative to the activeArray/preCorrectionActiveArray representing the |
| zoomed field of view. If android.control.zoomRatio is set to 1.0 (default), the same |
| aeRegions at different android.scaler.cropRegion still represent the same parts of the |
| scene as they do before. See android.control.zoomRatio for details. Whether to use |
| activeArraySize or preCorrectionActiveArraySize still depends on distortion correction |
| mode. |
| |
| For camera devices with the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability or devices where |
| {@link CameraCharacteristics#getAvailableCaptureRequestKeys} |
| lists android.sensor.pixelMode, |
| android.sensor.info.activeArraySizeMaximumResolution / |
| android.sensor.info.preCorrectionActiveArraySizeMaximumResolution must be used as the |
| coordinate system for requests where android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
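| |
| For illustration, a sketch of a single centered region at maximum weight |
| (assuming `active` is the Rect from SENSOR_INFO_ACTIVE_ARRAY_SIZE and |
| `builder` an existing CaptureRequest.Builder; the distortion correction and |
| zoom considerations above still apply): |
| |
|     // Sketch: one AE region covering the center quarter of the active array. |
|     MeteringRectangle center = new MeteringRectangle( |
|         active.width() / 4, active.height() / 4,   // x, y |
|         active.width() / 2, active.height() / 2,   // width, height |
|         MeteringRectangle.METERING_WEIGHT_MAX); |
|     builder.set(CaptureRequest.CONTROL_AE_REGIONS, |
|         new MeteringRectangle[] { center }); |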
| </details> |
| <ndk_details> |
| The data representation is `int[5 * area_count]`. |
| Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`. |
| The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and |
| ymax. |
| </ndk_details> |
| <hal_details> |
| The HAL level representation of MeteringRectangle[] is a |
| int[5 * area_count]. |
| Every five elements represent a metering region of |
| (xmin, ymin, xmax, ymax, weight). |
| The rectangle is defined to be inclusive on xmin and ymin, but |
| exclusive on xmax and ymax. |
| HAL must always report metering regions in the coordinate system of pre-correction |
| active array. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="aeTargetFpsRange" type="int32" visibility="public" |
| container="array" typedef="rangeInt" hwlevel="legacy"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>Range over which the auto-exposure routine can |
| adjust the capture frame rate to maintain good |
| exposure.</description> |
| <units>Frames per second (FPS)</units> |
| <range>Any of the entries in android.control.aeAvailableTargetFpsRanges</range> |
| <details>Only constrains auto-exposure (AE) algorithm, not |
| manual control of android.sensor.exposureTime and |
| android.sensor.frameDuration. |
| |
| Note that the actual achievable max framerate also depends on the minimum frame |
| duration of the output streams. The max frame rate will be |
| `min(aeTargetFpsRange.maxFps, 1 / max(individual stream min durations))`. For example, |
| if the application sets this key to `{60, 60}`, but the maximum minFrameDuration among |
| all configured streams is 33ms, the maximum framerate won't be 60fps, but will be |
| 30fps. |
| |
| To start a CaptureSession with a target FPS range different from the |
| capture request template's default value, the application |
| is strongly recommended to call |
| {@link android.hardware.camera2.params.SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters} |
| with the target fps range before creating the capture session. The aeTargetFpsRange is |
| typically a session parameter. Specifying it at session creation time helps avoid |
| session reconfiguration delays in cases like 60fps or high speed recording. |
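| |
| For illustration, the effective maximum frame rate computation above as a |
| sketch (`fpsRangeMax` and `maxStreamMinFrameDurationNs` are assumed inputs): |
| |
|     // Sketch: effective max fps is the smaller of the requested range's |
|     // maximum and the rate allowed by the slowest configured stream. |
|     // E.g. fpsRangeMax = 60 with a 33ms duration yields roughly 30 fps. |
|     double streamFps = 1e9 / maxStreamMinFrameDurationNs; |
|     double effectiveMaxFps = Math.min((double) fpsRangeMax, streamFps); |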
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="aePrecaptureTrigger" type="byte" visibility="public" |
| enum="true" hwlevel="limited"> |
| <enum> |
| <value>IDLE |
| <notes>The trigger is idle.</notes> |
| </value> |
| <value>START |
| <notes>The precapture metering sequence will be started |
| by the camera device. |
| |
| The exact effect of the precapture trigger depends on |
| the current AE mode and state.</notes> |
| </value> |
| <value>CANCEL |
| <notes>The camera device will cancel any currently active or completed |
| precapture metering sequence, the auto-exposure routine will return to its |
| initial state.</notes> |
| </value> |
| </enum> |
| <description>Whether the camera device will trigger a precapture |
| metering sequence when it processes this request.</description> |
| <details>This entry is normally set to IDLE, or is not |
| included at all in the request settings. When included and |
| set to START, the camera device will trigger the auto-exposure (AE) |
| precapture metering sequence. |
| |
| When set to CANCEL, the camera device will cancel any active |
| precapture metering trigger, and return to its initial AE state. |
| If a precapture metering sequence is already completed, and the camera |
| device has implicitly locked the AE for subsequent still capture, the |
| CANCEL trigger will unlock the AE and return to its initial AE state. |
| |
| The precapture sequence should be triggered before starting a |
| high-quality still capture for final metering decisions to |
| be made, and for firing pre-capture flash pulses to estimate |
| scene brightness and required final capture flash power, when |
| the flash is enabled. |
| |
| Flash is enabled during the precapture sequence when: |
| |
| * AE mode is ON_ALWAYS_FLASH |
| * AE mode is ON_AUTO_FLASH and the scene is deemed too dark without flash, or |
| * AE mode is ON and flash mode is TORCH or SINGLE |
| |
| Normally, this entry should be set to START for only a single request, and the |
| application should wait until the sequence completes before starting a new one. |
| |
| When a precapture metering sequence is finished, the camera device |
| may lock the auto-exposure routine internally to be able to accurately expose the |
| subsequent still capture image (`android.control.captureIntent == STILL_CAPTURE`). |
| For this case, the AE may not resume normal scan if no subsequent still capture is |
| submitted. To ensure that the AE routine restarts normal scan, the application should |
| submit a request with `android.control.aeLock == true`, followed by a request |
| with `android.control.aeLock == false`, if the application decides not to submit a |
| still capture request after the precapture sequence completes. Alternatively, for |
| API level 23 or newer devices, the CANCEL can be used to unlock the camera device |
| internally locked AE if the application doesn't submit a still capture request after |
| the AE precapture trigger. Note that CANCEL was added in API level 23, and must not |
| be used on devices with earlier API levels. |
| |
| The exact effect of auto-exposure (AE) precapture trigger |
| depends on the current AE mode and state; see |
| android.control.aeState for AE precapture state transition |
| details. |
| |
| On LEGACY-level devices, the precapture trigger is not supported; |
| capturing a high-resolution JPEG image will automatically trigger a |
| precapture sequence before the high-resolution capture, including |
| potentially firing a pre-capture flash. |
| |
| Using the precapture trigger and the auto-focus trigger android.control.afTrigger |
| simultaneously is allowed. However, since these triggers often require cooperation between |
| the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a |
| focus sweep), the camera device may delay acting on a later trigger until the previous |
| trigger has been fully handled. This may lead to longer intervals between the trigger and |
| changes to android.control.aeState indicating the start of the precapture sequence, for |
| example. |
| |
| If both the precapture and the auto-focus trigger are activated on the same request, then |
| the camera device will complete them in the optimal order for that device. |
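| |
| For illustration, a minimal sketch of firing the precapture sequence before a |
| still capture (assuming `builder` mirrors the active preview request and that |
| `session` and `handler` exist; the capture callback is omitted): |
| |
|     // Sketch: one-shot precapture trigger, then reset to IDLE so later |
|     // requests do not re-trigger the sequence. |
|     builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, |
|         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START); |
|     session.capture(builder.build(), null, handler); |
|     builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, |
|         CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE); |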
| </details> |
| <hal_details> |
| The HAL must support triggering the AE precapture trigger while an AF trigger is active |
| (and vice versa), or at the same time as the AF trigger. It is acceptable for the HAL to |
| treat these as two consecutive triggers, for example handling the AF trigger and then the |
| AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once, |
| to minimize the latency for converging both focus and exposure/flash usage. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="afMode" type="byte" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes>The auto-focus routine does not control the lens; |
| android.lens.focusDistance is controlled by the |
| application.</notes></value> |
| <value>AUTO |
| <notes>Basic automatic focus mode. |
| |
| In this mode, the lens does not move unless |
| the autofocus trigger action is called. When that trigger |
| is activated, AF will transition to ACTIVE_SCAN, then to |
| the outcome of the scan (FOCUSED or NOT_FOCUSED). |
| |
| Always supported if the lens is not fixed focus. |
| |
| Use android.lens.info.minimumFocusDistance to determine if the lens |
| is fixed focus. |
| |
| Triggering AF_CANCEL resets the lens position to default, |
| and sets the AF state to INACTIVE.</notes></value> |
| <value>MACRO |
| <notes>Close-up focusing mode. |
| |
| In this mode, the lens does not move unless the |
| autofocus trigger action is called. When that trigger is |
| activated, AF will transition to ACTIVE_SCAN, then to |
| the outcome of the scan (FOCUSED or NOT_FOCUSED). This |
| mode is optimized for focusing on objects very close to |
| the camera. |
| |
| Triggering cancel AF resets the lens |
| position to default, and sets the AF state to |
| INACTIVE.</notes></value> |
| <value>CONTINUOUS_VIDEO |
| <notes>In this mode, the AF algorithm modifies the lens |
| position continually to attempt to provide a |
| constantly-in-focus image stream. |
| |
| The focusing behavior should be suitable for good quality |
| video recording; typically this means slower focus |
| movement and no overshoots. When the AF trigger is not |
| involved, the AF algorithm should start in INACTIVE state, |
| and then transition into PASSIVE_SCAN and PASSIVE_FOCUSED |
| states as appropriate. When the AF trigger is activated, |
| the algorithm should immediately transition into |
| AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the |
| lens position until a cancel AF trigger is received. |
| |
| Once cancel is received, the algorithm should transition |
| back to INACTIVE and resume passive scan. Note that this |
| behavior is not identical to CONTINUOUS_PICTURE, since an |
| ongoing PASSIVE_SCAN must immediately be |
| canceled.</notes></value> |
| <value>CONTINUOUS_PICTURE |
| <notes>In this mode, the AF algorithm modifies the lens |
| position continually to attempt to provide a |
| constantly-in-focus image stream. |
| |
| The focusing behavior should be suitable for still image |
| capture; typically this means focusing as fast as |
| possible. When the AF trigger is not involved, the AF |
| algorithm should start in INACTIVE state, and then |
| transition into PASSIVE_SCAN and PASSIVE_FOCUSED states as |
| appropriate as it attempts to maintain focus. When the AF |
| trigger is activated, the algorithm should finish its |
| PASSIVE_SCAN if active, and then transition into |
| AF_FOCUSED or AF_NOT_FOCUSED as appropriate, and lock the |
| lens position until a cancel AF trigger is received. |
| |
| When the AF cancel trigger is activated, the algorithm |
| should transition back to INACTIVE and then act as if it |
| has just been started.</notes></value> |
| <value>EDOF |
| <notes>Extended depth of field (digital focus) mode. |
| |
| The camera device will produce images with an extended |
| depth of field automatically; no special focusing |
| operations need to be done before taking a picture. |
| |
| AF triggers are ignored, and the AF state will always be |
| INACTIVE.</notes></value> |
| </enum> |
| <description>Whether auto-focus (AF) is currently enabled, and what |
| mode it is set to.</description> |
| <range>android.control.afAvailableModes</range> |
| <details>Only effective if android.control.mode = AUTO and the lens is not fixed focus |
| (i.e. `android.lens.info.minimumFocusDistance > 0`). Also note that |
| when android.control.aeMode is OFF, the behavior of AF is device |
| dependent. It is recommended to lock AF by using android.control.afTrigger before |
| setting android.control.aeMode to OFF, or set AF mode to OFF when AE is OFF. |
| |
| If the lens is controlled by the camera device auto-focus algorithm, |
| the camera device will report the current AF status in android.control.afState |
| in result metadata.</details> |
| <hal_details> |
| When afMode is AUTO or MACRO, the lens must not move until an AF trigger is sent in a |
| request (android.control.afTrigger `==` START). After an AF trigger, the afState will end |
| up with either FOCUSED_LOCKED or NOT_FOCUSED_LOCKED state (see |
| android.control.afState for detailed state transitions), which indicates that the lens is |
| locked and will not move. If camera movement (e.g. tilting the camera) causes the lens to move |
| after the lens is locked, the HAL must compensate for this movement appropriately such that |
| the same focal plane remains in focus. |
| |
| When afMode is one of the continuous auto focus modes, the HAL is free to start an AF |
| scan whenever it's not locked. When the lens is locked after an AF trigger |
| (see android.control.afState for detailed state transitions), the HAL should maintain the |
| same lock behavior as above. |
| |
| When afMode is OFF, the application controls focus manually. The accuracy of the |
| focus distance control depends on the android.lens.info.focusDistanceCalibration. |
| However, the lens must not move regardless of the camera movement for any focus distance |
| manual control. |
| |
| To put this in concrete terms, if the camera has lens elements which may move based on |
| camera orientation or motion (e.g. due to gravity), then the HAL must drive the lens to |
| remain in a fixed position invariant to the camera's orientation or motion, for example, |
| by using accelerometer measurements in the lens control logic. This is a typical issue |
| that will arise on camera modules with open-loop VCMs. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="afRegions" type="int32" visibility="public" |
| optional="true" container="array" typedef="meteringRectangle"> |
| <array> |
| <size>5</size> |
| <size>area_count</size> |
| </array> |
| <description>List of metering areas to use for auto-focus.</description> |
| <units>Pixel coordinates within android.sensor.info.activeArraySize or |
| android.sensor.info.preCorrectionActiveArraySize depending on |
| distortion correction capability and mode</units> |
| <range>Coordinates must be between `[(0,0), (width, height))` of |
| android.sensor.info.activeArraySize or android.sensor.info.preCorrectionActiveArraySize |
| depending on distortion correction capability and mode</range> |
| <details> |
| Not available if android.control.maxRegionsAf is 0. |
| Otherwise will always be present. |
| |
| The maximum number of focus areas supported by the device is determined by the value |
| of android.control.maxRegionsAf. |
| |
| For devices not supporting android.distortionCorrection.mode control, the coordinate |
| system always follows that of android.sensor.info.activeArraySize, with (0,0) being |
| the top-left pixel in the active pixel array, and |
| (android.sensor.info.activeArraySize.width - 1, |
| android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the |
| active pixel array. |
| |
| For devices supporting android.distortionCorrection.mode control, the coordinate |
| system depends on the mode being set. |
| When the distortion correction mode is OFF, the coordinate system follows |
| android.sensor.info.preCorrectionActiveArraySize, with |
| `(0, 0)` being the top-left pixel of the pre-correction active array, and |
| (android.sensor.info.preCorrectionActiveArraySize.width - 1, |
| android.sensor.info.preCorrectionActiveArraySize.height - 1) being the bottom-right |
| pixel in the pre-correction active pixel array. |
| When the distortion correction mode is not OFF, the coordinate system follows |
| android.sensor.info.activeArraySize, with |
| `(0, 0)` being the top-left pixel of the active array, and |
| (android.sensor.info.activeArraySize.width - 1, |
| android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the |
| active pixel array. |
| |
| The weight must be within `[0, 1000]`, and represents a weight |
| for every pixel in the area. This means that a large metering area |
| with the same weight as a smaller area will have more effect in |
| the metering result. Metering areas can partially overlap and the |
| camera device will add the weights in the overlap region. |
| |
| The weights are relative to weights of other metering regions, so if only one region |
| is used, all non-zero weights will have the same effect. A region with 0 weight is |
| ignored. |
| |
| If all regions have 0 weight, then no specific metering area needs to be used by the |
| camera device. The capture result will either be a zero weight region as well, or |
| the region selected by the camera device as the focus area of interest. |
| |
| If the metering region is outside the used android.scaler.cropRegion returned in |
| capture result metadata, the camera device will ignore the sections outside the crop |
| region and output only the intersection rectangle as the metering region in the result |
| metadata. If the region is entirely outside the crop region, it will be ignored and |
| not reported in the result metadata. |
| |
| When setting the AF metering regions, the application must consider the additional |
| crop resulting from the aspect ratio differences between the preview stream and |
| android.scaler.cropRegion. For example, if the android.scaler.cropRegion is the full |
| active array size with 4:3 aspect ratio, and the preview stream is 16:9, |
| the boundary of AF regions will be [0, y_crop] and |
| [active_width, active_height - 2 * y_crop] rather than [0, 0] and |
| [active_width, active_height], where y_crop is the additional crop due to aspect ratio |
| mismatch. |
| |
| Starting from API level 30, the coordinate system of activeArraySize or |
| preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not |
| pre-zoom field of view. This means that the same afRegions values at different |
| android.control.zoomRatio represent different parts of the scene. The afRegions |
| coordinates are relative to the activeArray/preCorrectionActiveArray representing the |
| zoomed field of view. If android.control.zoomRatio is set to 1.0 (default), the same |
| afRegions at different android.scaler.cropRegion still represent the same parts of the |
| scene as they do before. See android.control.zoomRatio for details. Whether to use |
| activeArraySize or preCorrectionActiveArraySize still depends on distortion correction |
| mode. |
| |
| For camera devices with the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability or devices where |
| {@link CameraCharacteristics#getAvailableCaptureRequestKeys} |
| lists android.sensor.pixelMode, |
| android.sensor.info.activeArraySizeMaximumResolution / |
| android.sensor.info.preCorrectionActiveArraySizeMaximumResolution must be used as the |
| coordinate system for requests where android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
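| |
| As a minimal sketch only (Java, camera2; `characteristics` and `builder` are |
| assumed to be an already-obtained CameraCharacteristics and |
| CaptureRequest.Builder, and the region size and weight are arbitrary example |
| values), a single center-weighted AF region might be set like this: |
| |
|     // Sketch: one centered AF region covering 1/4 of the active array area, |
|     // in the activeArraySize coordinate system (distortion correction not OFF). |
|     int maxAfRegions = characteristics.get( |
|             CameraCharacteristics.CONTROL_MAX_REGIONS_AF); |
|     if (maxAfRegions > 0) { |
|         Rect active = characteristics.get( |
|                 CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); |
|         int w = active.width() / 2, h = active.height() / 2; |
|         MeteringRectangle region = new MeteringRectangle( |
|                 (active.width() - w) / 2, (active.height() - h) / 2, w, h, |
|                 MeteringRectangle.METERING_WEIGHT_MAX); // weight within [0, 1000] |
|         builder.set(CaptureRequest.CONTROL_AF_REGIONS, |
|                 new MeteringRectangle[] { region }); |
|     } |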
| </details> |
| <ndk_details> |
| The data representation is `int[5 * area_count]`. |
| Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`. |
| The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and |
| ymax. |
| </ndk_details> |
| <hal_details> |
| The HAL level representation of MeteringRectangle[] is an |
| int[5 * area_count]. |
| Every five elements represent a metering region of |
| (xmin, ymin, xmax, ymax, weight). |
| The rectangle is defined to be inclusive on xmin and ymin, but |
| exclusive on xmax and ymax. |
| HAL must always report metering regions in the coordinate system of pre-correction |
| active array. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="afTrigger" type="byte" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value>IDLE |
| <notes>The trigger is idle.</notes> |
| </value> |
| <value>START |
| <notes>Autofocus will trigger now.</notes> |
| </value> |
| <value>CANCEL |
| <notes>Autofocus will return to its initial |
| state, and cancel any currently active trigger.</notes> |
| </value> |
| </enum> |
| <description> |
| Whether the camera device will trigger autofocus for this request. |
| </description> |
| <details>This entry is normally set to IDLE, or is not |
| included at all in the request settings. |
| |
| When included and set to START, the camera device will trigger the |
| autofocus algorithm. If autofocus is disabled, this trigger has no effect. |
| |
| When set to CANCEL, the camera device will cancel any active trigger, |
| and return to its initial AF state. |
| |
| Generally, applications should set this entry to START or CANCEL for only a |
| single capture, and then return it to IDLE (or not set at all). Specifying |
| START for multiple captures in a row means restarting the AF operation over |
| and over again. |
| |
| See android.control.afState for what the trigger means for each AF mode. |
| |
| Using the autofocus trigger and the precapture trigger android.control.aePrecaptureTrigger |
| simultaneously is allowed. However, since these triggers often require cooperation between |
| the auto-focus and auto-exposure routines (for example, the flash may need to be enabled for a |
| focus sweep), the camera device may delay acting on a later trigger until the previous |
| trigger has been fully handled. This may lead to longer intervals between the trigger and |
| changes to android.control.afState, for example. |
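| |
| As an illustrative sketch only (assuming `builder` is the repeating request's |
| CaptureRequest.Builder, and `session`, `callback`, and `handler` already |
| exist; error handling is omitted), a one-shot AF trigger could look like: |
| |
|     // Sketch: fire AF once, then return the trigger to IDLE so the |
|     // repeating request does not restart the scan on every frame. |
|     builder.set(CaptureRequest.CONTROL_AF_TRIGGER, |
|             CameraMetadata.CONTROL_AF_TRIGGER_START); |
|     session.capture(builder.build(), callback, handler); // single START capture |
|     builder.set(CaptureRequest.CONTROL_AF_TRIGGER, |
|             CameraMetadata.CONTROL_AF_TRIGGER_IDLE); |
|     session.setRepeatingRequest(builder.build(), callback, handler); |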
| </details> |
| <hal_details> |
| The HAL must support triggering the AF trigger while an AE precapture trigger is active |
| (and vice versa), or at the same time as the AE trigger. It is acceptable for the HAL to |
| treat these as two consecutive triggers, for example handling the AF trigger and then the |
| AE trigger. Or the HAL may choose to optimize the case with both triggers fired at once, |
| to minimize the latency for converging both focus and exposure/flash usage. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="awbLock" type="byte" visibility="public" enum="true" |
| typedef="boolean" hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes>Auto-white balance lock is disabled; the AWB |
| algorithm is free to update its parameters if in AUTO |
| mode.</notes></value> |
| <value>ON |
| <notes>Auto-white balance lock is enabled; the AWB |
| algorithm will not update its parameters while the lock |
| is active.</notes></value> |
| </enum> |
| <description>Whether auto-white balance (AWB) is currently locked to its |
| latest calculated values.</description> |
| <details> |
| When set to `true` (ON), the AWB algorithm is locked to its latest parameters, |
| and will not change color balance settings until the lock is set to `false` (OFF). |
| |
| Since the camera device has a pipeline of in-flight requests, the settings that |
| get locked do not necessarily correspond to the settings that were present in the |
| latest capture result received from the camera device, since additional captures |
| and AWB updates may have occurred even before the result was sent out. If an |
| application is switching between automatic and manual control and wishes to eliminate |
| any flicker during the switch, the following procedure is recommended: |
| |
| 1. Starting in auto-AWB mode: |
| 2. Lock AWB |
| 3. Wait for the first result to be output that has the AWB locked |
| 4. Copy AWB settings from that result into a request, set the request to manual AWB |
| 5. Submit the capture request, proceed to run manual AWB as desired (a code |
| sketch of these steps follows below). |
| |
| Note that AWB lock is only meaningful when |
| android.control.awbMode is in the AUTO mode; in other modes, |
| AWB is already fixed to a specific setting. |
| |
| Some LEGACY devices may not support ON; the value is then overridden to OFF. |
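| |
| A sketch of steps 2-5 above (assuming `builder`, `session`, and `handler` |
| already exist; error handling is omitted and the enclosing method is assumed |
| to handle CameraAccessException): |
| |
|     // Sketch: lock AWB, wait for the lock to appear in a result, then |
|     // copy the locked color correction values into a manual AWB request. |
|     builder.set(CaptureRequest.CONTROL_AWB_LOCK, true); // step 2 |
|     session.setRepeatingRequest(builder.build(), |
|             new CameraCaptureSession.CaptureCallback() { |
|         @Override |
|         public void onCaptureCompleted(CameraCaptureSession s, |
|                 CaptureRequest req, TotalCaptureResult result) { |
|             Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE); |
|             if (awbState == null |
|                     || awbState != CameraMetadata.CONTROL_AWB_STATE_LOCKED) { |
|                 return; // step 3: keep waiting for the lock to take effect |
|             } |
|             // step 4: switch to manual AWB using the locked values |
|             builder.set(CaptureRequest.CONTROL_AWB_MODE, |
|                     CameraMetadata.CONTROL_AWB_MODE_OFF); |
|             builder.set(CaptureRequest.COLOR_CORRECTION_MODE, |
|                     CameraMetadata.COLOR_CORRECTION_MODE_TRANSFORM_MATRIX); |
|             builder.set(CaptureRequest.COLOR_CORRECTION_GAINS, |
|                     result.get(CaptureResult.COLOR_CORRECTION_GAINS)); |
|             builder.set(CaptureRequest.COLOR_CORRECTION_TRANSFORM, |
|                     result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)); |
|             try { |
|                 s.setRepeatingRequest(builder.build(), null, handler); // step 5 |
|             } catch (CameraAccessException e) { /* handle */ } |
|         } |
|     }, handler); |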
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="awbMode" type="byte" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes> |
| The camera device's auto-white balance routine is disabled. |
| |
| The application-selected color transform matrix |
| (android.colorCorrection.transform) and gains |
| (android.colorCorrection.gains) are used by the camera |
| device for manual white balance control. |
| </notes> |
| </value> |
| <value>AUTO |
| <notes> |
| The camera device's auto-white balance routine is active. |
| |
| The application's values for android.colorCorrection.transform |
| and android.colorCorrection.gains are ignored. |
| For devices that support the MANUAL_POST_PROCESSING capability, the |
| values used by the camera device for the transform and gains |
| will be available in the capture result for this request. |
| </notes> |
| </value> |
| <value>INCANDESCENT |
| <notes> |
| The camera device's auto-white balance routine is disabled; |
| the camera device uses incandescent light as the assumed scene |
| illumination for white balance. |
| |
| While the exact white balance transforms are up to the |
| camera device, they will approximately match the CIE |
| standard illuminant A. |
| |
| The application's values for android.colorCorrection.transform |
| and android.colorCorrection.gains are ignored. |
| For devices that support the MANUAL_POST_PROCESSING capability, the |
| values used by the camera device for the transform and gains |
| will be available in the capture result for this request. |
| </notes> |
| </value> |
| <value>FLUORESCENT |
| <notes> |
| The camera device's auto-white balance routine is disabled; |
| the camera device uses fluorescent light as the assumed scene |
| illumination for white balance. |
| |
| While the exact white balance transforms are up to the |
| camera device, they will approximately match the CIE |
| standard illuminant F2. |
| |
| The application's values for android.colorCorrection.transform |
| and android.colorCorrection.gains are ignored. |
| For devices that support the MANUAL_POST_PROCESSING capability, the |
| values used by the camera device for the transform and gains |
| will be available in the capture result for this request. |
| </notes> |
| </value> |
| <value>WARM_FLUORESCENT |
| <notes> |
| The camera device's auto-white balance routine is disabled; |
| the camera device uses warm fluorescent light as the assumed scene |
| illumination for white balance. |
| |
| While the exact white balance transforms are up to the |
| camera device, they will approximately match the CIE |
| standard illuminant F4. |
| |
| The application's values for android.colorCorrection.transform |
| and android.colorCorrection.gains are ignored. |
| For devices that support the MANUAL_POST_PROCESSING capability, the |
| values used by the camera device for the transform and gains |
| will be available in the capture result for this request. |
| </notes> |
| </value> |
| <value>DAYLIGHT |
| <notes> |
| The camera device's auto-white balance routine is disabled; |
| the camera device uses daylight light as the assumed scene |
| illumination for white balance. |
| |
| While the exact white balance transforms are up to the |
| camera device, they will approximately match the CIE |
| standard illuminant D65. |
| |
| The application's values for android.colorCorrection.transform |
| and android.colorCorrection.gains are ignored. |
| For devices that support the MANUAL_POST_PROCESSING capability, the |
| values used by the camera device for the transform and gains |
| will be available in the capture result for this request. |
| </notes> |
| </value> |
| <value>CLOUDY_DAYLIGHT |
| <notes> |
| The camera device's auto-white balance routine is disabled; |
| the camera device uses cloudy daylight light as the assumed scene |
| illumination for white balance. |
| |
| The application's values for android.colorCorrection.transform |
| and android.colorCorrection.gains are ignored. |
| For devices that support the MANUAL_POST_PROCESSING capability, the |
| values used by the camera device for the transform and gains |
| will be available in the capture result for this request. |
| </notes> |
| </value> |
| <value>TWILIGHT |
| <notes> |
| The camera device's auto-white balance routine is disabled; |
| the camera device uses twilight light as the assumed scene |
| illumination for white balance. |
| |
| The application's values for android.colorCorrection.transform |
| and android.colorCorrection.gains are ignored. |
| For devices that support the MANUAL_POST_PROCESSING capability, the |
| values used by the camera device for the transform and gains |
| will be available in the capture result for this request. |
| </notes> |
| </value> |
| <value>SHADE |
| <notes> |
| The camera device's auto-white balance routine is disabled; |
| the camera device uses shade light as the assumed scene |
| illumination for white balance. |
| |
| The application's values for android.colorCorrection.transform |
| and android.colorCorrection.gains are ignored. |
| For devices that support the MANUAL_POST_PROCESSING capability, the |
| values used by the camera device for the transform and gains |
| will be available in the capture result for this request. |
| </notes> |
| </value> |
| </enum> |
| <description>Whether auto-white balance (AWB) is currently setting the color |
| transform fields, and what its illumination target |
| is.</description> |
| <range>android.control.awbAvailableModes</range> |
| <details> |
| This control is only effective if android.control.mode is AUTO. |
| |
| When set to the AUTO mode, the camera device's auto-white balance |
| routine is enabled, overriding the application's selected |
| android.colorCorrection.transform, android.colorCorrection.gains and |
| android.colorCorrection.mode. Note that when android.control.aeMode |
| is OFF, the behavior of AWB is device dependent. It is recommended to |
| also set AWB mode to OFF or lock AWB by using android.control.awbLock before |
| setting AE mode to OFF. |
| |
| When set to the OFF mode, the camera device's auto-white balance |
| routine is disabled. The application manually controls the white |
| balance by android.colorCorrection.transform, android.colorCorrection.gains |
| and android.colorCorrection.mode. |
| |
| When set to any other modes, the camera device's auto-white |
| balance routine is disabled. The camera device uses each |
| particular illumination target for white balance |
| adjustment. The application's values for |
| android.colorCorrection.transform, |
| android.colorCorrection.gains and |
| android.colorCorrection.mode are ignored. |
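| |
| As a brief sketch (assuming `characteristics` and `builder`), an application |
| might fall back to AUTO when a preset illuminant is not listed: |
| |
|     // Sketch: prefer the DAYLIGHT preset when the device advertises it. |
|     int[] awbModes = characteristics.get( |
|             CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES); |
|     boolean hasDaylight = false; |
|     for (int m : awbModes) { |
|         if (m == CameraMetadata.CONTROL_AWB_MODE_DAYLIGHT) hasDaylight = true; |
|     } |
|     builder.set(CaptureRequest.CONTROL_AWB_MODE, hasDaylight |
|             ? CameraMetadata.CONTROL_AWB_MODE_DAYLIGHT |
|             : CameraMetadata.CONTROL_AWB_MODE_AUTO); |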
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="awbRegions" type="int32" visibility="public" |
| optional="true" container="array" typedef="meteringRectangle"> |
| <array> |
| <size>5</size> |
| <size>area_count</size> |
| </array> |
| <description>List of metering areas to use for auto-white-balance illuminant |
| estimation.</description> |
| <units>Pixel coordinates within android.sensor.info.activeArraySize or |
| android.sensor.info.preCorrectionActiveArraySize depending on |
| distortion correction capability and mode</units> |
| <range>Coordinates must be between `[(0,0), (width, height))` of |
| android.sensor.info.activeArraySize or android.sensor.info.preCorrectionActiveArraySize |
| depending on distortion correction capability and mode</range> |
| <details> |
| Not available if android.control.maxRegionsAwb is 0. |
| Otherwise will always be present. |
| |
| The maximum number of regions supported by the device is determined by the value |
| of android.control.maxRegionsAwb. |
| |
| For devices not supporting android.distortionCorrection.mode control, the coordinate |
| system always follows that of android.sensor.info.activeArraySize, with (0,0) being |
| the top-left pixel in the active pixel array, and |
| (android.sensor.info.activeArraySize.width - 1, |
| android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the |
| active pixel array. |
| |
| For devices supporting android.distortionCorrection.mode control, the coordinate |
| system depends on the mode being set. |
| When the distortion correction mode is OFF, the coordinate system follows |
| android.sensor.info.preCorrectionActiveArraySize, with |
| `(0, 0)` being the top-left pixel of the pre-correction active array, and |
| (android.sensor.info.preCorrectionActiveArraySize.width - 1, |
| android.sensor.info.preCorrectionActiveArraySize.height - 1) being the bottom-right |
| pixel in the pre-correction active pixel array. |
| When the distortion correction mode is not OFF, the coordinate system follows |
| android.sensor.info.activeArraySize, with |
| `(0, 0)` being the top-left pixel of the active array, and |
| (android.sensor.info.activeArraySize.width - 1, |
| android.sensor.info.activeArraySize.height - 1) being the bottom-right pixel in the |
| active pixel array. |
| |
| The weight must range from 0 to 1000, and represents a weight |
| for every pixel in the area. This means that a large metering area |
| with the same weight as a smaller area will have more effect in |
| the metering result. Metering areas can partially overlap and the |
| camera device will add the weights in the overlap region. |
| |
| The weights are relative to weights of other white balance metering regions, so if |
| only one region is used, all non-zero weights will have the same effect. A region with |
| 0 weight is ignored. |
| |
| If all regions have 0 weight, then no specific metering area needs to be used by the |
| camera device. |
| |
| If the metering region is outside the used android.scaler.cropRegion returned in |
| capture result metadata, the camera device will ignore the sections outside the crop |
| region and output only the intersection rectangle as the metering region in the result |
| metadata. If the region is entirely outside the crop region, it will be ignored and |
| not reported in the result metadata. |
| |
| When setting the AWB metering regions, the application must consider the additional |
| crop resulting from the aspect ratio differences between the preview stream and |
| android.scaler.cropRegion. For example, if the android.scaler.cropRegion is the full |
| active array size with 4:3 aspect ratio, and the preview stream is 16:9, |
| the boundary of AWB regions will be [0, y_crop] and |
| [active_width, active_height - 2 * y_crop] rather than [0, 0] and |
| [active_width, active_height], where y_crop is the additional crop due to aspect ratio |
| mismatch. |
| |
| Starting from API level 30, the coordinate system of activeArraySize or |
| preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not |
| pre-zoom field of view. This means that the same awbRegions values at different |
| android.control.zoomRatio represent different parts of the scene. The awbRegions |
| coordinates are relative to the activeArray/preCorrectionActiveArray representing the |
| zoomed field of view. If android.control.zoomRatio is set to 1.0 (default), the same |
| awbRegions at different android.scaler.cropRegion still represent the same parts of |
| the scene as they do before. See android.control.zoomRatio for details. Whether to use |
| activeArraySize or preCorrectionActiveArraySize still depends on distortion correction |
| mode. |
| |
| For camera devices with the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability or devices where |
| {@link CameraCharacteristics#getAvailableCaptureRequestKeys} |
| lists android.sensor.pixelMode, |
| android.sensor.info.activeArraySizeMaximumResolution / |
| android.sensor.info.preCorrectionActiveArraySizeMaximumResolution must be used as the |
| coordinate system for requests where android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <ndk_details> |
| The data representation is `int[5 * area_count]`. |
| Every five elements represent a metering region of `(xmin, ymin, xmax, ymax, weight)`. |
| The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and |
| ymax. |
| </ndk_details> |
| <hal_details> |
| The HAL level representation of MeteringRectangle[] is an |
| int[5 * area_count]. |
| Every five elements represent a metering region of |
| (xmin, ymin, xmax, ymax, weight). |
| The rectangle is defined to be inclusive on xmin and ymin, but |
| exclusive on xmax and ymax. |
| HAL must always report metering regions in the coordinate system of pre-correction |
| active array. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="captureIntent" type="byte" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value>CUSTOM |
| <notes>The goal of this request doesn't fall into the other |
| categories. The camera device will default to preview-like |
| behavior.</notes></value> |
| <value>PREVIEW |
| <notes>This request is for a preview-like use case. |
| |
| The precapture trigger may be used to start off a metering-with-flash |
| sequence. |
| </notes></value> |
| <value>STILL_CAPTURE |
| <notes>This request is for a still capture-type |
| use case. |
| |
| If the flash unit is under automatic control, it may fire as needed. |
| </notes></value> |
| <value>VIDEO_RECORD |
| <notes>This request is for a video recording |
| use case.</notes></value> |
| <value>VIDEO_SNAPSHOT |
| <notes>This request is for a video snapshot (still |
| image while recording video) use case. |
| |
| The camera device should take the highest-quality image |
| possible (given the other settings) without disrupting the |
| frame rate of video recording. </notes></value> |
| <value>ZERO_SHUTTER_LAG |
| <notes>This request is for a ZSL use case; the |
| application will stream full-resolution images and |
| reprocess one or several later for a final |
| capture. |
| </notes></value> |
| <value>MANUAL |
| <notes>This request is for a manual capture use case where |
| the application wants to directly control the capture parameters. |
| |
| For example, the application may wish to manually control |
| android.sensor.exposureTime, android.sensor.sensitivity, etc. |
| </notes></value> |
| <value hal_version="3.3">MOTION_TRACKING |
| <notes>This request is for a motion tracking use case, where |
| the application will use camera and inertial sensor data to |
| locate and track objects in the world. |
| |
| The camera device auto-exposure routine will limit the exposure time |
| of the camera to no more than 20 milliseconds, to minimize motion blur. |
| </notes></value> |
| </enum> |
| <description>Information to the camera device 3A (auto-exposure, |
| auto-focus, auto-white balance) routines about the purpose |
| of this capture, to help the camera device to decide optimal 3A |
| strategy.</description> |
| <details>This control (except for MANUAL) is only effective if |
| `android.control.mode != OFF` and any 3A routine is active. |
| |
| All intents are supported by all devices, except that: |
| |
| * ZERO_SHUTTER_LAG will be supported if android.request.availableCapabilities contains |
| PRIVATE_REPROCESSING or YUV_REPROCESSING. |
| * MANUAL will be supported if android.request.availableCapabilities contains |
| MANUAL_SENSOR. |
| * MOTION_TRACKING will be supported if android.request.availableCapabilities contains |
| MOTION_TRACKING. |
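| |
| As a sketch (assuming `characteristics` and `builder`), the optional MANUAL |
| intent would only be requested after confirming the capability: |
| |
|     // Sketch: use the MANUAL intent only if MANUAL_SENSOR is advertised. |
|     int[] caps = characteristics.get( |
|             CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES); |
|     for (int c : caps) { |
|         if (c == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) { |
|             builder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, |
|                     CameraMetadata.CONTROL_CAPTURE_INTENT_MANUAL); |
|         } |
|     } |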
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="effectMode" type="byte" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes> |
| No color effect will be applied. |
| </notes> |
| </value> |
| <value optional="true">MONO |
| <notes> |
| A "monocolor" effect where the image is mapped into |
| a single color. |
| |
| This will typically be grayscale. |
| </notes> |
| </value> |
| <value optional="true">NEGATIVE |
| <notes> |
| A "photo-negative" effect where the image's colors |
| are inverted. |
| </notes> |
| </value> |
| <value optional="true">SOLARIZE |
| <notes> |
| A "solarisation" effect (Sabattier effect) where the |
| image is wholly or partially reversed in |
| tone. |
| </notes> |
| </value> |
| <value optional="true">SEPIA |
| <notes> |
| A "sepia" effect where the image is mapped into warm |
| gray, red, and brown tones. |
| </notes> |
| </value> |
| <value optional="true">POSTERIZE |
| <notes> |
| A "posterization" effect where the image uses |
| discrete regions of tone rather than a continuous |
| gradient of tones. |
| </notes> |
| </value> |
| <value optional="true">WHITEBOARD |
| <notes> |
| A "whiteboard" effect where the image is typically displayed |
| as regions of white, with black or grey details. |
| </notes> |
| </value> |
| <value optional="true">BLACKBOARD |
| <notes> |
| A "blackboard" effect where the image is typically displayed |
| as regions of black, with white or grey details. |
| </notes> |
| </value> |
| <value optional="true">AQUA |
| <notes> |
| An "aqua" effect where a blue hue is added to the image. |
| </notes> |
| </value> |
| </enum> |
| <description>A special color effect to apply.</description> |
| <range>android.control.availableEffects</range> |
| <details> |
| When this mode is set, a color effect will be applied |
| to images produced by the camera device. The interpretation |
| and implementation of these color effects is left to the |
| implementor of the camera device, and should not be |
| depended on to be consistent (or present) across all |
| devices. |
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="mode" type="byte" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes>Full application control of pipeline. |
| |
| All control by the device's metering and focusing (3A) |
| routines is disabled, and no other settings in |
| android.control.* have any effect, except that |
| android.control.captureIntent may be used by the camera |
| device to select post-processing values for processing |
| blocks that do not allow for manual control, or are not |
| exposed by the camera API. |
| |
| However, the camera device's 3A routines may continue to |
| collect statistics and update their internal state so that |
| when control is switched to AUTO mode, good control values |
| can be immediately applied. |
| </notes></value> |
| <value>AUTO |
| <notes>Use settings for each individual 3A routine. |
| |
| Manual control of capture parameters is disabled. All |
| controls in android.control.* besides sceneMode take |
| effect.</notes></value> |
| <value optional="true">USE_SCENE_MODE |
| <notes>Use a specific scene mode. |
| |
| Enabling this disables control.aeMode, control.awbMode and |
| control.afMode controls; the camera device will ignore |
| those settings while USE_SCENE_MODE is active (except for |
| FACE_PRIORITY scene mode). Other control entries are still active. |
| This setting can only be used if scene mode is supported (i.e. |
| android.control.availableSceneModes |
| contains some modes other than DISABLED). |
| |
| For extended scene modes such as BOKEH, please use USE_EXTENDED_SCENE_MODE instead. |
| </notes></value> |
| <value optional="true">OFF_KEEP_STATE |
| <notes>Same as OFF mode, except that this capture will not be |
| used by camera device background auto-exposure, auto-white balance and |
| auto-focus algorithms (3A) to update their statistics. |
| |
| Specifically, the 3A routines are locked to the last |
| values set from a request with AUTO, OFF, or |
| USE_SCENE_MODE, and any statistics or state updates |
| collected from manual captures with OFF_KEEP_STATE will be |
| discarded by the camera device. |
| </notes></value> |
| <value optional="true" hal_version="3.5">USE_EXTENDED_SCENE_MODE |
| <notes>Use a specific extended scene mode. |
| |
| When extended scene mode is on, the camera device may override certain control |
| parameters, such as targetFpsRange, AE, AWB, and AF modes, to achieve best power and |
| quality tradeoffs. Only the mandatory stream combinations of LIMITED hardware level |
| are guaranteed. |
| |
| This setting can only be used if extended scene mode is supported (i.e. |
| android.control.availableExtendedSceneModes |
| contains some modes other than DISABLED).</notes></value> |
| </enum> |
| <description>Overall mode of 3A (auto-exposure, auto-white-balance, auto-focus) control |
| routines.</description> |
| <range>android.control.availableModes</range> |
| <details> |
| This is a top-level 3A control switch. When set to OFF, all 3A control |
| by the camera device is disabled. The application must set the fields for |
| capture parameters itself. |
| |
| When set to AUTO, the individual algorithm controls in |
| android.control.* are in effect, such as android.control.afMode. |
| |
| When set to USE_SCENE_MODE or USE_EXTENDED_SCENE_MODE, the individual controls in |
| android.control.* are mostly disabled, and the camera device |
| implements one of the scene mode or extended scene mode settings (such as ACTION, |
| SUNSET, PARTY, or BOKEH) as it wishes. The camera device scene mode |
| 3A settings are provided by {@link |
| android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result |
| capture results}. |
| |
| When set to OFF_KEEP_STATE, the behavior is similar to OFF mode; the only |
| difference is that this frame will not be used by the camera device's background |
| 3A statistics update, as if this frame were never captured. This mode can be used |
| in scenarios where the application doesn't want a 3A manual control capture to |
| affect the subsequent auto 3A capture results. |
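| |
| As a sketch (assuming `builder`, `session`, and `handler`, a device with the |
| MANUAL_SENSOR capability, and arbitrary example exposure values; error |
| handling omitted), a one-off manual capture that leaves background 3A state |
| untouched might look like: |
| |
|     // Sketch: manual exposure for one frame without polluting 3A statistics. |
|     builder.set(CaptureRequest.CONTROL_MODE, |
|             CameraMetadata.CONTROL_MODE_OFF_KEEP_STATE); |
|     builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10_000_000L); // 10 ms |
|     builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400); // ISO 400 |
|     session.capture(builder.build(), null, handler); |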
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="sceneMode" type="byte" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value id="0">DISABLED |
| <notes> |
| Indicates that no scene modes are set for a given capture request. |
| </notes> |
| </value> |
| <value>FACE_PRIORITY |
| <notes>If face detection support exists, use face |
| detection data for auto-focus, auto-white balance, and |
| auto-exposure routines. |
| |
| If face detection statistics are disabled |
| (i.e. android.statistics.faceDetectMode is set to OFF), |
| this should still operate correctly (but will not return |
| face detection statistics to the framework). |
| |
| Unlike the other scene modes, android.control.aeMode, |
| android.control.awbMode, and android.control.afMode |
| remain active when FACE_PRIORITY is set. |
| </notes> |
| </value> |
| <value optional="true">ACTION |
| <notes> |
| Optimized for photos of quickly moving objects. |
| |
| Similar to SPORTS. |
| </notes> |
| </value> |
| <value optional="true">PORTRAIT |
| <notes> |
| Optimized for still photos of people. |
| </notes> |
| </value> |
| <value optional="true">LANDSCAPE |
| <notes> |
| Optimized for photos of distant macroscopic objects. |
| </notes> |
| </value> |
| <value optional="true">NIGHT |
| <notes> |
| Optimized for low-light settings. |
| </notes> |
| </value> |
| <value optional="true">NIGHT_PORTRAIT |
| <notes> |
| Optimized for still photos of people in low-light |
| settings. |
| </notes> |
| </value> |
| <value optional="true">THEATRE |
| <notes> |
| Optimized for dim, indoor settings where flash must |
| remain off. |
| </notes> |
| </value> |
| <value optional="true">BEACH |
| <notes> |
| Optimized for bright, outdoor beach settings. |
| </notes> |
| </value> |
| <value optional="true">SNOW |
| <notes> |
| Optimized for bright, outdoor settings containing snow. |
| </notes> |
| </value> |
| <value optional="true">SUNSET |
| <notes> |
| Optimized for scenes of the setting sun. |
| </notes> |
| </value> |
| <value optional="true">STEADYPHOTO |
| <notes> |
| Optimized to avoid blurry photos due to small amounts of |
| device motion (for example, due to hand shake). |
| </notes> |
| </value> |
| <value optional="true">FIREWORKS |
| <notes> |
| Optimized for nighttime photos of fireworks. |
| </notes> |
| </value> |
| <value optional="true">SPORTS |
| <notes> |
| Optimized for photos of quickly moving people. |
| |
| Similar to ACTION. |
| </notes> |
| </value> |
| <value optional="true">PARTY |
| <notes> |
| Optimized for dim, indoor settings with multiple moving |
| people. |
| </notes> |
| </value> |
| <value optional="true">CANDLELIGHT |
| <notes> |
| Optimized for dim settings where the main light source |
| is a candle. |
| </notes> |
| </value> |
| <value optional="true">BARCODE |
| <notes> |
| Optimized for accurately capturing a photo of a barcode |
| for use by camera applications that wish to read the |
| barcode value. |
| </notes> |
| </value> |
| <value deprecated="true" optional="true" visibility="java_public">HIGH_SPEED_VIDEO |
| <notes> |
| This is deprecated, please use {@link |
| android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession} |
| and {@link |
| android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList} |
| for high speed video recording. |
| |
| Optimized for high speed video recording (frame rate >=60fps) use case. |
| |
| The supported high speed video sizes and fps ranges are specified in |
| android.control.availableHighSpeedVideoConfigurations. To get desired |
| output frame rates, the application is only allowed to select video size |
| and fps range combinations listed in this static metadata. The fps range |
| can be controlled via android.control.aeTargetFpsRange. |
| |
| In this mode, the camera device will override aeMode, awbMode, and afMode to |
| ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing block mode |
| controls will be overridden to be FAST. Therefore, no manual control of capture |
| and post-processing parameters is possible. All other controls operate the |
| same as when android.control.mode == AUTO. This means that all other |
| android.control.* fields continue to work, such as |
| |
| * android.control.aeTargetFpsRange |
| * android.control.aeExposureCompensation |
| * android.control.aeLock |
| * android.control.awbLock |
| * android.control.effectMode |
| * android.control.aeRegions |
| * android.control.afRegions |
| * android.control.awbRegions |
| * android.control.afTrigger |
| * android.control.aePrecaptureTrigger |
| * android.control.zoomRatio |
| |
| Outside of android.control.*, the following controls will work: |
| |
| * android.flash.mode (automatic flash for still capture will not work since aeMode is ON) |
| * android.lens.opticalStabilizationMode (if it is supported) |
| * android.scaler.cropRegion |
| * android.statistics.faceDetectMode |
| |
| For the high speed recording use case, the actual maximum supported frame rate may |
| be lower than what the camera can output, depending on the destination Surfaces for |
| the image data. For example, if the destination surface is from a video encoder, |
| the application needs to check if the video encoder is capable of supporting the |
| high frame rate for a given video size, or it will end up with a lower recording |
| frame rate. If the destination surface is from a preview window, the preview frame |
| rate will be bounded by the screen refresh rate. |
| |
| The camera device will only support up to 2 output high speed streams |
| (processed non-stalling format defined in android.request.maxNumOutputStreams) |
| in this mode. This control will be effective only if all of the below conditions are true: |
| |
| * The application created no more than maxNumHighSpeedStreams processed non-stalling |
| format output streams, where maxNumHighSpeedStreams is calculated as |
| min(2, android.request.maxNumOutputStreams[Processed (but not-stalling)]). |
| * The stream sizes are selected from the sizes reported by |
| android.control.availableHighSpeedVideoConfigurations. |
| * No processed non-stalling or raw streams are configured. |
| |
| When the above conditions are NOT satisfied, the controls of this mode and |
| android.control.aeTargetFpsRange will be ignored by the camera device, |
| the camera device will fall back to android.control.mode `==` AUTO, |
| and the returned capture result metadata will give the fps range chosen |
| by the camera device. |
| |
| Switching into or out of this mode may trigger some camera ISP/sensor |
| reconfigurations, which may introduce extra latency. It is recommended that |
| the application avoid unnecessary scene mode switches as much as possible. |
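| |
| A sketch of the recommended replacement path (assuming `device`, |
| `previewSurface`, `recorderSurface`, `callback`, and `handler`; error |
| handling is abbreviated): |
| |
|     // Sketch: constrained high speed session instead of this scene mode. |
|     device.createConstrainedHighSpeedCaptureSession( |
|             Arrays.asList(previewSurface, recorderSurface), |
|             new CameraCaptureSession.StateCallback() { |
|         @Override |
|         public void onConfigured(CameraCaptureSession session) { |
|             try { |
|                 CameraConstrainedHighSpeedCaptureSession hs = |
|                         (CameraConstrainedHighSpeedCaptureSession) session; |
|                 CaptureRequest.Builder b = device.createCaptureRequest( |
|                         CameraDevice.TEMPLATE_RECORD); |
|                 b.addTarget(previewSurface); |
|                 b.addTarget(recorderSurface); |
|                 b.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, |
|                         new Range<>(120, 120)); // example advertised range |
|                 hs.setRepeatingBurst( |
|                         hs.createHighSpeedRequestList(b.build()), |
|                         callback, handler); |
|             } catch (CameraAccessException e) { /* handle */ } |
|         } |
|         @Override |
|         public void onConfigureFailed(CameraCaptureSession session) { } |
|     }, handler); |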
| </notes> |
| </value> |
| <value optional="true">HDR |
| <notes> |
| Turn on a device-specific high dynamic range (HDR) mode. |
| |
| In this scene mode, the camera device captures images |
| that keep a larger range of scene illumination levels |
| visible in the final image. For example, when taking a |
| picture of an object in front of a bright window, both |
| the object and the scene through the window may be |
| visible when using HDR mode, while in normal AUTO mode, |
| one or the other may be poorly exposed. As a tradeoff, |
| HDR mode generally takes much longer to capture a single |
| image, has no user control, and may have other artifacts |
| depending on the HDR method used. |
| |
| Therefore, HDR captures operate at a much slower rate |
| than regular captures. |
| |
| In this mode, on LIMITED or FULL devices, when a request |
| is made with a android.control.captureIntent of |
| STILL_CAPTURE, the camera device will capture an image |
| using a high dynamic range capture technique. On LEGACY |
| devices, captures that target a JPEG-format output will |
| be captured with HDR, and the capture intent is not |
| relevant. |
| |
| The HDR capture may involve the device capturing a burst |
| of images internally and combining them into one, or it |
| may involve the device using specialized high dynamic |
| range capture hardware. In all cases, a single image is |
| produced in response to a capture request submitted |
| while in HDR mode. |
| |
| Since substantial post-processing is generally needed to |
| produce an HDR image, only YUV, PRIVATE, and JPEG |
| outputs are supported for LIMITED/FULL device HDR |
| captures, and only JPEG outputs are supported for LEGACY |
| HDR captures. Using a RAW output for HDR capture is not |
| supported. |
| |
| Some devices may also support always-on HDR, which |
| applies HDR processing at full frame rate. For these |
| devices, intents other than STILL_CAPTURE will also |
| produce an HDR output with no frame rate impact compared |
| to normal operation, though the quality may be lower |
| than for STILL_CAPTURE intents. |
| |
| If SCENE_MODE_HDR is used with unsupported output types |
| or capture intents, the images captured will be as if |
| the SCENE_MODE was not enabled at all. |
| </notes> |
| </value> |
| <value optional="true" visibility="hidden">FACE_PRIORITY_LOW_LIGHT |
| <notes>Same as FACE_PRIORITY scene mode, except that the camera |
| device will choose higher sensitivity values (android.sensor.sensitivity) |
| under low light conditions. |
| |
| The camera device may be tuned to expose the images in a reduced |
| sensitivity range to produce the best quality images. For example, |
| if android.sensor.info.sensitivityRange gives a range of [100, 1600], |
| the camera device auto-exposure routine tuning process may limit the actual |
| exposure sensitivity range to [100, 1200] to ensure that the noise level isn't |
| excessive, in order to preserve image quality. In this situation, the image under |
| low light may be under-exposed when the sensor max exposure time (bounded by the |
| android.control.aeTargetFpsRange when android.control.aeMode is one of the |
| ON_* modes) and effective max sensitivity are reached. This scene mode allows the |
| camera device auto-exposure routine to increase the sensitivity up to the max |
| sensitivity specified by android.sensor.info.sensitivityRange when the scene is too |
| dark and the max exposure time is reached. The captured images may be noisier |
| compared with the images captured in normal FACE_PRIORITY mode; therefore, it is |
| recommended that the application only use this scene mode when it is capable of |
| reducing the noise level of the captured images. |
| |
| Unlike the other scene modes, android.control.aeMode, |
| android.control.awbMode, and android.control.afMode |
| remain active when FACE_PRIORITY_LOW_LIGHT is set. |
| </notes> |
| </value> |
| <value optional="true" visibility="hidden" id="100">DEVICE_CUSTOM_START |
| <notes> |
| Scene mode values within the range of |
| `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific |
| customized scene modes. |
| </notes> |
| </value> |
| <value optional="true" visibility="hidden" id="127">DEVICE_CUSTOM_END |
| <notes> |
| Scene mode values within the range of |
| `[DEVICE_CUSTOM_START, DEVICE_CUSTOM_END]` are reserved for device specific |
| customized scene modes. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Control for which scene mode is currently active. |
| </description> |
| <range>android.control.availableSceneModes</range> |
| <details> |
| Scene modes are custom camera modes optimized for a certain set of conditions and |
| capture settings. |
| |
| This is the mode that is active when |
| `android.control.mode == USE_SCENE_MODE`. Aside from FACE_PRIORITY, these modes will |
| disable android.control.aeMode, android.control.awbMode, and android.control.afMode |
| while in use. |
| |
| The interpretation and implementation of these scene modes is left |
| to the implementor of the camera device. Their behavior will not be |
| consistent across all devices, and any given device may only implement |
| a subset of these modes. |
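| |
| As a sketch (assuming `characteristics` and `builder`), a scene mode such as |
| HDR would be enabled only after checking the available list: |
| |
|     // Sketch: turn on the HDR scene mode when the device lists it. |
|     int[] sceneModes = characteristics.get( |
|             CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES); |
|     for (int m : sceneModes) { |
|         if (m == CameraMetadata.CONTROL_SCENE_MODE_HDR) { |
|             builder.set(CaptureRequest.CONTROL_MODE, |
|                     CameraMetadata.CONTROL_MODE_USE_SCENE_MODE); |
|             builder.set(CaptureRequest.CONTROL_SCENE_MODE, |
|                     CameraMetadata.CONTROL_SCENE_MODE_HDR); |
|         } |
|     } |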
| </details> |
| <hal_details> |
| HAL implementations that include scene modes are expected to provide |
| the per-scene settings to use for android.control.aeMode, |
| android.control.awbMode, and android.control.afMode in |
| android.control.sceneModeOverrides. |
| |
| For HIGH_SPEED_VIDEO mode, if it is included in android.control.availableSceneModes, the |
| HAL must list supported video size and fps range in |
| android.control.availableHighSpeedVideoConfigurations. For a given size, e.g. 1280x720, |
| if the HAL has two different sensor configurations for normal streaming mode and high |
| speed streaming, when this scene mode is set/reset in a sequence of capture requests, the |
| HAL may have to switch between different sensor modes. This mode is deprecated in legacy |
| HAL3.3; to support high speed video recording, please implement |
| android.control.availableHighSpeedVideoConfigurations and the CONSTRAINED_HIGH_SPEED_VIDEO |
| capability defined in android.request.availableCapabilities. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="videoStabilizationMode" type="byte" visibility="public" |
| enum="true" hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes> |
| Video stabilization is disabled. |
| </notes></value> |
| <value>ON |
| <notes> |
| Video stabilization is enabled. |
| </notes></value> |
| <value optional="true" hal_version="3.8">PREVIEW_STABILIZATION |
| <notes> |
| Preview stabilization, where the preview stream and all other non-RAW streams are |
| stabilized with the same quality of stabilization, is enabled. This mode aims to give |
| clients a 'what you see is what you get' effect. In this mode, the FoV reduction will |
| be at most 20% both horizontally and vertically |
| (10% from the left, right, top, and bottom) for the given zoom ratio / crop region. |
| The resultant FoV will also be the same across all processed streams |
| (that have the same aspect ratio). |
| </notes></value> |
| </enum> |
| <description>Whether video stabilization is |
| active.</description> |
| <details> |
| Video stabilization automatically warps images from |
| the camera in order to stabilize motion between consecutive frames. |
| |
| If enabled, video stabilization can modify the |
| android.scaler.cropRegion to keep the video stream stabilized. |
| |
| Switching between different video stabilization modes may take several |
| frames to initialize; the camera device will report the current mode |
| in capture result metadata. For example, when "ON" mode is requested, |
| the video stabilization mode in the first several capture results may |
| still be "OFF", and it will become "ON" when the initialization is |
| done. |
| |
| In addition, not all recording sizes or frame rates may be supported for |
| stabilization by a device that reports stabilization support. It is guaranteed |
| that an output targeting a MediaRecorder or MediaCodec will be stabilized if |
| the recording resolution is less than or equal to 1920 x 1080 (width less than |
| or equal to 1920, height less than or equal to 1080), and the recording |
| frame rate is less than or equal to 30fps. At other sizes, the CaptureResult |
| android.control.videoStabilizationMode field will return |
| OFF if the recording output is not stabilized, or if there are no output |
| Surface types that can be stabilized. |
| |
| The application is strongly recommended to call |
| {@link android.hardware.camera2.params.SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters} |
| with the desired video stabilization mode before creating the capture session. |
| Video stabilization mode is a session parameter on many devices. Specifying |
| it at session creation time helps avoid reconfiguration delay caused by difference |
| between the default value and the first CaptureRequest. |
| |
| If a camera device supports both this mode and OIS |
| (android.lens.opticalStabilizationMode), turning both modes on may |
| produce undesirable interaction, so it is recommended not to enable |
| both at the same time. |
| |
| If video stabilization is set to "PREVIEW_STABILIZATION", |
| android.lens.opticalStabilizationMode is overridden. The camera sub-system may choose |
| to turn on hardware based image stabilization in addition to software based stabilization |
| if it deems that appropriate. |
| This key may be a part of the available session keys, which camera clients may |
| query via |
| {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys|ACameraManager_getCameraCharacteristics}. |
| If this is the case, changing this key over the lifetime of a capture session may |
| cause delays or glitches. |
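| |
| A sketch of this recommendation (API level 28+; `device`, `outputs`, |
| `executor`, and `stateCallback` are assumed, and error handling is omitted): |
| |
|     // Sketch: specify the stabilization mode as a session parameter so the |
|     // first CaptureRequest does not force a reconfiguration. |
|     CaptureRequest.Builder previewBuilder = |
|             device.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); |
|     previewBuilder.set(CaptureRequest.CONTROL_VIDEO_STABILIZATION_MODE, |
|             CameraMetadata.CONTROL_VIDEO_STABILIZATION_MODE_ON); |
|     SessionConfiguration config = new SessionConfiguration( |
|             SessionConfiguration.SESSION_REGULAR, outputs, executor, |
|             stateCallback); |
|     config.setSessionParameters(previewBuilder.build()); |
|     device.createCaptureSession(config); |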
| |
| </details> |
| <hal_details> |
| When this key is set to "PREVIEW_STABILIZATION", |
| for non-stalling buffers returned without errors, the time interval between the notify |
| readout timestamp and when buffers are returned to the camera framework must be no more |
| than 1 extra frame interval, relative to the case where this key is set to "OFF". |
| |
| This is in order for the look-ahead time period to be short enough |
| for preview to match video recording in real-time usage. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| </controls> |
| <static> |
| <entry name="aeAvailableAntibandingModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList" |
| hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of auto-exposure antibanding modes for android.control.aeAntibandingMode that are |
| supported by this camera device. |
| </description> |
| <range>Any value listed in android.control.aeAntibandingMode</range> |
| <details> |
| Not all of the auto-exposure anti-banding modes may be |
| supported by a given camera device. This field lists the |
| valid anti-banding modes that the application may request |
| for this camera device with the |
| android.control.aeAntibandingMode control. |
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="aeAvailableModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList" |
| hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of auto-exposure modes for android.control.aeMode that are supported by this camera |
| device. |
| </description> |
| <range>Any value listed in android.control.aeMode</range> |
| <details> |
| Not all the auto-exposure modes may be supported by a |
| given camera device, especially if no flash unit is |
| available. This entry lists the valid modes for |
| android.control.aeMode for this camera device. |
| |
| All camera devices support ON, and all camera devices with flash |
| units support ON_AUTO_FLASH and ON_ALWAYS_FLASH. |
| |
| FULL mode camera devices always support OFF mode, |
| which enables application control of camera exposure time, |
| sensitivity, and frame duration. |
| |
| LEGACY mode camera devices never support OFF mode. |
| LIMITED mode devices support OFF if they support the MANUAL_SENSOR |
| capability. |
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="aeAvailableTargetFpsRanges" type="int32" visibility="public" |
| type_notes="list of pairs of frame rates" |
| container="array" typedef="rangeInt" |
| hwlevel="legacy"> |
| <array> |
| <size>2</size> |
| <size>n</size> |
| </array> |
| <description>List of frame rate ranges for android.control.aeTargetFpsRange supported by |
| this camera device.</description> |
| <units>Frames per second (FPS)</units> |
| <details> |
| For devices at the LEGACY level or above: |
| |
| * For constant-framerate recording, for each normal |
| {@link android.media.CamcorderProfile CamcorderProfile}, that is, a |
| {@link android.media.CamcorderProfile CamcorderProfile} that has |
| {@link android.media.CamcorderProfile#quality quality} in |
| the range [{@link android.media.CamcorderProfile#QUALITY_LOW QUALITY_LOW}, |
| {@link android.media.CamcorderProfile#QUALITY_2160P QUALITY_2160P}], if the profile is |
| supported by the device and has |
| {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x`, this list will |
| always include (`x`,`x`). |
| |
| * Also, a camera device must either not support any |
| {@link android.media.CamcorderProfile CamcorderProfile}, |
| or support at least one |
| normal {@link android.media.CamcorderProfile CamcorderProfile} that has |
| {@link android.media.CamcorderProfile#videoFrameRate videoFrameRate} `x` >= 24. |
| |
| For devices at the LIMITED level or above: |
| |
| * For devices that advertise NIR color filter arrangement in |
| android.sensor.info.colorFilterArrangement, this list will always include |
| (`max`, `max`) where `max` = the maximum output frame rate of the maximum YUV_420_888 |
| output size. |
| * For devices advertising any color filter arrangement other than NIR, or devices not |
| advertising color filter arrangement, this list will always include (`min`, `max`) and |
| (`max`, `max`) where `min` <= 15 and `max` = the maximum output frame rate of the |
| maximum YUV_420_888 output size. |
| |
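| As a sketch (assuming `characteristics` and `builder`), a constant 30fps |
| range for recording could be selected like this: |
| |
|     // Sketch: pick a fixed 30fps range when one is advertised. |
|     Range<Integer>[] fpsRanges = characteristics.get( |
|             CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES); |
|     for (Range<Integer> r : fpsRanges) { |
|         if (r.getLower() == 30 && r.getUpper() == 30) { |
|             builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, r); |
|         } |
|     } |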
| </details> |
| <hal_details> |
| HAL must make sure the lower bound and upper bound of each supported targetFpsRange can |
| be reached. For example, if HAL supports an aeTargetFpsRange of (15, 30), when set by the |
| application, the camera must be able to reach 15fps in sufficiently dark scenes. This way |
| the application knows the exact range of frame rate it can expect. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="aeCompensationRange" type="int32" visibility="public" |
| container="array" typedef="rangeInt" |
| hwlevel="legacy"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>Maximum and minimum exposure compensation values for |
| android.control.aeExposureCompensation, in counts of android.control.aeCompensationStep, |
| that are supported by this camera device.</description> |
| <range> |
| Range [0,0] indicates that exposure compensation is not supported. |
| |
| For LIMITED and FULL devices, the range must satisfy the below requirements if exposure |
| compensation is supported (`range != [0, 0]`): |
| |
| `Min.exposure compensation * android.control.aeCompensationStep <= -2 EV` |
| |
| `Max.exposure compensation * android.control.aeCompensationStep >= 2 EV` |
| |
| LEGACY devices may support a smaller range than this. |
| </range> |
| <tag id="BC" /> |
| </entry> |
| <entry name="aeCompensationStep" type="rational" visibility="public" |
| hwlevel="legacy"> |
| <description>Smallest step by which the exposure compensation |
| can be changed.</description> |
| <units>Exposure Value (EV)</units> |
| <details> |
| This is the unit for android.control.aeExposureCompensation. For example, if this key has |
| a value of `1/2`, then a setting of `-2` for android.control.aeExposureCompensation means |
| that the target EV offset for the auto-exposure routine is -1 EV. |
| |
| One unit of EV compensation changes the brightness of the captured image by a factor |
| of two. +1 EV doubles the image brightness, while -1 EV halves the image brightness. |
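| |
| A short worked sketch (assuming `characteristics` and `builder`): with a step |
| of `1/3`, a target of -1 EV maps to a compensation index of -3: |
| |
|     // Sketch: convert a desired EV offset into compensation index units, |
|     // clamped to the supported range. |
|     Rational step = characteristics.get( |
|             CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP); |
|     Range<Integer> range = characteristics.get( |
|             CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE); |
|     double desiredEv = -1.0; // example target: darken by one stop |
|     int index = (int) Math.round(desiredEv / step.doubleValue()); |
|     builder.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, |
|             range.clamp(index)); |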
| </details> |
| <hal_details> |
| This must be less than or equal to 1/2. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="afAvailableModes" type="byte" visibility="public" |
| type_notes="List of enums" container="array" typedef="enumList" |
| hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of auto-focus (AF) modes for android.control.afMode that are |
| supported by this camera device. |
| </description> |
| <range>Any value listed in android.control.afMode</range> |
| <details> |
| Not all the auto-focus modes may be supported by a |
| given camera device. This entry lists the valid modes for |
| android.control.afMode for this camera device. |
| |
| All LIMITED and FULL mode camera devices will support OFF mode, and all |
| camera devices with adjustable focuser units |
| (`android.lens.info.minimumFocusDistance > 0`) will support AUTO mode. |
| |
| LEGACY devices will support OFF mode only if they support |
| focusing to infinity (by also setting android.lens.focusDistance to |
| `0.0f`). |
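| |
| As a sketch (assuming `characteristics` and `builder`), a typical selection |
| preferring continuous AF might be: |
| |
|     // Sketch: prefer CONTINUOUS_PICTURE, then AUTO, then fall back to OFF. |
|     int[] afModes = characteristics.get( |
|             CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES); |
|     int chosen = CameraMetadata.CONTROL_AF_MODE_OFF; |
|     for (int m : afModes) { |
|         if (m == CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE) { |
|             chosen = m; |
|             break; |
|         } else if (m == CameraMetadata.CONTROL_AF_MODE_AUTO) { |
|             chosen = m; |
|         } |
|     } |
|     builder.set(CaptureRequest.CONTROL_AF_MODE, chosen); |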
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="availableEffects" type="byte" visibility="public" |
| type_notes="List of enums (android.control.effectMode)." container="array" |
| typedef="enumList" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of color effects for android.control.effectMode that are supported by this camera |
| device. |
| </description> |
| <range>Any value listed in android.control.effectMode</range> |
| <details> |
| This list contains the color effect modes that can be applied to |
| images produced by the camera device. |
| Implementations are not expected to be consistent across all devices. |
| If no color effect modes are available for a device, this will only list |
| OFF. |
| |
| A color effect will only be applied if |
| android.control.mode != OFF. OFF is always included in this list. |
| |
| This control has no effect on the operation of other control routines such |
| as auto-exposure, white balance, or focus. |
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="availableSceneModes" type="byte" visibility="public" |
| type_notes="List of enums (android.control.sceneMode)." |
| container="array" typedef="enumList" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of scene modes for android.control.sceneMode that are supported by this camera |
| device. |
| </description> |
| <range>Any value listed in android.control.sceneMode</range> |
| <details> |
| This list contains scene modes that can be set for the camera device. |
| Only scene modes that have been fully implemented for the |
| camera device may be included here. Implementations are not expected |
| to be consistent across all devices. |
| |
| If no scene modes are supported by the camera device, this |
| will be set to DISABLED. Otherwise DISABLED will not be listed. |
| |
| FACE_PRIORITY is always listed if face detection is |
supported (i.e. `android.statistics.info.maxFaceCount > 0`).
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="availableVideoStabilizationModes" type="byte" |
| visibility="public" type_notes="List of enums." container="array" |
| typedef="enumList" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of video stabilization modes for android.control.videoStabilizationMode |
| that are supported by this camera device. |
| </description> |
| <range>Any value listed in android.control.videoStabilizationMode</range> |
| <details> |
| OFF will always be listed. |
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="awbAvailableModes" type="byte" visibility="public" |
| type_notes="List of enums" |
| container="array" typedef="enumList" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of auto-white-balance modes for android.control.awbMode that are supported by this |
| camera device. |
| </description> |
| <range>Any value listed in android.control.awbMode</range> |
| <details> |
| Not all the auto-white-balance modes may be supported by a |
| given camera device. This entry lists the valid modes for |
| android.control.awbMode for this camera device. |
| |
| All camera devices will support ON mode. |
| |
| Camera devices that support the MANUAL_POST_PROCESSING capability will always support OFF |
| mode, which enables application control of white balance, by using |
| android.colorCorrection.transform and android.colorCorrection.gains |
| (android.colorCorrection.mode must be set to TRANSFORM_MATRIX). This includes all FULL |
| mode camera devices. |
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="maxRegions" type="int32" visibility="ndk_public" |
| container="array" hwlevel="legacy"> |
| <array> |
| <size>3</size> |
| </array> |
| <description> |
| List of the maximum number of regions that can be used for metering in |
| auto-exposure (AE), auto-white balance (AWB), and auto-focus (AF); |
| this corresponds to the maximum number of elements in |
| android.control.aeRegions, android.control.awbRegions, |
| and android.control.afRegions. |
| </description> |
| <range> |
| Value must be &gt;= 0 for each element. For full-capability devices |
| this value must be &gt;= 1 for AE and AF. The order of the elements is: |
| `(AE, AWB, AF)`.</range> |
| <tag id="BC" /> |
| </entry> |
| <entry name="maxRegionsAe" type="int32" visibility="java_public" |
| synthetic="true" hwlevel="legacy"> |
| <description> |
| The maximum number of metering regions that can be used by the auto-exposure (AE) |
| routine. |
| </description> |
| <range>Value will be &gt;= 0. For FULL-capability devices, this |
| value will be &gt;= 1. |
| </range> |
| <details> |
| This corresponds to the maximum allowed number of elements in |
| android.control.aeRegions. |
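
As an illustration only, a hedged Java sketch that honors this limit when setting a single
centered AE metering region (the rectangle coordinates are arbitrary examples; `builder`
and `characteristics` are assumed to exist in the application):

    import android.hardware.camera2.params.MeteringRectangle;

    // Sketch: only set aeRegions when the device supports at least one region.
    Integer maxAeRegions = characteristics.get(
            CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
    if (maxAeRegions != null) {
        if (maxAeRegions > 0) {
            MeteringRectangle center = new MeteringRectangle(
                    500, 375, 1000, 750, MeteringRectangle.METERING_WEIGHT_MAX);
            builder.set(CaptureRequest.CONTROL_AE_REGIONS,
                    new MeteringRectangle[] { center });
        }
    }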
| </details> |
| <hal_details>This entry is private to the framework. Fill in |
| maxRegions to have this entry be automatically populated. |
| </hal_details> |
| </entry> |
| <entry name="maxRegionsAwb" type="int32" visibility="java_public" |
| synthetic="true" hwlevel="legacy"> |
| <description> |
| The maximum number of metering regions that can be used by the auto-white balance (AWB) |
| routine. |
| </description> |
| <range>Value will be &gt;= 0. |
| </range> |
| <details> |
| This corresponds to the maximum allowed number of elements in |
| android.control.awbRegions. |
| </details> |
| <hal_details>This entry is private to the framework. Fill in |
| maxRegions to have this entry be automatically populated. |
| </hal_details> |
| </entry> |
| <entry name="maxRegionsAf" type="int32" visibility="java_public" |
| synthetic="true" hwlevel="legacy"> |
| <description> |
| The maximum number of metering regions that can be used by the auto-focus (AF) routine. |
| </description> |
| <range>Value will be &gt;= 0. For FULL-capability devices, this |
| value will be &gt;= 1. |
| </range> |
| <details> |
| This corresponds to the maximum allowed number of elements in |
| android.control.afRegions. |
| </details> |
| <hal_details>This entry is private to the framework. Fill in |
| maxRegions to have this entry be automatically populated. |
| </hal_details> |
| </entry> |
| <entry name="sceneModeOverrides" type="byte" visibility="system" |
| container="array" hwlevel="limited"> |
| <array> |
| <size>3</size> |
| <size>length(availableSceneModes)</size> |
| </array> |
| <description> |
| Ordered list of auto-exposure, auto-white balance, and auto-focus |
| settings to use with each available scene mode. |
| </description> |
| <range> |
| For each available scene mode, the list must contain three |
| entries containing the android.control.aeMode, |
| android.control.awbMode, and android.control.afMode values used |
| by the camera device. The entry order is `(aeMode, awbMode, afMode)` |
| where aeMode has the lowest index position. |
| </range> |
| <details> |
| When a scene mode is enabled, the camera device is expected |
| to override android.control.aeMode, android.control.awbMode, |
| and android.control.afMode with its preferred settings for |
| that scene mode. |
| |
| The order of this list matches that of availableSceneModes, |
| with 3 entries for each mode. The overrides listed |
for FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported) are ignored,
since for those modes the application-set android.control.aeMode,
android.control.awbMode, and android.control.afMode values are
used instead, matching the behavior when android.control.mode
is set to AUTO. It is recommended that the FACE_PRIORITY and
FACE_PRIORITY_LOW_LIGHT (if supported) overrides be set to 0.
| |
| For example, if availableSceneModes contains |
| `(FACE_PRIORITY, ACTION, NIGHT)`, then the camera framework |
| expects sceneModeOverrides to have 9 entries formatted like: |
| `(0, 0, 0, ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE, |
| ON_AUTO_FLASH, INCANDESCENT, AUTO)`. |
| </details> |
| <hal_details> |
| To maintain backward compatibility, this list will be made available |
| in the static metadata of the camera service. The camera service will |
| use these values to set android.control.aeMode, |
| android.control.awbMode, and android.control.afMode when using a scene |
| mode other than FACE_PRIORITY and FACE_PRIORITY_LOW_LIGHT (if supported). |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| </static> |
| <dynamic> |
| <entry name="aePrecaptureId" type="int32" visibility="system" deprecated="true"> |
| <description>The ID sent with the latest |
| CAMERA2_TRIGGER_PRECAPTURE_METERING call</description> |
| <deprecation_description> |
| Removed in camera HAL v3 |
| </deprecation_description> |
<details>Must be 0 if no
CAMERA2_TRIGGER_PRECAPTURE_METERING trigger has been received yet
by the HAL. Always updated even if the AE algorithm ignores the
trigger.</details>
| </entry> |
| <clone entry="android.control.aeAntibandingMode" kind="controls"> |
| </clone> |
| <clone entry="android.control.aeExposureCompensation" kind="controls"> |
| </clone> |
| <clone entry="android.control.aeLock" kind="controls"> |
| </clone> |
| <clone entry="android.control.aeMode" kind="controls"> |
| </clone> |
| <clone entry="android.control.aeRegions" kind="controls"> |
| </clone> |
| <clone entry="android.control.aeTargetFpsRange" kind="controls"> |
| </clone> |
| <clone entry="android.control.aePrecaptureTrigger" kind="controls"> |
| </clone> |
| <entry name="aeState" type="byte" visibility="public" enum="true" |
| hwlevel="limited"> |
| <enum> |
| <value>INACTIVE |
| <notes>AE is off or recently reset. |
| |
| When a camera device is opened, it starts in |
this state. This is a transient state; the camera device may skip reporting
this state in the capture result.</notes></value>
| <value>SEARCHING |
| <notes>AE doesn't yet have a good set of control values |
| for the current scene. |
| |
This is a transient state; the camera device may skip
reporting this state in the capture result.</notes></value>
| <value>CONVERGED |
| <notes>AE has a good set of control values for the |
| current scene.</notes></value> |
| <value>LOCKED |
| <notes>AE has been locked.</notes></value> |
| <value>FLASH_REQUIRED |
| <notes>AE has a good set of control values, but flash |
| needs to be fired for good quality still |
| capture.</notes></value> |
| <value>PRECAPTURE |
| <notes>AE has been asked to do a precapture sequence |
| and is currently executing it. |
| |
Precapture can be triggered by setting
android.control.aePrecaptureTrigger to START. A currently active
precapture metering sequence, or a completed one (if it caused an internal
AE lock in the camera device), can be canceled by setting
android.control.aePrecaptureTrigger to CANCEL.
| |
| Once PRECAPTURE completes, AE will transition to CONVERGED |
or FLASH_REQUIRED as appropriate. This is a transient
state; the camera device may skip reporting this state in the
capture result.</notes></value>
| </enum> |
| <description>Current state of the auto-exposure (AE) algorithm.</description> |
| <details>Switching between or enabling AE modes (android.control.aeMode) always |
resets the AE state to INACTIVE. Similarly, switching between values of android.control.mode,
or of android.control.sceneMode if `android.control.mode == USE_SCENE_MODE`, resets all
the algorithm states to INACTIVE.
| |
| The camera device can do several state transitions between two results, if it is |
| allowed by the state transition table. For example: INACTIVE may never actually be |
| seen in a result. |
| |
| The state in the result is the state for this image (in sync with this image): if |
| AE state becomes CONVERGED, then the image data associated with this result should |
| be good to use. |
| |
Below are state transition tables for different AE modes.

When android.control.aeMode is AE_MODE_OFF:

State | Transition Cause | New State | Notes
| :------------:|:----------------:|:---------:|:-----------------------: |
| INACTIVE | | INACTIVE | Camera device auto exposure algorithm is disabled |
| |
| When android.control.aeMode is AE_MODE_ON*: |
| |
| State | Transition Cause | New State | Notes |
| :-------------:|:--------------------------------------------:|:--------------:|:-----------------: |
| INACTIVE | Camera device initiates AE scan | SEARCHING | Values changing |
| INACTIVE | android.control.aeLock is ON | LOCKED | Values locked |
| SEARCHING | Camera device finishes AE scan | CONVERGED | Good values, not changing |
| SEARCHING | Camera device finishes AE scan | FLASH_REQUIRED | Converged but too dark w/o flash |
| SEARCHING | android.control.aeLock is ON | LOCKED | Values locked |
| CONVERGED | Camera device initiates AE scan | SEARCHING | Values changing |
| CONVERGED | android.control.aeLock is ON | LOCKED | Values locked |
| FLASH_REQUIRED | Camera device initiates AE scan | SEARCHING | Values changing |
| FLASH_REQUIRED | android.control.aeLock is ON | LOCKED | Values locked |
| LOCKED | android.control.aeLock is OFF | SEARCHING | Values not good after unlock |
| LOCKED | android.control.aeLock is OFF | CONVERGED | Values good after unlock |
| LOCKED | android.control.aeLock is OFF | FLASH_REQUIRED | Exposure good, but too dark |
| PRECAPTURE | Sequence done. android.control.aeLock is OFF | CONVERGED | Ready for high-quality capture |
| PRECAPTURE | Sequence done. android.control.aeLock is ON | LOCKED | Ready for high-quality capture |
| LOCKED | aeLock is ON and aePrecaptureTrigger is START | LOCKED | Precapture trigger is ignored when AE is already locked |
| LOCKED | aeLock is ON and aePrecaptureTrigger is CANCEL| LOCKED | Precapture trigger is ignored when AE is already locked |
| Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START | PRECAPTURE | Start AE precapture metering sequence |
| Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL| INACTIVE | Currently active precapture metering sequence is canceled |
| |
| If the camera device supports AE external flash mode (ON_EXTERNAL_FLASH is included in |
| android.control.aeAvailableModes), android.control.aeState must be FLASH_REQUIRED after |
the camera device finishes the AE scan and the scene is too dark without flash.
| |
| For the above table, the camera device may skip reporting any state changes that happen |
| without application intervention (i.e. mode switch, trigger, locking). Any state that |
| can be skipped in that manner is called a transient state. |
| |
For example, for the above AE modes (AE_MODE_ON*), in addition to the state transitions
listed in the above table, it is also legal for the camera device to skip one or more
transient states between two results. See the table below for examples:
| |
| State | Transition Cause | New State | Notes |
| :-------------:|:-----------------------------------------------------------:|:--------------:|:-----------------: |
| INACTIVE | Camera device finished AE scan | CONVERGED | Values are already good, transient states are skipped by camera device. |
| Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence, transient states are skipped by camera device. |
| Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is START, sequence done | CONVERGED | Converged after a precapture sequence, transient states are skipped by camera device. |
| Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged | FLASH_REQUIRED | Converged but too dark w/o flash after a precapture sequence is canceled, transient states are skipped by camera device. |
Any state (excluding LOCKED) | android.control.aePrecaptureTrigger is CANCEL, converged     | CONVERGED      | Converged after a precapture sequence is canceled, transient states are skipped by camera device.
| CONVERGED | Camera device finished AE scan | FLASH_REQUIRED | Converged but too dark w/o flash after a new scan, transient states are skipped by camera device. |
| FLASH_REQUIRED | Camera device finished AE scan | CONVERGED | Converged after a new scan, transient states are skipped by camera device. |
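
As an illustration only, a simplified Java capture callback (one valid pattern, not the
only one) that consumes these transitions: it waits until a precapture sequence has
finished before issuing a still capture. `captureStillPicture` is a hypothetical
application method.

    // Sketch: watch aeState until precapture metering completes.
    CameraCaptureSession.CaptureCallback aeWatcher =
            new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session,
                CaptureRequest request, TotalCaptureResult result) {
            Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
            if (aeState == null) {
                return; // LEGACY devices may not report aeState.
            }
            // PRECAPTURE is transient; CONVERGED or FLASH_REQUIRED means the
            // sequence is done and a high-quality still can be captured.
            if (aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED
                    || aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
                captureStillPicture();
            }
        }
    };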
| </details> |
| </entry> |
| <clone entry="android.control.afMode" kind="controls"> |
| </clone> |
| <clone entry="android.control.afRegions" kind="controls"> |
| </clone> |
| <clone entry="android.control.afTrigger" kind="controls"> |
| </clone> |
| <entry name="afState" type="byte" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value>INACTIVE |
| <notes>AF is off or has not yet tried to scan/been asked |
| to scan. |
| |
| When a camera device is opened, it starts in this |
state. This is a transient state; the camera device may
skip reporting this state in the capture
result.</notes></value>
| <value>PASSIVE_SCAN |
<notes>AF is currently performing an AF scan initiated by the
camera device in a continuous autofocus mode.
| |
Only used by CONTINUOUS_* AF modes. This is a transient
state; the camera device may skip reporting this state in
the capture result.</notes></value>
| <value>PASSIVE_FOCUSED |
| <notes>AF currently believes it is in focus, but may |
| restart scanning at any time. |
| |
Only used by CONTINUOUS_* AF modes. This is a transient
state; the camera device may skip reporting this state in
the capture result.</notes></value>
| <value>ACTIVE_SCAN |
| <notes>AF is performing an AF scan because it was |
| triggered by AF trigger. |
| |
Only used by AUTO or MACRO AF modes. This is a transient
state; the camera device may skip reporting this state in
the capture result.</notes></value>
| <value>FOCUSED_LOCKED |
| <notes>AF believes it is focused correctly and has locked |
| focus. |
| |
| This state is reached only after an explicit START AF trigger has been |
| sent (android.control.afTrigger), when good focus has been obtained. |
| |
| The lens will remain stationary until the AF mode (android.control.afMode) is changed or |
| a new AF trigger is sent to the camera device (android.control.afTrigger). |
| </notes></value> |
| <value>NOT_FOCUSED_LOCKED |
| <notes>AF has failed to focus successfully and has locked |
| focus. |
| |
| This state is reached only after an explicit START AF trigger has been |
| sent (android.control.afTrigger), when good focus cannot be obtained. |
| |
| The lens will remain stationary until the AF mode (android.control.afMode) is changed or |
| a new AF trigger is sent to the camera device (android.control.afTrigger). |
| </notes></value> |
| <value>PASSIVE_UNFOCUSED |
| <notes>AF finished a passive scan without finding focus, |
| and may restart scanning at any time. |
| |
Only used by CONTINUOUS_* AF modes. This is a transient state; the camera
device may skip reporting this state in the capture result.
| |
| LEGACY camera devices do not support this state. When a passive |
| scan has finished, it will always go to PASSIVE_FOCUSED. |
| </notes></value> |
| </enum> |
| <description>Current state of auto-focus (AF) algorithm.</description> |
| <details> |
| Switching between or enabling AF modes (android.control.afMode) always |
resets the AF state to INACTIVE. Similarly, switching between values of android.control.mode,
or of android.control.sceneMode if `android.control.mode == USE_SCENE_MODE`, resets all
the algorithm states to INACTIVE.
| |
| The camera device can do several state transitions between two results, if it is |
| allowed by the state transition table. For example: INACTIVE may never actually be |
| seen in a result. |
| |
| The state in the result is the state for this image (in sync with this image): if |
| AF state becomes FOCUSED, then the image data associated with this result should |
| be sharp. |
| |
| Below are state transition tables for different AF modes. |
| |
| When android.control.afMode is AF_MODE_OFF or AF_MODE_EDOF: |
| |
| State | Transition Cause | New State | Notes |
| :------------:|:----------------:|:---------:|:-----------: |
| INACTIVE | | INACTIVE | Never changes |
| |
| When android.control.afMode is AF_MODE_AUTO or AF_MODE_MACRO: |
| |
| State | Transition Cause | New State | Notes |
| :-----------------:|:----------------:|:------------------:|:--------------: |
| INACTIVE | AF_TRIGGER | ACTIVE_SCAN | Start AF sweep, Lens now moving |
| ACTIVE_SCAN | AF sweep done | FOCUSED_LOCKED | Focused, Lens now locked |
| ACTIVE_SCAN | AF sweep done | NOT_FOCUSED_LOCKED | Not focused, Lens now locked |
| ACTIVE_SCAN | AF_CANCEL | INACTIVE | Cancel/reset AF, Lens now locked |
| FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF |
| FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving |
| NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Cancel/reset AF |
| NOT_FOCUSED_LOCKED | AF_TRIGGER | ACTIVE_SCAN | Start new sweep, Lens now moving |
| Any state | Mode change | INACTIVE | |
| |
| For the above table, the camera device may skip reporting any state changes that happen |
| without application intervention (i.e. mode switch, trigger, locking). Any state that |
| can be skipped in that manner is called a transient state. |
| |
For example, for these AF modes (AF_MODE_AUTO and AF_MODE_MACRO), in addition to the
state transitions listed in the above table, it is also legal for the camera device to skip
one or more transient states between two results. See the table below for examples:
| |
| State | Transition Cause | New State | Notes |
| :-----------------:|:----------------:|:------------------:|:--------------: |
| INACTIVE | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked. |
| INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | Focus failed after a scan, lens is now locked. |
| FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | Focus is already good or good after a scan, lens is now locked. |
NOT_FOCUSED_LOCKED | AF_TRIGGER       | FOCUSED_LOCKED     | Focus is good after a scan, lens is now locked.
| |
| |
| When android.control.afMode is AF_MODE_CONTINUOUS_VIDEO: |
| |
| State | Transition Cause | New State | Notes |
| :-----------------:|:-----------------------------------:|:------------------:|:--------------: |
| INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving |
| INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked |
| PASSIVE_SCAN | Camera device completes current scan| PASSIVE_FOCUSED | End AF scan, Lens now locked |
| PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked |
| PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, if focus is good. Lens now locked |
| PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, if focus is bad. Lens now locked |
| PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked |
| PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving |
| PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving |
| PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate transition, lens now locked |
| PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate transition, lens now locked |
| FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect |
| FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan |
| NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect |
| NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan |
| |
| When android.control.afMode is AF_MODE_CONTINUOUS_PICTURE: |
| |
| State | Transition Cause | New State | Notes |
| :-----------------:|:------------------------------------:|:------------------:|:--------------: |
| INACTIVE | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving |
| INACTIVE | AF_TRIGGER | NOT_FOCUSED_LOCKED | AF state query, Lens now locked |
| PASSIVE_SCAN | Camera device completes current scan | PASSIVE_FOCUSED | End AF scan, Lens now locked |
| PASSIVE_SCAN | Camera device fails current scan | PASSIVE_UNFOCUSED | End AF scan, Lens now locked |
| PASSIVE_SCAN | AF_TRIGGER | FOCUSED_LOCKED | Eventual transition once the focus is good. Lens now locked |
| PASSIVE_SCAN | AF_TRIGGER | NOT_FOCUSED_LOCKED | Eventual transition if cannot find focus. Lens now locked |
| PASSIVE_SCAN | AF_CANCEL | INACTIVE | Reset lens position, Lens now locked |
| PASSIVE_FOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving |
| PASSIVE_UNFOCUSED | Camera device initiates new scan | PASSIVE_SCAN | Start AF scan, Lens now moving |
| PASSIVE_FOCUSED | AF_TRIGGER | FOCUSED_LOCKED | Immediate trans. Lens now locked |
| PASSIVE_UNFOCUSED | AF_TRIGGER | NOT_FOCUSED_LOCKED | Immediate trans. Lens now locked |
| FOCUSED_LOCKED | AF_TRIGGER | FOCUSED_LOCKED | No effect |
| FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan |
| NOT_FOCUSED_LOCKED | AF_TRIGGER | NOT_FOCUSED_LOCKED | No effect |
| NOT_FOCUSED_LOCKED | AF_CANCEL | INACTIVE | Restart AF scan |
| |
When switching between AF_MODE_CONTINUOUS_* (CAF modes) and AF_MODE_AUTO/AF_MODE_MACRO
| (AUTO modes), the initial INACTIVE or PASSIVE_SCAN states may be skipped by the |
| camera device. When a trigger is included in a mode switch request, the trigger |
| will be evaluated in the context of the new mode in the request. |
| See below table for examples: |
| |
| State | Transition Cause | New State | Notes |
| :-----------:|:--------------------------------------:|:----------------------------------------:|:--------------: |
| any state | CAF-->AUTO mode switch | INACTIVE | Mode switch without trigger, initial state must be INACTIVE |
| any state | CAF-->AUTO mode switch with AF_TRIGGER | trigger-reachable states from INACTIVE | Mode switch with trigger, INACTIVE is skipped |
| any state | AUTO-->CAF mode switch | passively reachable states from INACTIVE | Mode switch without trigger, passive transient state is skipped |
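
To make the trigger/state interaction concrete, an illustrative Java sketch of an active
AF sweep in AF_MODE_AUTO (a sketch only; `session` and a preview `builder` are assumed to
exist in the application):

    // Sketch: start an active AF sweep and watch for one of the locked states.
    builder.set(CaptureRequest.CONTROL_AF_MODE,
            CaptureRequest.CONTROL_AF_MODE_AUTO);
    builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
            CaptureRequest.CONTROL_AF_TRIGGER_START);
    session.capture(builder.build(), new CameraCaptureSession.CaptureCallback() {
        @Override
        public void onCaptureCompleted(CameraCaptureSession session,
                CaptureRequest request, TotalCaptureResult result) {
            Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
            if (afState == null) {
                return;
            }
            // ACTIVE_SCAN is transient and may be skipped entirely.
            if (afState == CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED
                    || afState == CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
                // The lens is locked; the sweep finished (focused or not).
            }
        }
    }, null);
    // The trigger is one-shot: set it back to IDLE in subsequent requests.
    builder.set(CaptureRequest.CONTROL_AF_TRIGGER,
            CaptureRequest.CONTROL_AF_TRIGGER_IDLE);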
| </details> |
| </entry> |
| <entry name="afTriggerId" type="int32" visibility="system" deprecated="true"> |
| <description>The ID sent with the latest |
| CAMERA2_TRIGGER_AUTOFOCUS call</description> |
| <deprecation_description> |
| Removed in camera HAL v3 |
| </deprecation_description> |
<details>Must be 0 if no CAMERA2_TRIGGER_AUTOFOCUS trigger has been
received yet by the HAL. Always updated even if the AF algorithm
ignores the trigger.</details>
| </entry> |
| <clone entry="android.control.awbLock" kind="controls"> |
| </clone> |
| <clone entry="android.control.awbMode" kind="controls"> |
| </clone> |
| <clone entry="android.control.awbRegions" kind="controls"> |
| </clone> |
| <clone entry="android.control.captureIntent" kind="controls"> |
| </clone> |
| <entry name="awbState" type="byte" visibility="public" enum="true" |
| hwlevel="limited"> |
| <enum> |
| <value>INACTIVE |
| <notes>AWB is not in auto mode, or has not yet started metering. |
| |
| When a camera device is opened, it starts in this |
state. This is a transient state; the camera device may
skip reporting this state in the capture
result.</notes></value>
| <value>SEARCHING |
| <notes>AWB doesn't yet have a good set of control |
| values for the current scene. |
| |
This is a transient state; the camera device
may skip reporting this state in the capture result.</notes></value>
| <value>CONVERGED |
| <notes>AWB has a good set of control values for the |
| current scene.</notes></value> |
| <value>LOCKED |
| <notes>AWB has been locked. |
| </notes></value> |
| </enum> |
| <description>Current state of auto-white balance (AWB) algorithm.</description> |
| <details>Switching between or enabling AWB modes (android.control.awbMode) always |
resets the AWB state to INACTIVE. Similarly, switching between values of android.control.mode,
or of android.control.sceneMode if `android.control.mode == USE_SCENE_MODE`, resets all
the algorithm states to INACTIVE.
| |
| The camera device can do several state transitions between two results, if it is |
| allowed by the state transition table. So INACTIVE may never actually be seen in |
| a result. |
| |
| The state in the result is the state for this image (in sync with this image): if |
| AWB state becomes CONVERGED, then the image data associated with this result should |
| be good to use. |
| |
| Below are state transition tables for different AWB modes. |
| |
| When `android.control.awbMode != AWB_MODE_AUTO`: |
| |
| State | Transition Cause | New State | Notes |
| :------------:|:----------------:|:---------:|:-----------------------: |
INACTIVE      |                  | INACTIVE  | Camera device auto white balance algorithm is disabled
| |
| When android.control.awbMode is AWB_MODE_AUTO: |
| |
| State | Transition Cause | New State | Notes |
| :-------------:|:--------------------------------:|:-------------:|:-----------------: |
| INACTIVE | Camera device initiates AWB scan | SEARCHING | Values changing |
| INACTIVE | android.control.awbLock is ON | LOCKED | Values locked |
| SEARCHING | Camera device finishes AWB scan | CONVERGED | Good values, not changing |
| SEARCHING | android.control.awbLock is ON | LOCKED | Values locked |
| CONVERGED | Camera device initiates AWB scan | SEARCHING | Values changing |
| CONVERGED | android.control.awbLock is ON | LOCKED | Values locked |
| LOCKED | android.control.awbLock is OFF | SEARCHING | Values not good after unlock |
| |
| For the above table, the camera device may skip reporting any state changes that happen |
| without application intervention (i.e. mode switch, trigger, locking). Any state that |
| can be skipped in that manner is called a transient state. |
| |
For example, for this AWB mode (AWB_MODE_AUTO), in addition to the state transitions
listed in the above table, it is also legal for the camera device to skip one or more
transient states between two results. See the table below for examples:
| |
| State | Transition Cause | New State | Notes |
| :-------------:|:--------------------------------:|:-------------:|:-----------------: |
| INACTIVE | Camera device finished AWB scan | CONVERGED | Values are already good, transient states are skipped by camera device. |
| LOCKED | android.control.awbLock is OFF | CONVERGED | Values good after unlock, transient states are skipped by camera device. |
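
As an illustration only, a short Java sketch of the lock interaction described above,
inside a capture callback where `result` is the latest TotalCaptureResult (assumes
android.control.awbLockAvailable is `true`, and that `builder`, `session`, and a
`callback` exist in the application):

    // Sketch: lock AWB once it has converged, e.g. before a burst capture.
    Integer awbState = result.get(CaptureResult.CONTROL_AWB_STATE);
    if (awbState != null) {
        if (awbState == CaptureResult.CONTROL_AWB_STATE_CONVERGED) {
            builder.set(CaptureRequest.CONTROL_AWB_LOCK, true);
            session.setRepeatingRequest(builder.build(), callback, null);
            // Later results should then report awbState == LOCKED.
        }
    }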
| </details> |
| </entry> |
| <clone entry="android.control.effectMode" kind="controls"> |
| </clone> |
| <clone entry="android.control.mode" kind="controls"> |
| </clone> |
| <clone entry="android.control.sceneMode" kind="controls"> |
| </clone> |
| <clone entry="android.control.videoStabilizationMode" kind="controls"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="availableHighSpeedVideoConfigurations" type="int32" visibility="hidden" |
| container="array" typedef="highSpeedVideoConfiguration" hwlevel="limited"> |
| <array> |
| <size>5</size> |
| <size>n</size> |
| </array> |
| <description> |
| List of available high speed video size, fps range and max batch size configurations |
| supported by the camera device, in the format of (width, height, fps_min, fps_max, batch_size_max). |
| </description> |
| <range> |
| For each configuration, the fps_max &gt;= 120fps. |
| </range> |
| <details> |
| When CONSTRAINED_HIGH_SPEED_VIDEO is supported in android.request.availableCapabilities, |
| this metadata will list the supported high speed video size, fps range and max batch size |
| configurations. All the sizes listed in this configuration will be a subset of the sizes |
| reported by {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} |
| for processed non-stalling formats. |
| |
For the high speed video use case, the application must
select the video size and fps range from this metadata to configure the recording and
preview streams and set up the recording requests. For example, if the application intends
to do high speed recording, it can select the maximum size reported by this metadata to
configure the output streams. Once the size is selected, the application can filter this
metadata by the selected size and get the supported fps ranges, and use these fps ranges
to set up the recording requests. Note that for the use case of multiple output streams,
the application must select one unique size from this metadata to use (e.g., preview and
recording streams must have the same size). Otherwise, the high speed capture session
creation will fail.
| |
The minimum and maximum fps values will be multiples of 30fps.
| |
High speed video streaming places significant performance pressure on the camera hardware.
To achieve efficient high speed streaming, the camera device may have to aggregate
multiple frames together and process them as a single batch, where the request
controls are the same for all the frames in that batch. Max batch size indicates
the maximum possible number of frames the camera device will group together for this high
speed stream configuration. This max batch size will be used to generate a high speed
recording request list by
{@link android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}.
The max batch size for each configuration will satisfy the conditions below:
| |
* Each max batch size will be a divisor of its corresponding fps_max / 30. For example,
if fps_max is 300, the max batch size will only be 1, 2, 5, or 10.
* The camera device may choose a smaller internal batch size for each configuration, but
the actual batch size will be a divisor of the max batch size. For example, if the max
batch size is 8, the actual batch size used by the camera device will only be 1, 2, 4, or 8.
| * The max batch size in each configuration entry must be no larger than 32. |
| |
The camera device doesn't have to support batch mode to achieve high speed video recording;
in that case, batch_size_max will be reported as 1 in each configuration entry.
| |
The fps ranges in this configuration list can only be used to create requests
that are submitted to a high speed camera capture session created by
{@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}.
The fps ranges reported in this metadata must not be used to set up capture requests for a
normal capture session, or a request error will result.
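
As an illustration only, a hedged Java sketch of the selection flow described above
(assumes `characteristics`, an opened `camera`, and `previewSurface`/`recorderSurface`
already exist in the application; picking index 0 is arbitrary):

    import java.util.Arrays;

    import android.hardware.camera2.CameraAccessException;
    import android.hardware.camera2.CameraCaptureSession;
    import android.hardware.camera2.CameraCharacteristics;
    import android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession;
    import android.hardware.camera2.CameraDevice;
    import android.hardware.camera2.CaptureRequest;
    import android.hardware.camera2.params.StreamConfigurationMap;
    import android.util.Range;
    import android.util.Size;

    // Sketch: pick one high speed size plus fps range, then start a repeating burst.
    StreamConfigurationMap map = characteristics.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    final Size videoSize = map.getHighSpeedVideoSizes()[0];
    final Range&lt;Integer&gt; fpsRange =
            map.getHighSpeedVideoFpsRangesFor(videoSize)[0];
    camera.createConstrainedHighSpeedCaptureSession(
            Arrays.asList(previewSurface, recorderSurface), // both sized to videoSize
            new CameraCaptureSession.StateCallback() {
                @Override
                public void onConfigured(CameraCaptureSession session) {
                    try {
                        CameraConstrainedHighSpeedCaptureSession hsSession =
                                (CameraConstrainedHighSpeedCaptureSession) session;
                        CaptureRequest.Builder builder = camera.createCaptureRequest(
                                CameraDevice.TEMPLATE_RECORD);
                        builder.addTarget(previewSurface);
                        builder.addTarget(recorderSurface);
                        builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, fpsRange);
                        // The list length reflects batch_size_max for this configuration.
                        hsSession.setRepeatingBurst(
                                hsSession.createHighSpeedRequestList(builder.build()),
                                null, null);
                    } catch (CameraAccessException e) {
                        // Handle the error.
                    }
                }
                @Override
                public void onConfigureFailed(CameraCaptureSession session) { }
            }, null);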
| </details> |
| <hal_details> |
| All the sizes listed in this configuration will be a subset of the sizes reported by |
| android.scaler.availableStreamConfigurations for processed non-stalling output formats. |
| Note that for all high speed video configurations, HAL must be able to support a minimum |
| of two streams, though the application might choose to configure just one stream. |
| |
The HAL may support multiple sensor modes for high speed outputs, for example, a 120fps
sensor mode for 120fps recording and a 240fps sensor mode for 240fps recording. The
application usually starts preview first, then starts recording. To minimize the stutter
caused by a sensor mode switch when recording starts, the application may want to ensure
that the same sensor mode is used for preview and recording. Therefore, the HAL must
advertise the variable fps range [30, fps_max] for each fixed fps range in this
configuration list. For example, if the HAL advertises [120, 120] and [240, 240], the HAL
must also advertise [30, 120] and [30, 240] for each configuration. In doing so, if the
application intends to do 120fps recording, it can select [30, 120] to start preview, and
[120, 120] to start recording. For these variable fps ranges, it's up to the HAL to decide
the actual fps values that are suitable for smooth preview streaming.
If the HAL sees different fps_max values that fall into different sensor modes in a
sequence of requests, the HAL must switch the sensor mode as quickly as possible to
minimize the stutter caused by the mode switch.
| |
The HAL can also support 60fps preview during a high speed recording session by advertising
[60, fps_max] for preview and [fps_max, fps_max] for recording. However, the HAL must not
advertise both 30fps preview and 60fps preview for the same recording frame rate.
| |
Starting from AIDL ICameraDevice V2, the camera HAL must report a valid batch_size_max. This
field will actually be used to generate a high speed recording request list.
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="aeLockAvailable" type="byte" visibility="public" enum="true" |
| typedef="boolean" hwlevel="legacy"> |
| <enum> |
| <value>FALSE</value> |
| <value>TRUE</value> |
| </enum> |
| <description>Whether the camera device supports android.control.aeLock</description> |
| <details> |
| Devices with MANUAL_SENSOR capability or BURST_CAPTURE capability will always |
| list `true`. This includes FULL devices. |
| </details> |
| <tag id="BC"/> |
| </entry> |
| <entry name="awbLockAvailable" type="byte" visibility="public" enum="true" |
| typedef="boolean" hwlevel="legacy"> |
| <enum> |
| <value>FALSE</value> |
| <value>TRUE</value> |
| </enum> |
| <description>Whether the camera device supports android.control.awbLock</description> |
| <details> |
| Devices with MANUAL_POST_PROCESSING capability or BURST_CAPTURE capability will |
| always list `true`. This includes FULL devices. |
| </details> |
| <tag id="BC"/> |
| </entry> |
| <entry name="availableModes" type="byte" visibility="public" |
| type_notes="List of enums (android.control.mode)." container="array" |
| typedef="enumList" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of control modes for android.control.mode that are supported by this camera |
| device. |
| </description> |
| <range>Any value listed in android.control.mode</range> |
| <details> |
| This list contains control modes that can be set for the camera device. |
LEGACY mode devices will always support AUTO mode. LIMITED and FULL
devices will always support both OFF and AUTO modes.
| </details> |
| </entry> |
| <entry name="postRawSensitivityBoostRange" type="int32" visibility="public" |
| type_notes="Range of supported post RAW sensitivity boosts" |
| container="array" typedef="rangeInt"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>Range of boosts for android.control.postRawSensitivityBoost supported |
| by this camera device. |
| </description> |
| <units>ISO arithmetic units, the same as android.sensor.sensitivity</units> |
| <details> |
Devices that support post RAW sensitivity boost will advertise the
android.control.postRawSensitivityBoost key for controlling
post RAW sensitivity boost.

This key will be `null` for devices that do not support any RAW format
outputs. For devices that do support RAW format outputs, this key will always
be present, and if a device does not support post RAW sensitivity boost, it will
list `(100, 100)` in this key.
| </details> |
| <hal_details> |
This key is added in legacy HAL3.4. For legacy HAL3.3 or earlier devices, the camera
framework will generate this key as `(100, 100)` if the device supports any RAW output
formats. All legacy HAL3.4 and above devices should list this key if the device supports
any RAW output formats.
| </hal_details> |
| </entry> |
| </static> |
| <controls> |
| <entry name="postRawSensitivityBoost" type="int32" visibility="public"> |
| <description>The amount of additional sensitivity boost applied to output images |
| after RAW sensor data is captured. |
| </description> |
| <units>ISO arithmetic units, the same as android.sensor.sensitivity</units> |
| <range>android.control.postRawSensitivityBoostRange</range> |
| <details> |
| Some camera devices support additional digital sensitivity boosting in the |
| camera processing pipeline after sensor RAW image is captured. |
| Such a boost will be applied to YUV/JPEG format output images but will not |
| have effect on RAW output formats like RAW_SENSOR, RAW10, RAW12 or RAW_OPAQUE. |
| |
This key will be `null` for devices that do not support any RAW format
outputs. For devices that do support RAW format outputs, this key will always
be present, and if a device does not support post RAW sensitivity boost, it will
list `100` in this key.
| |
| If the camera device cannot apply the exact boost requested, it will reduce the |
| boost to the nearest supported value. |
| The final boost value used will be available in the output capture result. |
| |
For devices that support post RAW sensitivity boost, the YUV/JPEG output images
of such a device will have a total sensitivity of
`android.sensor.sensitivity * android.control.postRawSensitivityBoost / 100`.
The sensitivity of RAW format images will always be `android.sensor.sensitivity`.
| |
| This control is only effective if android.control.aeMode or android.control.mode is set to |
| OFF; otherwise the auto-exposure algorithm will override this value. |
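
As a worked example of the formula above (a sketch only; the 2x boost value is
illustrative and `builder` is assumed to exist in the application):

    // Sketch: ISO 100 sensor sensitivity plus a 2x post-RAW boost, with AE off
    // so that the manual values take effect.
    builder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_OFF);
    builder.set(CaptureRequest.SENSOR_SENSITIVITY, 100);
    builder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST, 200);
    // YUV/JPEG outputs behave as ISO 100 * 200 / 100 = ISO 200;
    // RAW outputs remain at ISO 100.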
| </details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.control.postRawSensitivityBoost" kind="controls"> |
| </clone> |
| </dynamic> |
| <controls> |
| <entry name="enableZsl" type="byte" visibility="public" enum="true" typedef="boolean"> |
| <enum> |
| <value>FALSE |
| <notes>Requests with android.control.captureIntent == STILL_CAPTURE must be captured |
| after previous requests.</notes></value> |
| <value>TRUE |
| <notes>Requests with android.control.captureIntent == STILL_CAPTURE may or may not be |
| captured before previous requests.</notes></value> |
| </enum> |
| <description>Allow camera device to enable zero-shutter-lag mode for requests with |
| android.control.captureIntent == STILL_CAPTURE. |
| </description> |
| <details> |
| If enableZsl is `true`, the camera device may enable zero-shutter-lag mode for requests with |
| STILL_CAPTURE capture intent. The camera device may use images captured in the past to |
| produce output images for a zero-shutter-lag request. The result metadata including the |
| android.sensor.timestamp reflects the source frames used to produce output images. |
| Therefore, the contents of the output images and the result metadata may be out of order |
| compared to previous regular requests. enableZsl does not affect requests with other |
| capture intents. |
| |
For example, when requests are submitted in the following order:

* Request A: enableZsl is ON, android.control.captureIntent is PREVIEW
* Request B: enableZsl is ON, android.control.captureIntent is STILL_CAPTURE
| |
| The output images for request B may have contents captured before the output images for |
| request A, and the result metadata for request B may be older than the result metadata for |
| request A. |
| |
| Note that when enableZsl is `true`, it is not guaranteed to get output images captured in |
| the past for requests with STILL_CAPTURE capture intent. |
| |
| For applications targeting SDK versions O and newer, the value of enableZsl in |
| TEMPLATE_STILL_CAPTURE template may be `true`. The value in other templates is always |
| `false` if present. |
| |
| For applications targeting SDK versions older than O, the value of enableZsl in all |
| capture templates is always `false` if present. |
| |
| For application-operated ZSL, use CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template. |
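
As an illustration only, a Java sketch of opting into ZSL for stills (`cameraDevice`,
`session`, and `jpegSurface` are assumed to exist in the application; the key may be
absent on devices that do not support it):

    // Sketch: enable ZSL on a still-capture request.
    CaptureRequest.Builder still = cameraDevice.createCaptureRequest(
            CameraDevice.TEMPLATE_STILL_CAPTURE);
    still.set(CaptureRequest.CONTROL_ENABLE_ZSL, true);
    still.addTarget(jpegSurface);
    session.capture(still.build(), null, null);
    // The result's SENSOR_TIMESTAMP may predate earlier PREVIEW results.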
| </details> |
| <hal_details> |
| It is valid for HAL to produce regular output images for requests with STILL_CAPTURE |
| capture intent. |
| </hal_details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.control.enableZsl" kind="controls"> |
| </clone> |
| <entry name="afSceneChange" type="byte" visibility="public" enum="true" hal_version="3.3"> |
| <enum> |
| <value>NOT_DETECTED |
| <notes>Scene change is not detected within the AF region(s).</notes></value> |
| <value>DETECTED |
| <notes>Scene change is detected within the AF region(s).</notes></value> |
| </enum> |
| <description>Whether a significant scene change is detected within the currently-set AF |
| region(s).</description> |
| <details>When the camera focus routine detects a change in the scene it is looking at, |
| such as a large shift in camera viewpoint, significant motion in the scene, or a |
| significant illumination change, this value will be set to DETECTED for a single capture |
| result. Otherwise the value will be NOT_DETECTED. The threshold for detection is similar |
| to what would trigger a new passive focus scan to begin in CONTINUOUS autofocus modes. |
| |
| This key will be available if the camera device advertises this key via {@link |
| android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}. |
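
As an illustration only, a small Java sketch of reacting to this signal in a capture
callback where `result` is the latest TotalCaptureResult (the commented action is a
placeholder for application logic):

    // Sketch: respond to an AF scene change reported in a capture result.
    Integer sceneChange = result.get(CaptureResult.CONTROL_AF_SCENE_CHANGE);
    if (sceneChange != null) {
        if (sceneChange == CaptureResult.CONTROL_AF_SCENE_CHANGE_DETECTED) {
            // e.g. postpone a pending still capture until AF settles again.
        }
    }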
| </details> |
| </entry> |
| </dynamic> |
| <static> |
| <entry name="availableExtendedSceneModeMaxSizes" type="int32" |
| visibility="ndk_public" optional="true" |
| type_notes="List of extended scene modes and the corresponding max streaming sizes." |
| container="array" hwlevel="limited" hal_version="3.5"> |
| <array> |
| <size>3</size> |
| <size>n</size> |
| </array> |
| <description> |
| The list of extended scene modes for android.control.extendedSceneMode that are supported |
| by this camera device, and each extended scene mode's maximum streaming (non-stall) size |
| with effect. |
| </description> |
| <units>(mode, width, height)</units> |
| <details> |
| For DISABLED mode, the camera behaves normally with no extended scene mode enabled. |
| |
| For BOKEH_STILL_CAPTURE mode, the maximum streaming dimension specifies the limit |
under which bokeh is effective when the capture intent is PREVIEW. Note that when the
capture intent is PREVIEW, the bokeh effect may not be of as high quality as with the
STILL_CAPTURE intent, in order to maintain a reasonable frame rate. The maximum streaming
| dimension must be one of the YUV_420_888 or PRIVATE resolutions in |
| availableStreamConfigurations, or (0, 0) if preview bokeh is not supported. If the |
| application configures a stream larger than the maximum streaming dimension, bokeh |
| effect may not be applied for this stream for PREVIEW intent. |
| |
| For BOKEH_CONTINUOUS mode, the maximum streaming dimension specifies the limit under |
| which bokeh is effective. This dimension must be one of the YUV_420_888 or PRIVATE |
| resolutions in availableStreamConfigurations, and if the sensor maximum resolution is |
| larger than or equal to 1080p, the maximum streaming dimension must be at least 1080p. |
| If the application configures a stream with larger dimension, the stream may not have |
| bokeh effect applied. |
| </details> |
| <hal_details> |
| For available extended scene modes, DISABLED will always be listed. |
| |
The HAL must support at least one non-DISABLED extended scene mode if the extendedSceneMode
control is available on the camera device. For DISABLED mode, the maximum streaming
resolution must be set to (0, 0).
| </hal_details> |
| </entry> |
| <entry name="availableExtendedSceneModeZoomRatioRanges" type="float" |
| visibility="ndk_public" optional="true" |
| type_notes="Zoom ranges for all supported non-OFF extended scene modes." |
| container="array" hwlevel="limited" hal_version="3.5"> |
| <array> |
| <size>2</size> |
| <size>n</size> |
| </array> |
| <description> |
| The ranges of supported zoom ratio for non-DISABLED android.control.extendedSceneMode. |
| </description> |
| <units>(minZoom, maxZoom)</units> |
| <details> |
| When extended scene mode is set, the camera device may have limited range of zoom ratios |
| compared to when extended scene mode is DISABLED. This tag lists the zoom ratio ranges |
for all supported non-DISABLED extended scene modes, in the same order as in
android.control.availableExtendedSceneModeMaxSizes.
| |
| Range [1.0, 1.0] means that no zoom (optical or digital) is supported. |
| </details> |
| </entry> |
| <entry name="availableExtendedSceneModeCapabilities" type="int32" visibility="public" |
| synthetic="true" container="array" typedef="capability" hal_version="3.5"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>The list of extended scene modes for android.control.extendedSceneMode that |
| are supported by this camera device, and each extended scene mode's capabilities such |
| as maximum streaming size, and supported zoom ratio ranges.</description> |
| <details> |
| For DISABLED mode, the camera behaves normally with no extended scene mode enabled. |
| |
| For BOKEH_STILL_CAPTURE mode, the maximum streaming dimension specifies the limit |
under which bokeh is effective when the capture intent is PREVIEW. Note that when the
capture intent is PREVIEW, the bokeh effect may not be of as high quality as with the
STILL_CAPTURE intent, in order to maintain a reasonable frame rate. The maximum streaming
dimension must
| be one of the YUV_420_888 or PRIVATE resolutions in availableStreamConfigurations, or |
| (0, 0) if preview bokeh is not supported. If the application configures a stream |
| larger than the maximum streaming dimension, bokeh effect may not be applied for this |
| stream for PREVIEW intent. |
| |
| For BOKEH_CONTINUOUS mode, the maximum streaming dimension specifies the limit under |
| which bokeh is effective. This dimension must be one of the YUV_420_888 or PRIVATE |
| resolutions in availableStreamConfigurations, and if the sensor maximum resolution is |
| larger than or equal to 1080p, the maximum streaming dimension must be at least 1080p. |
| If the application configures a stream with larger dimension, the stream may not have |
| bokeh effect applied. |
| |
| When extended scene mode is set, the camera device may have limited range of zoom ratios |
| compared to when the mode is DISABLED. availableExtendedSceneModeCapabilities lists the |
| zoom ranges for all supported extended modes. A range of (1.0, 1.0) means that no zoom |
| (optical or digital) is supported. |
| </details> |
| </entry> |
| </static> |
| <controls> |
| <entry name="extendedSceneMode" type="byte" visibility="public" optional="true" |
| enum="true" hal_version="3.5"> |
| <enum> |
| <value id="0">DISABLED |
| <notes>Extended scene mode is disabled.</notes></value> |
| <value>BOKEH_STILL_CAPTURE |
| <notes>High quality bokeh mode is enabled for all non-raw streams (including YUV, |
| JPEG, and IMPLEMENTATION_DEFINED) when capture intent is STILL_CAPTURE. Due to the |
| extra image processing, this mode may introduce additional stall to non-raw streams. |
This mode should be used in high quality still capture use cases.
| </notes> |
| </value> |
| <value>BOKEH_CONTINUOUS |
| <notes>Bokeh effect must not slow down capture rate relative to sensor raw output, |
| and the effect is applied to all processed streams no larger than the maximum |
streaming dimension. This mode should be used if performance and power are a
priority, such as for video recording.
| </notes> |
| </value> |
| <value visibility="hidden" id="0x40">VENDOR_START |
| <notes> |
| Vendor defined extended scene modes. These depend on vendor implementation. |
| </notes> |
| </value> |
| </enum> |
| <description>Whether extended scene mode is enabled for a particular capture request. |
| </description> |
| <details> |
| With bokeh mode, the camera device may blur out the parts of scene that are not in |
| focus, creating a bokeh (or shallow depth of field) effect for people or objects. |
| |
| When set to BOKEH_STILL_CAPTURE mode with STILL_CAPTURE capture intent, due to the extra |
| processing needed for high quality bokeh effect, the stall may be longer than when |
| capture intent is not STILL_CAPTURE. |
| |
| When set to BOKEH_STILL_CAPTURE mode with PREVIEW capture intent, |
| |
| * If the camera device has BURST_CAPTURE capability, the frame rate requirement of |
| BURST_CAPTURE must still be met. |
| * All streams not larger than the maximum streaming dimension for BOKEH_STILL_CAPTURE mode |
| (queried via {@link android.hardware.camera2.CameraCharacteristics#CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES|ACAMERA_CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_MAX_SIZES}) |
| will have preview bokeh effect applied. |
| |
When set to BOKEH_CONTINUOUS mode, the configured stream dimensions should not exceed this
mode's maximum streaming dimension in order to have the bokeh effect applied. The bokeh
effect may not be available for streams larger than the maximum streaming dimension.
| |
| Switching between different extended scene modes may involve reconfiguration of the camera |
| pipeline, resulting in long latency. The application should check this key against the |
| available session keys queried via |
| {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys|ACameraManager_getCameraCharacteristics}. |
| |
| For a logical multi-camera, bokeh may be implemented by stereo vision from sub-cameras |
| with different field of view. As a result, when bokeh mode is enabled, the camera device |
| may override android.scaler.cropRegion or android.control.zoomRatio, and the field of |
| view may be smaller than when bokeh mode is off. |
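
To tie the pieces together, an illustrative Java sketch that enables BOKEH_CONTINUOUS only
when the device advertises it (`characteristics` and `builder` are assumed to exist in
the application):

    import android.hardware.camera2.CameraMetadata;
    import android.hardware.camera2.params.Capability;

    // Sketch: turn on continuous bokeh if the device lists that capability.
    Capability[] caps = characteristics.get(
            CameraCharacteristics.CONTROL_AVAILABLE_EXTENDED_SCENE_MODE_CAPABILITIES);
    if (caps != null) {
        for (Capability cap : caps) {
            if (cap.getMode()
                    == CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS) {
                // Keep streams within cap.getMaxStreamingSize() and zoom within
                // cap.getZoomRatioRange() for the effect to apply.
                builder.set(CaptureRequest.CONTROL_EXTENDED_SCENE_MODE,
                        CameraMetadata.CONTROL_EXTENDED_SCENE_MODE_BOKEH_CONTINUOUS);
            }
        }
    }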
| </details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.control.extendedSceneMode" kind="controls"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="zoomRatioRange" type="float" visibility="public" |
| type_notes="The range of zoom ratios that this camera device supports." |
| container="array" typedef="rangeFloat" hwlevel="limited" hal_version="3.5" |
| session_characteristics_key_since="35"> |
| <array> |
| <size>2</size> |
| </array> |
| <description> |
| Minimum and maximum zoom ratios supported by this camera device. |
| </description> |
| <units>A pair of zoom ratio in floating-points: (minZoom, maxZoom)</units> |
| <range> |
| maxZoom >= 1.0 >= minZoom |
| </range> |
| <details> |
| If the camera device supports zoom-out from 1x zoom, minZoom will be less than 1.0, and |
| setting android.control.zoomRatio to values less than 1.0 increases the camera's field |
| of view. |
| </details> |
| <hal_details> |
| When the key is reported, the camera device's android.scaler.availableMaxDigitalZoom |
| must be less than or equal to maxZoom. The camera framework makes sure to always |
| control zoom via android.control.zoomRatio. The android.scaler.cropRegion tag is only |
used to do horizontal or vertical cropping (but not both) to achieve an aspect ratio
different from the camera sensor's native aspect ratio.
| |
| For a logical multi-camera device, this key must either be reported for both the logical |
| camera device and all its physical sub-cameras, or none of them. |
| |
| When the key is not reported, camera framework derives the application-facing |
| zoomRatioRange to be (1, android.scaler.availableMaxDigitalZoom). |
| </hal_details> |
| </entry> |
| </static> |
| <controls> |
| <entry name="zoomRatio" type="float" visibility="public" hwlevel="limited" |
| hal_version="3.5"> |
| <description> |
The desired zoom ratio.
| </description> |
| <range>android.control.zoomRatioRange</range> |
| <details> |
| Instead of using android.scaler.cropRegion for zoom, the application can now choose to |
| use this tag to specify the desired zoom level. |
| |
| By using this control, the application gains a simpler way to control zoom, which can |
| be a combination of optical and digital zoom. For example, a multi-camera system may |
| contain more than one lens with different focal lengths, and the user can use optical |
| zoom by switching between lenses. Using zoomRatio has benefits in the scenarios below: |
| |
| * Zooming in from a wide-angle lens to a telephoto lens: A floating-point ratio provides |
| better precision compared to an integer value of android.scaler.cropRegion. |
| * Zooming out from a wide lens to an ultrawide lens: zoomRatio supports zoom-out whereas |
| android.scaler.cropRegion doesn't. |
| |
| To illustrate, here are several scenarios of different zoom ratios, crop regions, |
| and output streams, for a hypothetical camera device with an active array of size |
| `(2000,1500)`. |
| |
| * Camera Configuration: |
| * Active array size: `2000x1500` (3 MP, 4:3 aspect ratio) |
| * Output stream #1: `640x480` (VGA, 4:3 aspect ratio) |
| * Output stream #2: `1280x720` (720p, 16:9 aspect ratio) |
| * Case #1: 4:3 crop region with 2.0x zoom ratio |
| * Zoomed field of view: 1/4 of original field of view |
| * Crop region: `Rect(0, 0, 2000, 1500) // (left, top, right, bottom)` (post zoom) |
| *  |
| * `640x480` stream source area: `(0, 0, 2000, 1500)` (equal to crop region) |
| * `1280x720` stream source area: `(0, 187, 2000, 1312)` (letterboxed) |
| * Case #2: 16:9 crop region with 2.0x zoom. |
| * Zoomed field of view: 1/4 of original field of view |
| * Crop region: `Rect(0, 187, 2000, 1312)` |
| *  |
| * `640x480` stream source area: `(250, 187, 1750, 1312)` (pillarboxed) |
| * `1280x720` stream source area: `(0, 187, 2000, 1312)` (equal to crop region) |
| * Case #3: 1:1 crop region with 0.5x zoom out to ultrawide lens. |
| * Zoomed field of view: 4x of original field of view (switched from wide lens to ultrawide lens) |
| * Crop region: `Rect(250, 0, 1750, 1500)` |
| *  |
| * `640x480` stream source area: `(250, 187, 1750, 1312)` (letterboxed) |
| * `1280x720` stream source area: `(250, 328, 1750, 1172)` (letterboxed) |
| |
As seen from the graphs above, the coordinate system of cropRegion now changes to the
effective after-zoom field-of-view, and is represented by the rectangle of (0, 0,
activeArrayWidth, activeArrayHeight). The same applies to AE/AWB/AF regions, and faces.
| This coordinate system change isn't applicable to RAW capture and its related |
| metadata such as intrinsicCalibration and lensShadingMap. |
| |
| Using the same hypothetical example above, and assuming output stream #1 (640x480) is |
| the viewfinder stream, the application can achieve 2.0x zoom in one of two ways: |
| |
| * zoomRatio = 2.0, scaler.cropRegion = (0, 0, 2000, 1500) |
| * zoomRatio = 1.0 (default), scaler.cropRegion = (500, 375, 1500, 1125) |
| |
| If the application intends to set aeRegions to be top-left quarter of the viewfinder |
| field-of-view, the android.control.aeRegions should be set to (0, 0, 1000, 750) with |
| zoomRatio set to 2.0. Alternatively, the application can set aeRegions to the equivalent |
| region of (500, 375, 1000, 750) for zoomRatio of 1.0. If the application doesn't |
| explicitly set android.control.zoomRatio, its value defaults to 1.0. |
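| |
| As a concrete illustration of the first approach, here is a minimal sketch (the |
| `builder` name is an assumption for a viewfinder CaptureRequest.Builder): |
| |
| // Assumed: builder is a CaptureRequest.Builder for the viewfinder request. |
| builder.set(CaptureRequest.CONTROL_ZOOM_RATIO, 2.0f); |
| // AE region covering the top-left quarter of the zoomed field of view, |
| // expressed in the post-zoom coordinate system (0, 0, 2000, 1500). |
| builder.set(CaptureRequest.CONTROL_AE_REGIONS, new MeteringRectangle[] { |
| new MeteringRectangle(0, 0, 1000, 750, MeteringRectangle.METERING_WEIGHT_MAX) }); |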
| |
| One limitation of controlling zoom using zoomRatio is that the android.scaler.cropRegion |
| must only be used for letterboxing or pillarboxing of the sensor active array, and no |
| FREEFORM cropping can be used with android.control.zoomRatio other than 1.0. If |
| android.control.zoomRatio is not 1.0, and android.scaler.cropRegion is set to be |
| windowboxing, the camera framework will override the android.scaler.cropRegion to be |
| the active array. |
| |
| In the capture request, if the application sets android.control.zoomRatio to a |
| value != 1.0, the android.control.zoomRatio tag in the capture result reflects the |
| effective zoom ratio achieved by the camera device, and the android.scaler.cropRegion |
| adjusts for additional crops that are not zoom related. Otherwise, if the application |
| sets android.control.zoomRatio to 1.0, or does not set it at all, the |
| android.control.zoomRatio tag in the result metadata will also be 1.0. |
| |
| When the application requests a physical stream for a logical multi-camera, the |
| android.control.zoomRatio in the physical camera result metadata will be 1.0, and |
| the android.scaler.cropRegion tag reflects the amount of zoom and crop done by the |
| physical camera device. |
| </details> |
| <hal_details> |
| For all capture request templates, this field must be set to 1.0 in order to have |
| a consistent field of view between different modes. |
| </hal_details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.control.zoomRatio" kind="controls"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="availableHighSpeedVideoConfigurationsMaximumResolution" type="int32" |
| visibility="hidden" container="array" typedef="highSpeedVideoConfiguration" |
| hal_version="3.6"> |
| <array> |
| <size>5</size> |
| <size>n</size> |
| </array> |
| <description> |
| List of available high speed video size, fps range and max batch size configurations |
| supported by the camera device, in the format of |
| (width, height, fps_min, fps_max, batch_size_max), |
| when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <range> |
| For each configuration, the fps_max &gt;= 120fps. |
| </range> |
| <details> |
| Analogous to android.control.availableHighSpeedVideoConfigurations, for configurations |
| which are applicable when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <hal_details> |
| Refer to hal details for android.control.availableHighSpeedVideoConfigurations. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| </static> |
| <controls> |
| <entry name="afRegionsSet" type="byte" visibility="fwk_only" |
| enum="true" typedef="boolean"> |
| <enum> |
| <value>FALSE |
| <notes>AF regions (android.control.afRegions) have not been set by the camera client. |
| </notes> |
| </value> |
| <value>TRUE |
| <notes> |
| AF regions (android.control.afRegions) have been set by the camera client. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Framework-only private key which informs camera fwk that the AF regions have been set |
| by the client and those regions need not be corrected when android.sensor.pixelMode is |
| set to MAXIMUM_RESOLUTION. |
| </description> |
| <details> |
| This must be set to TRUE by the camera2 java fwk when the camera client sets |
| android.control.afRegions. |
| </details> |
| </entry> |
| <entry name="aeRegionsSet" type="byte" visibility="fwk_only" |
| enum="true" typedef="boolean"> |
| <enum> |
| <value>FALSE |
| <notes> AE regions (android.control.aeRegions) have not been set by the camera client. |
| </notes> |
| </value> |
| <value>TRUE |
| <notes> |
| AE regions (android.control.aeRegions) have been set by the camera client. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Framework-only private key which informs camera fwk that the AE regions have been set |
| by the client and those regions need not be corrected when android.sensor.pixelMode is |
| set to MAXIMUM_RESOLUTION. |
| </description> |
| <details> |
| This must be set to TRUE by the camera2 java fwk when the camera client sets |
| android.control.aeRegions. |
| </details> |
| </entry> |
| <entry name="awbRegionsSet" type="byte" visibility="fwk_only" |
| enum="true" typedef="boolean"> |
| <enum> |
| <value>FALSE |
| <notes> AWB regions (android.control.awbRegions) have not been set by the camera client. |
| </notes> |
| </value> |
| <value>TRUE |
| <notes> |
| AWB regions (android.control.awbRegions) have been set by the camera client. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Framework-only private key which informs camera fwk that the AWB regions have been set |
| by the client and those regions need not be corrected when android.sensor.pixelMode is |
| set to MAXIMUM_RESOLUTION. |
| </description> |
| <details> |
| This must be set to TRUE by the camera2 java fwk when the camera client sets |
| android.control.awbRegions. |
| </details> |
| </entry> |
| <entry name="settingsOverride" type="int32" visibility="public" enum="true" |
| hal_version="3.9"> |
| <enum> |
| <value>OFF |
| <notes> |
| No keys are applied sooner than the other keys when applying CaptureRequest |
| settings to the camera device. This is the default value. |
| </notes> |
| </value> |
| <value>ZOOM |
| <notes> |
| Zoom related keys are applied sooner than the other keys in the CaptureRequest. The |
| zoom related keys are: |
| |
| * android.control.zoomRatio |
| * android.scaler.cropRegion |
| * android.control.aeRegions |
| * android.control.awbRegions |
| * android.control.afRegions |
| |
| Even though android.control.aeRegions, android.control.awbRegions, |
| and android.control.afRegions are not directly zoom related, applications |
| typically scale these regions together with android.scaler.cropRegion to have a |
| consistent mapping within the current field of view. In this aspect, they are |
| related to android.scaler.cropRegion and android.control.zoomRatio. |
| </notes> |
| </value> |
| <value visibility="hidden" id="0x4000">VENDOR_START |
| <notes> |
| Vendor defined settingsOverride. These depend on vendor implementation. |
| </notes> |
| </value> |
| </enum> |
| <description>The desired CaptureRequest settings override with which certain keys are |
| applied earlier so that they can take effect sooner. |
| </description> |
| <range>android.control.availableSettingsOverrides</range> |
| <details> |
| There are some CaptureRequest keys which can be applied earlier than others |
| when controls within a CaptureRequest aren't required to take effect at the same time. |
| One such example is zoom. Zoom can be applied at a later stage of the camera pipeline. |
| As soon as the camera device receives the CaptureRequest, it can apply the requested |
| zoom value onto an earlier request that's already in the pipeline, thus improving zoom |
| latency. |
| |
| This key's value in the capture result reflects whether the controls for this capture |
| are overridden "by" a newer request. This means that if a capture request turns on |
| settings override, the capture result of an earlier request will contain the key value |
| of ZOOM. On the other hand, if a capture request has settings override turned on, |
| but all newer requests have it turned off, the key's value in the capture result will |
| be OFF because this capture isn't overridden by a newer capture. In the two examples |
| below, the capture results columns illustrate the settingsOverride values in different |
| scenarios. |
| |
| Assuming the zoom settings override can speed up zoom by 1 frame, the example below |
| illustrates the speed-up at the start of a capture session: |
| |
| Camera session created |
| Request 1 (zoom=1.0x, override=ZOOM) -> |
| Request 2 (zoom=1.2x, override=ZOOM) -> |
| Request 3 (zoom=1.4x, override=ZOOM) -> Result 1 (zoom=1.2x, override=ZOOM) |
| Request 4 (zoom=1.6x, override=ZOOM) -> Result 2 (zoom=1.4x, override=ZOOM) |
| Request 5 (zoom=1.8x, override=ZOOM) -> Result 3 (zoom=1.6x, override=ZOOM) |
| -> Result 4 (zoom=1.8x, override=ZOOM) |
| -> Result 5 (zoom=1.8x, override=OFF) |
| |
| The application can turn on settings override and use zoom as normal. The example |
| shows that the later zoom values (1.2x, 1.4x, 1.6x, and 1.8x) overwrite the zoom |
| values (1.0x, 1.2x, 1.4x, and 1.6x) of earlier requests (#1, #2, #3, and #4). |
| |
| The application must make sure the settings override doesn't interfere with user |
| journeys requiring simultaneous application of all controls in CaptureRequest on the |
| requested output targets. For example, if the application takes a still capture using |
| CameraCaptureSession#capture, and the repeating request immediately sets a different |
| zoom value using override, the inflight still capture could have its zoom value |
| overwritten unexpectedly. |
| |
| So the application is strongly recommended to turn off settingsOverride when taking |
| still/burst captures, and turn it back on when there is only repeating viewfinder |
| request and no inflight still/burst captures. |
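| |
| A minimal sketch of this ramp-out pattern (the previewBuilder, stillBuilder, |
| session, resultCallback, and handler names are assumptions): |
| |
| // Assumed: previewBuilder/stillBuilder are CaptureRequest.Builders and |
| // session is an active CameraCaptureSession. |
| previewBuilder.set(CaptureRequest.CONTROL_SETTINGS_OVERRIDE, |
| CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM); |
| session.setRepeatingRequest(previewBuilder.build(), resultCallback, handler); |
| // Before a still capture, ramp out of the override so the inflight still |
| // frame keeps the zoom value it was requested with. |
| previewBuilder.set(CaptureRequest.CONTROL_SETTINGS_OVERRIDE, |
| CameraMetadata.CONTROL_SETTINGS_OVERRIDE_OFF); |
| session.setRepeatingRequest(previewBuilder.build(), resultCallback, handler); |
| session.capture(stillBuilder.build(), resultCallback, handler); |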
| |
| Below is an example demonstrating the transitions into and out of the |
| settings override: |
| |
| Request 1 (zoom=1.0x, override=OFF) |
| Request 2 (zoom=1.2x, override=OFF) |
| Request 3 (zoom=1.4x, override=ZOOM) -> Result 1 (zoom=1.0x, override=OFF) |
| Request 4 (zoom=1.6x, override=ZOOM) -> Result 2 (zoom=1.4x, override=ZOOM) |
| Request 5 (zoom=1.8x, override=OFF) -> Result 3 (zoom=1.6x, override=ZOOM) |
| -> Result 4 (zoom=1.6x, override=OFF) |
| -> Result 5 (zoom=1.8x, override=OFF) |
| |
| This example shows that: |
| |
| * The application "ramps in" settings override by setting the control to ZOOM. |
| In the example, request #3 enables zoom settings override. Because the camera device |
| can speed up applying zoom by 1 frame, the output of request #2 has 1.4x zoom, the |
| value specified in request #3. |
| * The application "ramps out" of settings override by setting the control to OFF. In |
| the example, request #5 changes the override to OFF. Because request #4's zoom |
| takes effect in result #3, result #4's zoom remains the same until the new value takes |
| effect in result #5. |
| </details> |
| <hal_details> |
| The HAL must set this value to OFF in all of the Capture templates. |
| |
| Typically the HAL unblocks processCaptureRequest at the same rate as the sensor capture, |
| and the HAL queues the capture settings in its pipeline when processCaptureRequest |
| returns. However, when the settings override is enabled, the HAL can optimize the |
| overridden controls' latency by applying them as soon as processCaptureRequest is |
| called, rather than when it's unblocked. |
| |
| For devices launching with API level 35 or newer, to avoid regression on zoom |
| smoothness, when zoom settings override is on during pinch zoom (zoom value gradually |
| changes), the zoom effect in the camera output must not become more jittery. More |
| specifically, the variation of zoom pipeline latency must not increase. The latency |
| improvement must be at least 1 frame regardless of zoom in or zoom out. |
| |
| For devices launched before API level 35, if zoom settings override is on, when |
| zooming in, the HAL must be able to apply the zoom related settings at least 1 |
| frame ahead. |
| </hal_details> |
| </entry> |
| </controls> |
| <static> |
| <entry name="availableSettingsOverrides" type="int32" visibility="public" |
| optional="true" type_notes="list of enums" container="array" typedef="enumList" |
| hal_version="3.9"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>List of available settings overrides supported by the camera device that can |
| be used to speed up certain controls. |
| </description> |
| <range>Any value listed in android.control.settingsOverride</range> |
| <details>When not all controls within a CaptureRequest are required to take effect |
| at the same time on the outputs, the camera device may apply certain request keys sooner |
| to improve latency. This list contains such supported settings overrides. Each settings |
| override corresponds to a set of CaptureRequest keys that can be sped up when applying. |
| |
| A supported settings override can be passed in via |
| {@link android.hardware.camera2.CaptureRequest#CONTROL_SETTINGS_OVERRIDE}, and the |
| CaptureRequest keys corresponding to the override are applied as soon as possible, not |
| bound by per-frame synchronization. See android.control.settingsOverride for the |
| CaptureRequest keys for each override. |
| |
| OFF is always included in this list. |
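| |
| For instance, a short sketch of querying this list (the `chars` name is an |
| assumption for the camera's CameraCharacteristics): |
| |
| int[] overrides = chars.get( |
| CameraCharacteristics.CONTROL_AVAILABLE_SETTINGS_OVERRIDES); |
| boolean zoomOverrideSupported = false; |
| if (overrides != null) { |
| for (int mode : overrides) { |
| if (mode == CameraMetadata.CONTROL_SETTINGS_OVERRIDE_ZOOM) { |
| zoomOverrideSupported = true; |
| } |
| } |
| } |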
| </details> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.control.settingsOverride" kind="controls"> |
| </clone> |
| <entry name="settingsOverridingFrameNumber" type="int32" visibility="system" |
| hal_version="3.9"> |
| <description>The frame number of the newer request overriding this capture. |
| </description> |
| <details>Must be equal to the frame number of the result if this capture isn't |
| overridden by a newer request, i.e. if android.control.settingsOverride is OFF |
| in the capture result. On the other hand, if the capture is overridden by a newer |
| request, the value of this tag (unsigned) must be larger than the frame number of |
| the capture result.</details> |
| </entry> |
| </dynamic> |
| <controls> |
| <entry name="autoframing" type="byte" visibility="public" |
| enum="true" hwlevel="limited" hal_version="3.9"> |
| <enum> |
| <value>OFF |
| <notes> |
| Disable autoframing. |
| </notes> |
| </value> |
| <value>ON |
| <notes> |
| Enable autoframing to keep people in the frame's field of view. |
| </notes> |
| </value> |
| <value visibility="hidden">AUTO |
| <notes> |
| Automatically select ON or OFF based on the system level preferences. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Automatic crop, pan and zoom to keep objects in the center of the frame. |
| </description> |
| <details> |
| Auto-framing is a special mode provided by the camera device to dynamically crop, zoom |
| or pan the camera feed to try to ensure that the people in a scene occupy a reasonable |
| portion of the viewport. It is primarily designed to support video calling in |
| situations where the user isn't directly in front of the device, especially for |
| wide-angle cameras. |
| android.scaler.cropRegion and android.control.zoomRatio in CaptureResult will be used |
| to denote the coordinates of the auto-framed region. |
| Zoom and video stabilization controls are disabled when auto-framing is enabled. The 3A |
| regions must map the screen coordinates into the scaler crop returned from the capture |
| result instead of using the active array sensor. |
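| |
| A minimal sketch of enabling this mode after checking for support (the `chars` |
| and `builder` names are assumptions from the usual camera2 setup code): |
| |
| Boolean autoframingAvailable = |
| chars.get(CameraCharacteristics.CONTROL_AUTOFRAMING_AVAILABLE); |
| if (Boolean.TRUE.equals(autoframingAvailable)) { |
| builder.set(CaptureRequest.CONTROL_AUTOFRAMING, |
| CameraMetadata.CONTROL_AUTOFRAMING_ON); |
| } |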
| </details> |
| <hal_details> |
| While auto-framing is ON, the aspect ratio of the auto-framed region must match the |
| aspect ratio of the configured output stream. |
| When reporting CaptureResult, SCALER_CROP_REGION might not adequately describe the |
| actual sensor pixels. In this case, it is acceptable for the returned parameters to only |
| be an approximation of the image sensor region that is actually used. |
| When auto-framing is turned off, the transition should be immediate, with no panning or |
| zooming to transition to the settings requested by the app. When it is turned on, the |
| transition can be immediate or smooth. |
| </hal_details> |
| </entry> |
| </controls> |
| <static> |
| <entry name="autoframingAvailable" type="byte" visibility="public" enum="true" |
| typedef="boolean" hwlevel="limited" hal_version="3.9"> |
| <enum> |
| <value>FALSE</value> |
| <value>TRUE</value> |
| </enum> |
| <description>Whether the camera device supports android.control.autoframing. |
| </description> |
| <details> |
| Will be `false` if auto-framing is not available. |
| </details> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.control.autoframing" kind="controls"> |
| </clone> |
| <entry name="autoframingState" type="byte" visibility="public" |
| enum="true" hwlevel="limited" hal_version="3.9"> |
| <enum> |
| <value>INACTIVE |
| <notes> |
| Auto-framing is inactive. |
| </notes> |
| </value> |
| <value>FRAMING |
| <notes> |
| Auto-framing is in progress - either zooming in, zooming out or panning is taking place. |
| </notes> |
| </value> |
| <value>CONVERGED |
| <notes> |
| Auto-framing has reached a stable state (frame/fov is not being adjusted). The state |
| may transition back to FRAMING if the scene changes. |
| </notes> |
| </value> |
| </enum> |
| <description>Current state of auto-framing. |
| </description> |
| <details> |
| When the camera doesn't have auto-framing available (i.e. |
| `android.control.autoframingAvailable` == false) or it is not enabled (i.e. |
| `android.control.autoframing` == OFF), the state will always be INACTIVE. |
| Other states indicate the current auto-framing state: |
| |
| * When `android.control.autoframing` is set to ON, auto-framing will take |
| place. While the frame is aligning itself to center the object (doing things like |
| zooming in, zooming out or pan), the state will be FRAMING. |
| * When field of view is not being adjusted anymore and has reached a stable state, the |
| state will be CONVERGED. |
| </details> |
| </entry> |
| </dynamic> |
| <static> |
| <entry name="lowLightBoostInfoLuminanceRange" type="float" visibility="public" |
| optional="true" container="array" typedef="rangeFloat" |
| aconfig_flag="camera_ae_mode_low_light_boost" hal_version="3.10"> |
| <array> |
| <size>2</size> |
| </array> |
| <description> |
| The operating luminance range of low light boost measured in lux (lx). |
| </description> |
| <range> |
| The lower bound indicates the lowest scene luminance value the AE mode |
| 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY' can operate within. Scenes of lower luminance |
| than this may receive less brightening, increased noise, or artifacts. |
| |
| The upper bound indicates the luminance threshold at the point when the mode is enabled. |
| For example, 'Range[0.3, 30.0]' defines 0.3 lux as the lowest scene luminance the |
| mode can reliably support, and 30.0 lux as the threshold at which this mode is |
| activated. Scenes measured at less than or equal to 30 lux will activate low light |
| boost. |
| |
| If this key is defined, then the AE mode 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY' will |
| also be present. |
| </range> |
| </entry> |
| </static> |
| <dynamic> |
| <entry name="lowLightBoostState" type="byte" visibility="public" optional="true" enum="true" |
| aconfig_flag="camera_ae_mode_low_light_boost" hal_version="3.10"> |
| <enum> |
| <value>INACTIVE |
| <notes> |
| The AE mode 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY' is enabled but not applied. |
| </notes></value> |
| <value>ACTIVE |
| <notes> |
| The AE mode 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY' is enabled and applied. |
| </notes></value> |
| </enum> |
| <description> |
| Current state of the low light boost AE mode. |
| </description> |
| <details> |
| When low light boost is enabled by setting the AE mode to |
| 'ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY', it can dynamically apply a low light |
| boost when the scene light level drops below the activation threshold. |
| |
| This state indicates when low light boost is 'ACTIVE' and applied. Similarly, it can |
| indicate when it is not being applied by returning 'INACTIVE'. |
| |
| The default value will always be 'INACTIVE'. |
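| |
| A short sketch of reading this state from a capture result (the `result` name |
| is an assumption): |
| |
| // Assumed: result is a TotalCaptureResult from a session whose AE mode was |
| // set to ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY. |
| Integer state = result.get(CaptureResult.CONTROL_LOW_LIGHT_BOOST_STATE); |
| boolean boosting = state != null |
| && state == CameraMetadata.CONTROL_LOW_LIGHT_BOOST_STATE_ACTIVE; |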
| </details> |
| </entry> |
| </dynamic> |
| <controls> |
| <entry name="zoomMethod" type="byte" visibility="fwk_public" enum="true" |
| hwlevel="limited" aconfig_flag="zoom_method" hal_version="3.11"> |
| <enum> |
| <value id="0">AUTO |
| <notes> |
| The camera device automatically detects whether the application does zoom with |
| android.scaler.cropRegion or android.control.zoomRatio, and in turn decides which |
| metadata tag reflects the effective zoom level. |
| </notes></value> |
| <value id="1">ZOOM_RATIO |
| <notes> |
| The application intends to control zoom via android.control.zoomRatio, and |
| the effective zoom level is reflected by android.control.zoomRatio in capture results. |
| </notes></value> |
| </enum> |
| <description> |
| Whether the application uses android.scaler.cropRegion or android.control.zoomRatio |
| to control zoom levels. |
| </description> |
| <details> |
| If set to AUTO, the camera device detects which capture request key the application uses |
| to do zoom, android.scaler.cropRegion or android.control.zoomRatio. If |
| the application doesn't set android.control.zoomRatio or sets it to 1.0 in the capture |
| request, the effective zoom level is reflected in android.scaler.cropRegion in capture |
| results. If android.control.zoomRatio is set to values other than 1.0, the effective |
| zoom level is reflected in android.control.zoomRatio. AUTO is the default value |
| for this control, and also the behavior of the OS before Android version |
| {@link android.os.Build.VERSION_CODES#BAKLAVA BAKLAVA}. |
| |
| If set to ZOOM_RATIO, the application explicitly specifies that the zoom level is controlled |
| by android.control.zoomRatio, and the effective zoom level is reflected in |
| android.control.zoomRatio in capture results. This addresses an ambiguity with AUTO, |
| with which the camera device cannot know if the application is using cropRegion or |
| zoomRatio at 1.0x. |
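| |
| A minimal sketch, assuming the generated framework constants for this flagged |
| key (CaptureRequest.CONTROL_ZOOM_METHOD and the associated enum values) are |
| available on the running release: |
| |
| builder.set(CaptureRequest.CONTROL_ZOOM_METHOD, |
| CameraMetadata.CONTROL_ZOOM_METHOD_ZOOM_RATIO); |
| builder.set(CaptureRequest.CONTROL_ZOOM_RATIO, 1.0f); // explicit 1.0x zoom |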
| </details> |
| <hal_details> |
| Do not use this key directly. It's for camera framework usage, |
| and not for HAL consumption. |
| </hal_details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.control.zoomMethod" kind="controls"> |
| </clone> |
| </dynamic> |
| <controls> |
| <entry name="aePriorityMode" type="byte" visibility="public" |
| optional="true" enum="true" aconfig_flag="ae_priority" hal_version="3.11"> |
| <enum> |
| <value>OFF |
| <notes> |
| Disable AE priority mode. This is the default value. |
| </notes> |
| </value> |
| <value>SENSOR_SENSITIVITY_PRIORITY |
| <notes> |
| The camera device's auto-exposure routine is active and |
| prioritizes the application-selected ISO (android.sensor.sensitivity). |
| |
| The application has control over android.sensor.sensitivity while |
| the application's values for android.sensor.exposureTime and |
| android.sensor.frameDuration are ignored. |
| </notes> |
| </value> |
| <value>SENSOR_EXPOSURE_TIME_PRIORITY |
| <notes> |
| The camera device's auto-exposure routine is active and |
| prioritizes the application-selected exposure time |
| (android.sensor.exposureTime). |
| |
| The application has control over android.sensor.exposureTime while |
| the application's values for android.sensor.sensitivity and |
| android.sensor.frameDuration are ignored. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Turn on AE priority mode. |
| </description> |
| <details> |
| This control is only effective if android.control.mode is |
| AUTO and android.control.aeMode is set to one of its |
| ON modes, with the exception of ON_LOW_LIGHT_BOOST_BRIGHTNESS_PRIORITY. |
| |
| When a priority mode is enabled, the camera device's |
| auto-exposure routine will maintain the application's |
| selected parameters relevant to the priority mode while overriding |
| the remaining exposure parameters |
| (android.sensor.exposureTime, android.sensor.sensitivity, and |
| android.sensor.frameDuration). For example, if |
| SENSOR_SENSITIVITY_PRIORITY mode is enabled, the camera device will |
| maintain the application-selected android.sensor.sensitivity |
| while adjusting android.sensor.exposureTime |
| and android.sensor.frameDuration. The overridden fields for a |
| given capture will be available in its CaptureResult. |
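| |
| A minimal sketch of enabling ISO priority, assuming the generated framework |
| constants for this flagged key are available on the running release: |
| |
| builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON); |
| builder.set(CaptureRequest.CONTROL_AE_PRIORITY_MODE, |
| CameraMetadata.CONTROL_AE_PRIORITY_MODE_SENSOR_SENSITIVITY_PRIORITY); |
| // ISO is held at the requested value; AE adjusts exposure time and frame duration. |
| builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400); |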
| </details> |
| <hal_details> |
| The total sensitivity applied for SENSOR_SENSITIVITY_PRIORITY should not be |
| adjusted by any HAL-applied android.control.postRawSensitivityBoost. |
| </hal_details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.control.aePriorityMode" kind="controls"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="aeAvailablePriorityModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList" |
| aconfig_flag="ae_priority" hal_version="3.11"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of auto-exposure priority modes for android.control.aePriorityMode |
| that are supported by this camera device. |
| </description> |
| <range>Any value listed in android.control.aePriorityMode</range> |
| <details> |
| This entry lists the valid modes for |
| android.control.aePriorityMode for this camera device. |
| If no AE priority modes are available for a device, this will only list OFF. |
| </details> |
| </entry> |
| </static> |
| </section> |
| <section name="demosaic"> |
| <controls> |
| <entry name="mode" type="byte" enum="true"> |
| <enum> |
| <value>FAST |
| <notes>Minimal or no slowdown of frame rate compared to |
| Bayer RAW output.</notes></value> |
| <value>HIGH_QUALITY |
| <notes>Improved processing quality but the frame rate might be slowed down |
| relative to raw output.</notes></value> |
| </enum> |
| <description>Controls the quality of the demosaicing |
| processing.</description> |
| <tag id="FUTURE" /> |
| </entry> |
| </controls> |
| </section> |
| <section name="edge"> |
| <controls> |
| <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> |
| <enum> |
| <value>OFF |
| <notes>No edge enhancement is applied.</notes></value> |
| <value>FAST |
| <notes>Apply edge enhancement at a quality level that does not slow down frame rate |
| relative to sensor output. It may be the same as OFF if edge enhancement will |
| slow down frame rate relative to sensor.</notes></value> |
| <value>HIGH_QUALITY |
| <notes>Apply high-quality edge enhancement, at a cost of possibly reduced output frame rate. |
| </notes></value> |
| <value optional="true">ZERO_SHUTTER_LAG <notes>Edge enhancement is applied at different |
| levels for different output streams, based on resolution. Streams at maximum recording |
| resolution (see {@link |
| android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession}) |
| or below have edge enhancement applied, while higher-resolution streams have no edge |
| enhancement applied. The level of edge enhancement for low-resolution streams is tuned |
| so that frame rate is not impacted, and the quality is equal to or better than FAST |
| (since it is only applied to lower-resolution outputs, quality may improve from FAST). |
| |
| This mode is intended to be used by applications operating in a zero-shutter-lag mode |
| with YUV or PRIVATE reprocessing, where the application continuously captures |
| high-resolution intermediate buffers into a circular buffer, from which a final image is |
| produced via reprocessing when a user takes a picture. For such a use case, the |
| high-resolution buffers must not have edge enhancement applied to maximize efficiency of |
| preview and to avoid double-applying enhancement when reprocessed, while low-resolution |
| buffers (used for recording or preview, generally) need edge enhancement applied for |
| reasonable preview quality. |
| |
| This mode is guaranteed to be supported by devices that support either the |
| YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities |
| (android.request.availableCapabilities lists either of those capabilities) and it will |
| be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template. |
| </notes></value> |
| </enum> |
| <description>Operation mode for edge |
| enhancement.</description> |
| <range>android.edge.availableEdgeModes</range> |
| <details>Edge enhancement improves sharpness and details in the captured image. OFF means |
| no enhancement will be applied by the camera device. |
| |
| FAST/HIGH_QUALITY both mean camera device determined enhancement |
| will be applied. HIGH_QUALITY mode indicates that the |
| camera device will use the highest-quality enhancement algorithms, |
| even if it slows down capture rate. FAST means the camera device will |
| not slow down capture rate when applying edge enhancement. FAST may be the same as OFF if |
| edge enhancement will slow down capture rate. Every output stream will have a similar |
| amount of enhancement applied. |
| |
| ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular |
| buffer of high-resolution images during preview and reprocess image(s) from that buffer |
| into a final capture when triggered by the user. In this mode, the camera device applies |
| edge enhancement to low-resolution streams (below maximum recording resolution) to |
| maximize preview quality, but does not apply edge enhancement to high-resolution streams, |
| since those will be reprocessed later if necessary. |
| |
| For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera |
| device will apply FAST/HIGH_QUALITY YUV-domain edge enhancement, respectively. |
| The camera device may adjust its internal edge enhancement parameters for best |
| image quality based on the android.reprocess.effectiveExposureFactor, if it is set. |
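| |
| A minimal sketch of choosing an edge mode from the advertised list (the |
| `chars` and `builder` names are assumptions): |
| |
| int[] modes = chars.get(CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES); |
| int edgeMode = CameraMetadata.EDGE_MODE_FAST; // FAST is always listed |
| if (modes != null) { |
| for (int m : modes) { |
| if (m == CameraMetadata.EDGE_MODE_HIGH_QUALITY) { |
| edgeMode = m; // prefer quality when the device offers it |
| } |
| } |
| } |
| builder.set(CaptureRequest.EDGE_MODE, edgeMode); |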
| </details> |
| <hal_details> |
| For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to |
| adjust the internal edge enhancement reduction parameters appropriately to get the best |
| quality images. |
| </hal_details> |
| <tag id="V1" /> |
| <tag id="REPROC" /> |
| </entry> |
| <entry name="strength" type="byte"> |
| <description>Control the amount of edge enhancement |
| applied to the images</description> |
| <units>1-10; 10 is maximum sharpening</units> |
| <tag id="FUTURE" /> |
| </entry> |
| </controls> |
| <static> |
| <entry name="availableEdgeModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList" |
| hwlevel="full"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of edge enhancement modes for android.edge.mode that are supported by this camera |
| device. |
| </description> |
| <range>Any value listed in android.edge.mode</range> |
| <details> |
| Full-capability camera devices must always support OFF; camera devices that support |
| YUV_REPROCESSING or PRIVATE_REPROCESSING will list ZERO_SHUTTER_LAG; all devices will |
| list FAST. |
| </details> |
| <hal_details> |
| HAL must support both FAST and HIGH_QUALITY if edge enhancement control is available |
| on the camera device, but the underlying implementation can be the same for both modes. |
| That is, if the highest quality implementation on the camera device does not slow down |
| capture rate, then FAST and HIGH_QUALITY will generate the same output. |
| </hal_details> |
| <tag id="V1" /> |
| <tag id="REPROC" /> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.edge.mode" kind="controls"> |
| <tag id="V1" /> |
| <tag id="REPROC" /> |
| </clone> |
| </dynamic> |
| </section> |
| <section name="flash"> |
| <controls> |
| <entry name="firingPower" type="byte"> |
| <description>Power for flash firing/torch</description> |
| <units>10 is max power; 0 is no flash. Linear</units> |
| <range>0 - 10</range> |
| <details>Power for snapshot may use a different scale than |
| for torch mode. Only one entry for torch mode will be |
| used</details> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="firingTime" type="int64"> |
| <description>Firing time of flash relative to start of |
| exposure</description> |
| <units>nanoseconds</units> |
| <range>0-(exposure time-flash duration)</range> |
| <details>Clamped to (0, exposure time - flash |
| duration).</details> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes> |
| Do not fire the flash for this capture. |
| </notes> |
| </value> |
| <value>SINGLE |
| <notes> |
| If the flash is available and charged, fire flash |
| for this capture. |
| </notes> |
| </value> |
| <value>TORCH |
| <notes> |
| Transition flash to continuously on. |
| </notes> |
| </value> |
| </enum> |
| <description>The desired mode for the camera device's flash control.</description> |
| <details> |
| This control is only effective when flash unit is available |
| (`android.flash.info.available == true`). |
| |
| When this control is used, the android.control.aeMode must be set to ON or OFF. |
| Otherwise, the camera device auto-exposure related flash control (ON_AUTO_FLASH, |
| ON_ALWAYS_FLASH, or ON_AUTO_FLASH_REDEYE) will override this control. |
| |
| When set to OFF, the camera device will not fire flash for this capture. |
| |
| When set to SINGLE, the camera device will fire flash regardless of the camera |
| device's auto-exposure routine's result. When used in still capture case, this |
| control should be used along with auto-exposure (AE) precapture metering sequence |
| (android.control.aePrecaptureTrigger), otherwise, the image may be incorrectly exposed. |
| |
| When set to TORCH, the flash will be on continuously. This mode can be used |
| for use cases such as preview, auto-focus assist, still capture, or video recording. |
| |
| The flash status will be reported by android.flash.state in the capture result metadata. |
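| |
| For illustration, a minimal sketch of a flash-assisted still capture (the |
| `builder` name is an assumption; session plumbing omitted): |
| |
| builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_ON); |
| builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_SINGLE); |
| // Run the AE precapture metering sequence first so the flash-illuminated |
| // image is correctly exposed. |
| builder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER, |
| CameraMetadata.CONTROL_AE_PRECAPTURE_TRIGGER_START); |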
| </details> |
| <tag id="BC" /> |
| </entry> |
| </controls> |
| <static> |
| <namespace name="info"> |
| <entry name="available" type="byte" visibility="public" enum="true" |
| typedef="boolean" hwlevel="legacy"> |
| <enum> |
| <value>FALSE</value> |
| <value>TRUE</value> |
| </enum> |
| <description>Whether this camera device has a |
| flash unit.</description> |
| <details> |
| Will be `false` if no flash is available. |
| |
| If there is no flash unit, none of the flash controls do |
| anything.</details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="chargeDuration" type="int64"> |
| <description>Time taken before flash can fire |
| again</description> |
| <units>nanoseconds</units> |
| <range>0-1e9</range> |
| <details>1 second too long/too short for recharge? Should |
| this be power-dependent?</details> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="strengthMaximumLevel" type="int32" visibility="public" hal_version="3.8"> |
| <description>Maximum flashlight brightness level. |
| </description> |
| <details>If this value is greater than 1, then the device supports controlling the |
| flashlight brightness level via |
| {@link android.hardware.camera2.CameraManager#turnOnTorchWithStrengthLevel}. |
| If this value is equal to 1, flashlight brightness control is not supported. |
| The value for this key will be null for devices with no flash unit. |
| |
| The maximum value is guaranteed to be safe to use for an indefinite duration in |
| terms of device flashlight lifespan, but may be too bright for comfort for many |
| use cases. Use the default torch brightness value to avoid problems with an |
| over-bright flashlight. |
| </details> |
| </entry> |
| <entry name="strengthDefaultLevel" type="int32" visibility="public" hal_version="3.8"> |
| <description>Default flashlight brightness level to be set via |
| {@link android.hardware.camera2.CameraManager#turnOnTorchWithStrengthLevel}. |
| </description> |
| <details> |
| If flash unit is available this will be greater than or equal to 1 and less than |
| or equal to `android.flash.info.strengthMaximumLevel`. |
| |
| Setting flashlight brightness above the default level |
| (i.e. `android.flash.info.strengthDefaultLevel`) may make the device more |
| likely to reach thermal throttling conditions and slow down, or drain the |
| battery quicker than normal. To minimize such issues, it is recommended to |
| start the flashlight at this default brightness until a user explicitly requests |
| a brighter level. |
| Note that the value for this key will be null for devices with no flash unit. |
| The default level should always be > 0. |
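| |
| A short sketch of starting the torch at the default strength (the `manager` |
| and `id` names are assumptions; CameraAccessException handling omitted): |
| |
| CameraCharacteristics chars = manager.getCameraCharacteristics(id); |
| Integer max = chars.get(CameraCharacteristics.FLASH_INFO_STRENGTH_MAXIMUM_LEVEL); |
| Integer def = chars.get(CameraCharacteristics.FLASH_INFO_STRENGTH_DEFAULT_LEVEL); |
| if (max != null && max > 1 && def != null) { |
| manager.turnOnTorchWithStrengthLevel(id, def); // start at default brightness |
| } |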
| </details> |
| </entry> |
| </namespace> |
| <entry name="colorTemperature" type="byte"> |
| <description>The x,y whitepoint of the |
| flash</description> |
| <units>pair of floats</units> |
| <range>0-1 for both</range> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="maxEnergy" type="byte"> |
| <description>Max energy output of the flash for a full |
| power single flash</description> |
| <units>lumen-seconds</units> |
| <range>&gt;= 0</range> |
| <tag id="FUTURE" /> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.flash.firingPower" kind="controls"> |
| </clone> |
| <clone entry="android.flash.firingTime" kind="controls"> |
| </clone> |
| <clone entry="android.flash.mode" kind="controls"></clone> |
| <entry name="state" type="byte" visibility="public" enum="true" |
| hwlevel="limited"> |
| <enum> |
| <value>UNAVAILABLE |
| <notes>No flash on camera.</notes></value> |
| <value>CHARGING |
| <notes>Flash is charging and cannot be fired.</notes></value> |
| <value>READY |
| <notes>Flash is ready to fire.</notes></value> |
| <value>FIRED |
| <notes>Flash fired for this capture.</notes></value> |
| <value>PARTIAL |
| <notes>Flash partially illuminated this frame. |
| |
| This is usually due to the next or previous frame having |
| the flash fire, and the flash spilling into this capture |
| due to hardware limitations.</notes></value> |
| </enum> |
| <description>Current state of the flash |
| unit.</description> |
| <details> |
| When the camera device doesn't have flash unit |
| (i.e. `android.flash.info.available == false`), this state will always be UNAVAILABLE. |
| Other states indicate the current flash status. |
| |
| In certain conditions, this will be available on LEGACY devices: |
| |
| * Flash-less cameras always return UNAVAILABLE. |
| * Using android.control.aeMode `==` ON_ALWAYS_FLASH |
| will always return FIRED. |
| * Using android.flash.mode `==` TORCH |
| will always return FIRED. |
| |
| In all other conditions the state will not be available on |
| LEGACY devices (i.e. it will be `null`). |
| </details> |
| </entry> |
| </dynamic> |
| <controls> |
| <entry name="strengthLevel" type="int32" visibility="public" hwlevel="legacy" |
| hal_version="3.10"> |
| <description>Flash strength level to be used when manual flash control is active. |
| </description> |
| <range>`[1-android.flash.torchStrengthMaxLevel]` when the android.flash.mode is |
| set to TORCH; |
| `[1-android.flash.singleStrengthMaxLevel]` when the android.flash.mode is |
| set to SINGLE |
| </range> |
| <details>Flash strength level to use in capture mode, i.e. when the application |
| controls flash with either `SINGLE` or `TORCH` mode. |
| |
| Use android.flash.singleStrengthMaxLevel and |
| android.flash.torchStrengthMaxLevel to check whether the device supports |
| flash strength control or not. |
| If the values of android.flash.singleStrengthMaxLevel and |
| android.flash.torchStrengthMaxLevel are greater than 1, |
| then the device supports manual flash strength control. |
| |
| If the android.flash.mode `==` `TORCH` the value must be &gt;= 1 |
| and &lt;= android.flash.torchStrengthMaxLevel. |
| If the application doesn't set the key and |
| android.flash.torchStrengthMaxLevel &gt; 1, |
| then the flash will be fired at the default level set by HAL in |
| android.flash.torchStrengthDefaultLevel. |
| If the android.flash.mode `==` `SINGLE`, then the value must be &gt;= 1 |
| and &lt;= android.flash.singleStrengthMaxLevel. |
| If the application does not set this key and |
| android.flash.singleStrengthMaxLevel &gt; 1, |
| then the flash will be fired at the default level set by HAL |
| in android.flash.singleStrengthDefaultLevel. |
| If android.control.aeMode is set to any of `ON_AUTO_FLASH`, `ON_ALWAYS_FLASH`, |
| `ON_AUTO_FLASH_REDEYE`, `ON_EXTERNAL_FLASH` values, then the strengthLevel will be ignored. |
| |
| When AE mode is ON and flash mode is TORCH or SINGLE, the application should make sure |
| the AE mode, flash mode, and flash strength level remain the same between precapture |
| trigger request and final capture request. The flash strength level set during the |
| precapture sequence is used by the camera device as a reference. The actual strength |
| may be less, and the auto-exposure routine ensures proper conversion of sensor |
| exposure time and sensitivity between the precapture and final capture for the |
| specified strength level. |
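| |
| A minimal sketch of torch-mode strength control (the `chars` and `builder` |
| names are assumptions): |
| |
| Integer torchMax = chars.get(CameraCharacteristics.FLASH_TORCH_STRENGTH_MAX_LEVEL); |
| if (torchMax != null && torchMax > 1) { |
| builder.set(CaptureRequest.FLASH_MODE, CameraMetadata.FLASH_MODE_TORCH); |
| builder.set(CaptureRequest.FLASH_STRENGTH_LEVEL, torchMax / 2); // mid strength |
| } |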
| </details> |
| </entry> |
| </controls> |
| <static> |
| <entry name="singleStrengthMaxLevel" type="int32" visibility="public" |
| hwlevel="legacy" hal_version="3.10"> |
| <description>Maximum flash brightness level for manual flash control in `SINGLE` mode. |
| </description> |
| <details> |
| Maximum flash brightness level in camera capture mode and |
| android.flash.mode set to `SINGLE`. |
| Value will be &gt; 1 if the manual flash strength control feature is supported, |
| otherwise the value will be equal to 1. |
| Note that this level is just a number of supported levels (the granularity of control). |
| There are no actual physical power units tied to this level. |
| </details> |
| </entry> |
| <entry name="singleStrengthDefaultLevel" type="int32" |
| visibility="public" hwlevel="legacy" hal_version="3.10"> |
| <description>Default flash brightness level for manual flash control in `SINGLE` mode. |
| </description> |
| <details> |
| If flash unit is available this will be greater than or equal to 1 and less than |
| or equal to android.flash.singleStrengthMaxLevel. |
| Note for devices that do not support the manual flash strength control |
| feature, this level will always be equal to 1. |
| </details> |
| </entry> |
| <entry name="torchStrengthMaxLevel" type="int32" visibility="public" |
| hwlevel="legacy" hal_version="3.10"> |
| <description>Maximum flash brightness level for manual flash control in `TORCH` mode. |
| </description> |
| <details> |
| Maximum flash brightness level in camera capture mode and |
| android.flash.mode set to `TORCH`. |
| Value will be &gt; 1 if the manual flash strength control feature is supported, |
| otherwise the value will be equal to 1. |
| |
| Note that this level is just a number of supported levels (the granularity of control). |
| There are no actual physical power units tied to this level. |
| There is no relation between android.flash.torchStrengthMaxLevel and |
| android.flash.singleStrengthMaxLevel i.e. the ratio of |
| android.flash.torchStrengthMaxLevel:android.flash.singleStrengthMaxLevel |
| is not guaranteed to be the ratio of actual brightness. |
| </details> |
| </entry> |
| <entry name="torchStrengthDefaultLevel" type="int32" visibility="public" |
| hwlevel="legacy" hal_version="3.10"> |
| <description>Default flash brightness level for manual flash control in `TORCH` mode. |
| </description> |
| <details> |
| If flash unit is available this will be greater than or equal to 1 and less than |
| or equal to android.flash.torchStrengthMaxLevel. |
| Note for the devices that do not support the manual flash strength control feature, |
| this level will always be equal to 1. |
| </details> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.flash.strengthLevel" kind="controls"> |
| </clone> |
| </dynamic> |
| </section> |
| <section name="hotPixel"> |
| <controls> |
| <entry name="mode" type="byte" visibility="public" enum="true"> |
| <enum> |
| <value>OFF |
| <notes> |
| No hot pixel correction is applied. |
| |
| The frame rate must not be reduced relative to sensor raw output |
| for this option. |
| |
| The hotpixel map may be returned in android.statistics.hotPixelMap. |
| </notes> |
| </value> |
| <value>FAST |
| <notes> |
| Hot pixel correction is applied, without reducing frame |
| rate relative to sensor raw output. |
| |
| The hotpixel map may be returned in android.statistics.hotPixelMap. |
| </notes> |
| </value> |
| <value>HIGH_QUALITY |
| <notes> |
| High-quality hot pixel correction is applied, at a cost |
| of possibly reduced frame rate relative to sensor raw output. |
| |
| The hotpixel map may be returned in android.statistics.hotPixelMap. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Operational mode for hot pixel correction. |
| </description> |
| <range>android.hotPixel.availableHotPixelModes</range> |
| <details> |
| Hotpixel correction interpolates out, or otherwise removes, pixels |
| that do not accurately measure the incoming light (i.e. pixels that |
| are stuck at an arbitrary value or are oversensitive). |
| </details> |
| <tag id="V1" /> |
| <tag id="RAW" /> |
| </entry> |
| </controls> |
| <static> |
| <entry name="availableHotPixelModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of hot pixel correction modes for android.hotPixel.mode that are supported by this |
| camera device. |
| </description> |
| <range>Any value listed in android.hotPixel.mode</range> |
| <details> |
| FULL mode camera devices will always support FAST. |
| </details> |
| <hal_details> |
| To avoid performance issues, there will be significantly fewer hot |
| pixels than actual pixels on the camera sensor. |
| HAL must support both FAST and HIGH_QUALITY if hot pixel correction control is available |
| on the camera device, but the underlying implementation can be the same for both modes. |
| That is, if the highest quality implementation on the camera device does not slow down |
| capture rate, then FAST and HIGH_QUALITY will generate the same output. |
| </hal_details> |
| <tag id="V1" /> |
| <tag id="RAW" /> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.hotPixel.mode" kind="controls"> |
| <tag id="V1" /> |
| <tag id="RAW" /> |
| </clone> |
| </dynamic> |
| </section> |
| <section name="jpeg"> |
| <controls> |
| <entry name="gpsLocation" type="byte" visibility="java_public" synthetic="true" |
| typedef="location" hwlevel="legacy"> |
| <description> |
| A location object to use when generating image GPS metadata. |
| </description> |
| <details> |
| Setting a location object in a request will include the GPS coordinates of the location |
| into any JPEG images captured based on the request. These coordinates can then be |
| viewed by anyone who receives the JPEG image. |
| |
| This tag is also used for HEIC image capture. |
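| |
| For instance, a one-line sketch (the `builder` and `location` names are |
| assumptions; `location` is an android.location.Location fix obtained elsewhere): |
| |
| builder.set(CaptureRequest.JPEG_GPS_LOCATION, location); |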
| </details> |
| </entry> |
| <entry name="gpsCoordinates" type="double" visibility="ndk_public" |
| type_notes="latitude, longitude, altitude. First two in degrees, the third in meters" |
| container="array" hwlevel="legacy"> |
| <array> |
| <size>3</size> |
| </array> |
| <description>GPS coordinates to include in output JPEG |
| EXIF.</description> |
| <range>(-180 - 180], [-90,90], [-inf, inf]</range> |
| <details>This tag is also used for HEIC image capture.</details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="gpsProcessingMethod" type="byte" visibility="ndk_public" |
| typedef="string" hwlevel="legacy"> |
| <description>32 characters describing GPS algorithm to |
| include in EXIF.</description> |
| <units>UTF-8 null-terminated string</units> |
| <details>This tag is also used for HEIC image capture.</details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="gpsTimestamp" type="int64" visibility="ndk_public" hwlevel="legacy"> |
| <description>Time GPS fix was made to include in |
| EXIF.</description> |
| <units>UTC in seconds since January 1, 1970</units> |
| <details>This tag is also used for HEIC image capture.</details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="orientation" type="int32" visibility="public" hwlevel="legacy"> |
| <description>The orientation for a JPEG image.</description> |
| <units>Degrees in multiples of 90</units> |
| <range>0, 90, 180, 270</range> |
| <details> |
| The clockwise rotation angle in degrees, relative to the orientation |
| to the camera, that the JPEG picture needs to be rotated by, to be viewed |
| upright. |
| |
| Camera devices may either encode this value into the JPEG EXIF header, or |
| rotate the image data to match this orientation. When the image data is rotated, |
| the thumbnail data will also be rotated. Additionally, in the case where the image data |
| is rotated, {@link android.media.Image#getWidth} and {@link android.media.Image#getHeight} |
| will not be updated to reflect the height and width of the rotated image. |
| |
| Note that this orientation is relative to the orientation of the camera sensor, given |
| by android.sensor.orientation. |
| |
| To translate from the device orientation given by the Android sensor APIs for camera |
| sensors which are not EXTERNAL, the following sample code may be used: |
| |
| private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) { |
| if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) return 0; |
| int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION); |
| |
| // Round device orientation to a multiple of 90 |
| deviceOrientation = (deviceOrientation + 45) / 90 * 90; |
| |
| // Reverse device orientation for front-facing cameras |
| boolean facingFront = c.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT; |
| if (facingFront) deviceOrientation = -deviceOrientation; |
| |
| // Calculate desired JPEG orientation relative to camera orientation to make |
| // the image upright relative to the device orientation |
| int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360; |
| |
| return jpegOrientation; |
| } |
| |
| For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will |
| also be set to EXTERNAL. The above code is not relevant in such case. |
| |
| This tag is also used to describe the orientation of the HEIC image capture, in which |
| case the rotation is reflected by |
| {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}, and not by |
| rotating the image data itself. |
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="quality" type="byte" visibility="public" hwlevel="legacy"> |
| <description>Compression quality of the final JPEG |
| image.</description> |
| <range>1-100; larger is higher quality</range> |
| <details>85-95 is typical usage range. This tag is also used to describe the quality |
| of the HEIC image capture.</details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="thumbnailQuality" type="byte" visibility="public" hwlevel="legacy"> |
| <description>Compression quality of JPEG |
| thumbnail.</description> |
| <range>1-100; larger is higher quality</range> |
| <details>This tag is also used to describe the quality of the HEIC image capture.</details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="thumbnailSize" type="int32" visibility="public" |
| container="array" typedef="size" hwlevel="legacy"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>Resolution of embedded JPEG thumbnail.</description> |
| <range>android.jpeg.availableThumbnailSizes</range> |
| <details>When set to (0, 0) value, the JPEG EXIF will not contain thumbnail, |
| but the captured JPEG will still be a valid image. |
| |
| For best results, when issuing a request for a JPEG image, the thumbnail size selected |
| should have the same aspect ratio as the main JPEG output. |
| |
| If the thumbnail image aspect ratio differs from the JPEG primary image aspect |
| ratio, the camera device creates the thumbnail by cropping it from the primary image. |
| For example, if the primary image has a 4:3 aspect ratio and the thumbnail image has a |
| 16:9 aspect ratio, the primary image will be cropped vertically (letterbox) to |
| generate the thumbnail image. The thumbnail image will always have a smaller Field |
| Of View (FOV) than the primary image when aspect ratios differ. |
| |
| When an android.jpeg.orientation of non-zero degree is requested, |
| the camera device will handle thumbnail rotation in one of the following ways: |
| |
| * Set the {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag} |
| and keep jpeg and thumbnail image data unrotated. |
| * Rotate the jpeg and thumbnail image data and not set |
| {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}. In this |
| case, LIMITED or FULL hardware level devices will report rotated thumbnail size in |
| capture result, so the width and height will be interchanged if 90 or 270 degree |
| orientation is requested. LEGACY device will always report unrotated thumbnail |
| size. |
| |
| The tag is also used as thumbnail size for HEIC image format capture, in which case |
| the thumbnail rotation is reflected by |
| {@link android.media.ExifInterface#TAG_ORIENTATION EXIF orientation flag}, and not by |
| rotating the thumbnail data itself. |
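| |
| A short sketch of picking the largest listed thumbnail that matches a 4:3 |
| main JPEG output (the `chars` and `builder` names are assumptions): |
| |
| Size[] sizes = chars.get(CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES); |
| Size best = new Size(0, 0); // (0, 0) means no thumbnail |
| if (sizes != null) { |
| for (Size s : sizes) { |
| if (s.getWidth() * 3 == s.getHeight() * 4 |
| && s.getWidth() * s.getHeight() > best.getWidth() * best.getHeight()) { |
| best = s; // largest 4:3 thumbnail so far |
| } |
| } |
| } |
| builder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, best); |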
| </details> |
| <hal_details> |
| The HAL must not squeeze or stretch the downscaled primary image to generate thumbnail. |
| The cropping must be done on the primary jpeg image rather than the sensor pre-correction |
| active array. The stream cropping rule specified by "S5. Cropping" in camera3.h doesn't |
| apply to the thumbnail image cropping. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| </controls> |
| <static> |
| <entry name="availableThumbnailSizes" type="int32" visibility="public" |
| container="array" typedef="size" hwlevel="legacy"> |
| <array> |
| <size>2</size> |
| <size>n</size> |
| </array> |
| <description>List of JPEG thumbnail sizes for android.jpeg.thumbnailSize supported by this |
| camera device.</description> |
| <details> |
| This list will include at least one non-zero resolution, plus `(0,0)` for indicating no |
| thumbnail should be generated. |
| |
| Below conditions will be satisfied for this size list: |
| |
| * The sizes will be sorted by increasing pixel area (width x height). |
| If several resolutions have the same area, they will be sorted by increasing width. |
| * The aspect ratio of the largest thumbnail size will be same as the |
| aspect ratio of largest JPEG output size in android.scaler.availableStreamConfigurations. |
| The largest size is defined as the size that has the largest pixel area |
| in a given size list. |
| * Each output JPEG size in android.scaler.availableStreamConfigurations will have at least |
| one corresponding size that has the same aspect ratio in availableThumbnailSizes, |
| and vice versa. |
| * All non-`(0, 0)` sizes will have non-zero widths and heights. |
| |
| This list is also used as supported thumbnail sizes for HEIC image format capture. |
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="maxSize" type="int32" visibility="system"> |
| <description>Maximum size in bytes for the compressed |
| JPEG buffer, in the default sensor pixel mode (see android.sensor.pixelMode)</description> |
| <range>Must be large enough to fit any JPEG produced by |
| the camera</range> |
| <details>This is used for sizing the gralloc buffers for |
| JPEG</details> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.jpeg.gpsLocation" kind="controls"> |
| </clone> |
| <clone entry="android.jpeg.gpsCoordinates" kind="controls"> |
| </clone> |
| <clone entry="android.jpeg.gpsProcessingMethod" |
| kind="controls"></clone> |
| <clone entry="android.jpeg.gpsTimestamp" kind="controls"> |
| </clone> |
| <clone entry="android.jpeg.orientation" kind="controls"> |
| </clone> |
| <clone entry="android.jpeg.quality" kind="controls"> |
| </clone> |
| <entry name="size" type="int32"> |
| <description>The size of the compressed JPEG image, in |
| bytes</description> |
| <range>&gt;= 0</range> |
| <details>If no JPEG output is produced for the request, |
| this must be 0. |
| |
| Otherwise, this describes the real size of the compressed |
| JPEG image placed in the output stream. More specifically, |
| if android.jpeg.maxSize = 1000000, and a specific capture |
| has android.jpeg.size = 500000, then the output buffer from |
| the JPEG stream will be 1000000 bytes, of which the first |
| 500000 make up the real data.</details> |
| <tag id="FUTURE" /> |
| </entry> |
| <clone entry="android.jpeg.thumbnailQuality" |
| kind="controls"></clone> |
| <clone entry="android.jpeg.thumbnailSize" kind="controls"> |
| </clone> |
| </dynamic> |
| </section> |
| <section name="lens"> |
| <controls> |
| <entry name="aperture" type="float" visibility="public" hwlevel="full"> |
| <description>The desired lens aperture size, as a ratio of lens focal length to the |
| effective aperture diameter.</description> |
| <units>The f-number (f/N)</units> |
| <range>android.lens.info.availableApertures</range> |
| <details>Setting this value is only supported on camera devices that have a variable |
| aperture lens. |
| |
| When this is supported and android.control.aeMode is OFF, |
| this can be set along with android.sensor.exposureTime, |
| android.sensor.sensitivity, and android.sensor.frameDuration |
| to achieve manual exposure control. |
| |
| It may take several frames for the aperture to reach the |
| requested value; the camera device will report the current (intermediate) |
| aperture size in the capture result metadata while the aperture is changing. |
| While the aperture is still changing, android.lens.state will be set to MOVING. |
| |
| When this is supported and android.control.aeMode is one of |
| the ON modes, this will be overridden by the camera device |
| auto-exposure algorithm, the overridden values are then provided |
| back to the user in the corresponding result.</details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="filterDensity" type="float" visibility="public" hwlevel="full"> |
| <description> |
| The desired setting for the lens neutral density filter(s). |
| </description> |
| <units>Exposure Value (EV)</units> |
| <range>android.lens.info.availableFilterDensities</range> |
| <details> |
| This control will not be supported on most camera devices. |
| |
| Lens filters are typically used to lower the amount of light the |
| sensor is exposed to (measured in steps of EV). As used here, an EV |
| step is the standard logarithmic representation, which is |
| non-negative and inversely proportional to the amount of light |
| hitting the sensor. For example, setting this to 0 would result |
| in no reduction of the incoming light, and setting this to 2 would |
| mean that the filter is set to reduce incoming light by two stops |
| (allowing 1/4 of the prior amount of light to the sensor). |
| |
| It may take several frames before the lens filter density changes |
| to the requested value. While the filter density is still changing, |
| android.lens.state will be set to MOVING. |
| </details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="focalLength" type="float" visibility="public" hwlevel="legacy"> |
| <description> |
| The desired lens focal length; used for optical zoom. |
| </description> |
| <units>Millimeters</units> |
| <range>android.lens.info.availableFocalLengths</range> |
| <details> |
| This setting controls the physical focal length of the camera |
| device's lens. Changing the focal length changes the field of |
| view of the camera device, and is usually used for optical zoom. |
| |
| Like android.lens.focusDistance and android.lens.aperture, this |
| setting won't be applied instantaneously, and it may take several |
| frames before the lens can change to the requested focal length. |
| While the focal length is still changing, android.lens.state will |
| be set to MOVING. |
| |
| Optical zoom via this control will not be supported on most devices. Starting from API |
| level 30, the camera device may combine optical and digital zoom through the |
| android.control.zoomRatio control. |
| </details> |
| <hal_details> |
| For a logical camera device supporting both optical and digital zoom, if focalLength and |
| cropRegion change in the same request, the camera device must make sure that the new |
| focalLength and cropRegion take effect in the same frame. This is to make sure that there |
| is no visible field-of-view jump during zoom. For example, if cropRegion is applied |
| immediately, but focalLength takes more than 1 frame to take effect, the camera device |
| will delay the cropRegion so that it's synchronized with focalLength. |
| |
| Starting from API level 30, it's strongly recommended for HAL to implement the |
| combination of optical and digital zoom using the new android.control.zoomRatio API, in |
| lieu of using android.lens.focalLength and android.scaler.cropRegion. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="focusDistance" type="float" visibility="public" hwlevel="full"> |
| <description>Desired distance to plane of sharpest focus, |
| measured from frontmost surface of the lens.</description> |
| <units>See android.lens.info.focusDistanceCalibration for details</units> |
| <range>&gt;= 0</range> |
| <details> |
| This control can be used for setting manual focus, on devices that support |
| the MANUAL_SENSOR capability and have a variable-focus lens (see |
| android.lens.info.minimumFocusDistance). |
| |
| A value of `0.0f` means infinity focus. The value set will be clamped to |
| `[0.0f, android.lens.info.minimumFocusDistance]`. |
| |
| Like android.lens.focalLength, this setting won't be applied |
| instantaneously, and it may take several frames before the lens |
| can move to the requested focus distance. While the lens is still moving, |
| android.lens.state will be set to MOVING. |
| |
| LEGACY devices support at most setting this to `0.0f` |
| for infinity focus. |
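| |
| As a non-normative sketch, a manual-focus request on a MANUAL_SENSOR device could |
| look like the following (assuming a `CaptureRequest.Builder builder` and the value |
| `minFocusDistance` read from android.lens.info.minimumFocusDistance): |
| |
|     // Disable autofocus so the manual focus distance takes effect. |
|     builder.set(CaptureRequest.CONTROL_AF_MODE, |
|             CaptureRequest.CONTROL_AF_MODE_OFF); |
|     // Request focus at 0.5 m (2.0 diopters), clamped to the supported range. |
|     float diopters = Math.min(2.0f, minFocusDistance); |
|     builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, diopters); |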
| </details> |
| <tag id="BC" /> |
| <tag id="V1" /> |
| </entry> |
| <entry name="opticalStabilizationMode" type="byte" visibility="public" |
| enum="true" hwlevel="limited"> |
| <enum> |
| <value>OFF |
| <notes>Optical stabilization is unavailable.</notes> |
| </value> |
| <value optional="true">ON |
| <notes>Optical stabilization is enabled.</notes> |
| </value> |
| </enum> |
| <description> |
| Sets whether the camera device uses optical image stabilization (OIS) |
| when capturing images. |
| </description> |
| <range>android.lens.info.availableOpticalStabilization</range> |
| <details> |
| OIS is used to compensate for motion blur due to small |
| movements of the camera during capture. Unlike digital image |
| stabilization (android.control.videoStabilizationMode), OIS |
| makes use of mechanical elements to stabilize the camera |
| sensor, and thus allows for longer exposure times before |
| camera shake becomes apparent. |
| |
| Switching between different optical stabilization modes may take several |
| frames to initialize; the camera device will report the current mode in |
| capture result metadata. For example, when "ON" mode is requested, the |
| optical stabilization mode in the first several capture results may still |
| be "OFF", and it will become "ON" when the initialization is done. |
| |
| If a camera device supports both OIS and digital image stabilization |
| (android.control.videoStabilizationMode), turning both modes on may produce undesirable |
| interaction, so it is recommended not to enable both at the same time. |
| |
| If android.control.videoStabilizationMode is set to "PREVIEW_STABILIZATION", |
| android.lens.opticalStabilizationMode is overridden. The camera sub-system may choose |
| to turn on hardware based image stabilization in addition to software based stabilization |
| if it deems that appropriate. This key's value in the capture result will reflect which |
| OIS mode was chosen. |
| |
| Not all devices will support OIS; see |
| android.lens.info.availableOpticalStabilization for |
| available controls. |
| </details> |
| <tag id="V1" /> |
| </entry> |
| </controls> |
| <static> |
| <namespace name="info"> |
| <entry name="availableApertures" type="float" visibility="public" |
| container="array" hwlevel="full"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>List of aperture size values for android.lens.aperture that are |
| supported by this camera device.</description> |
| <units>The aperture f-number</units> |
| <details>If the camera device doesn't support a variable lens aperture, |
| this list will contain only one value, which is the fixed aperture size. |
| |
| If the camera device supports a variable aperture, the aperture values |
| in this list will be sorted in ascending order.</details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="availableFilterDensities" type="float" visibility="public" |
| container="array" hwlevel="full"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of neutral density filter values for |
| android.lens.filterDensity that are supported by this camera device. |
| </description> |
| <units>Exposure value (EV)</units> |
| <range> |
| Values are &gt;= 0 |
| </range> |
| <details> |
| If a neutral density filter is not supported by this camera device, |
| this list will contain only 0. Otherwise, this list will include every |
| filter density supported by the camera device, in ascending order. |
| </details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="availableFocalLengths" type="float" visibility="public" |
| type_notes="The list of available focal lengths" |
| container="array" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of focal lengths for android.lens.focalLength that are supported by this camera |
| device. |
| </description> |
| <units>Millimeters</units> |
| <range> |
| Values are &gt; 0 |
| </range> |
| <details> |
| If optical zoom is not supported, this list will only contain |
| a single value corresponding to the fixed focal length of the |
| device. Otherwise, this list will include every focal length supported |
| by the camera device, in ascending order. |
| </details> |
| <tag id="BC" /> |
| <tag id="V1" /> |
| </entry> |
| <entry name="availableOpticalStabilization" type="byte" |
| visibility="public" type_notes="list of enums" container="array" |
| typedef="enumList" hwlevel="limited"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of optical image stabilization (OIS) modes for |
| android.lens.opticalStabilizationMode that are supported by this camera device. |
| </description> |
| <range>Any value listed in android.lens.opticalStabilizationMode</range> |
| <details> |
| If OIS is not supported by a given camera device, this list will |
| contain only OFF. |
| </details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="hyperfocalDistance" type="float" visibility="public" optional="true" |
| hwlevel="limited" permission_needed="true"> |
| <description>Hyperfocal distance for this lens.</description> |
| <units>See android.lens.info.focusDistanceCalibration for details</units> |
| <range>If lens is fixed focus, &gt;= 0. If lens has focuser unit, the value is |
| within `(0.0f, android.lens.info.minimumFocusDistance]`</range> |
| <details> |
| If the lens is not fixed focus, the camera device will report this |
| field when android.lens.info.focusDistanceCalibration is APPROXIMATE or CALIBRATED. |
| </details> |
| </entry> |
| <entry name="minimumFocusDistance" type="float" visibility="public" optional="true" |
| hwlevel="limited" permission_needed="true"> |
| <description>Shortest distance from frontmost surface |
| of the lens that can be brought into sharp focus.</description> |
| <units>See android.lens.info.focusDistanceCalibration for details</units> |
| <range>&gt;= 0</range> |
| <details>If the lens is fixed-focus, this will be |
| 0.</details> |
| <hal_details>Mandatory for FULL devices; LIMITED devices |
| must always set this value to 0 for fixed-focus lenses, and may omit |
| the minimum focus distance otherwise. |
| |
| This field is also mandatory for all devices advertising |
| the MANUAL_SENSOR capability.</hal_details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="shadingMapSize" type="int32" visibility="ndk_public" |
| type_notes="width and height (N, M) of lens shading map provided by the camera device." |
| container="array" typedef="size" hwlevel="full"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>Dimensions of lens shading map.</description> |
| <range>Both values &gt;= 1</range> |
| <details> |
| The map should be on the order of 30-40 rows and columns, and |
| must be smaller than 64x64. |
| </details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="focusDistanceCalibration" type="byte" visibility="public" |
| enum="true" hwlevel="limited"> |
| <enum> |
| <value>UNCALIBRATED |
| <notes> |
| The lens focus distance is not accurate, and the units used for |
| android.lens.focusDistance do not correspond to any physical units. |
| |
| Setting the lens to the same focus distance on separate occasions may |
| result in a different real focus distance, depending on factors such |
| as the orientation of the device, the age of the focusing mechanism, |
| and the device temperature. The focus distance value will still be |
| in the range of `[0, android.lens.info.minimumFocusDistance]`, where 0 |
| represents the farthest focus. |
| </notes> |
| </value> |
| <value>APPROXIMATE |
| <notes> |
| The lens focus distance is measured in diopters. |
| |
| However, setting the lens to the same focus distance |
| on separate occasions may result in a different real |
| focus distance, depending on factors such as the |
| orientation of the device, the age of the focusing |
| mechanism, and the device temperature. |
| </notes> |
| </value> |
| <value>CALIBRATED |
| <notes> |
| The lens focus distance is measured in diopters, and |
| is calibrated. |
| |
| The lens mechanism is calibrated so that setting the |
| same focus distance is repeatable on multiple |
| occasions with good accuracy, and the focus distance |
| corresponds to the real physical distance to the plane |
| of best focus. |
| </notes> |
| </value> |
| </enum> |
| <description>The lens focus distance calibration quality.</description> |
| <details> |
| The lens focus distance calibration quality determines the reliability of |
| focus related metadata entries, i.e. android.lens.focusDistance, |
| android.lens.focusRange, android.lens.info.hyperfocalDistance, and |
| android.lens.info.minimumFocusDistance. |
| |
| APPROXIMATE and CALIBRATED devices report the focus metadata in |
| units of diopters (1/meter), so `0.0f` represents focusing at infinity, |
| and increasing positive numbers represent focusing closer and closer |
| to the camera device. The focus distance control also uses diopters |
| on these devices. |
| |
| UNCALIBRATED devices do not use units that are directly comparable |
| to any real physical measurement, but `0.0f` still represents farthest |
| focus, and android.lens.info.minimumFocusDistance represents the |
| nearest focus the device can achieve. |
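| |
| A minimal sketch of the diopter convention, for illustration only: |
| |
|     // diopters = 1 / distanceInMeters; 0 diopters means infinity focus. |
|     static float metersToDiopters(float meters) { |
|         return (meters == Float.POSITIVE_INFINITY) ? 0.0f : 1.0f / meters; |
|     } |
|     // e.g. metersToDiopters(0.5f) == 2.0f; metersToDiopters(2.0f) == 0.5f |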
| </details> |
| <hal_details> |
| For devices that advertise APPROXIMATE quality or higher, a focus distance of 0 diopters |
| (infinity focus) must work. When autofocus is disabled (android.control.afMode == OFF) |
| and the lens focus distance is set to 0 diopters |
| (android.lens.focusDistance == 0), the lens will move to focus at infinity |
| and is stably focused at infinity even if the device tilts. It may take the |
| lens some time to move; during the move the lens state should be MOVING and |
| the output diopter value should be changing toward 0. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| </namespace> |
| <entry name="facing" type="byte" visibility="public" enum="true" hwlevel="legacy"> |
| <enum> |
| <value>FRONT |
| <notes> |
| The camera device faces the same direction as the device's screen. |
| </notes></value> |
| <value>BACK |
| <notes> |
| The camera device faces the opposite direction as the device's screen. |
| </notes></value> |
| <value>EXTERNAL |
| <notes> |
| The camera device is an external camera, and has no fixed facing relative to the |
| device's screen. |
| </notes></value> |
| </enum> |
| <description>Direction the camera faces relative to |
| device screen.</description> |
| </entry> |
| <entry name="poseRotation" type="float" visibility="public" |
| container="array" permission_needed="true"> |
| <array> |
| <size>4</size> |
| </array> |
| <description> |
| The orientation of the camera relative to the sensor |
| coordinate system. |
| </description> |
| <units> |
| Quaternion coefficients |
| </units> |
| <details> |
| The four coefficients that describe the quaternion |
| rotation from the Android sensor coordinate system to a |
| camera-aligned coordinate system where the X-axis is |
| aligned with the long side of the image sensor, the Y-axis |
| is aligned with the short side of the image sensor, and |
| the Z-axis is aligned with the optical axis of the sensor. |
| |
| To convert from the quaternion coefficients `(x,y,z,w)` |
| to the axis of rotation `(a_x, a_y, a_z)` and rotation |
| amount `theta`, the following formulas can be used: |
| |
| theta = 2 * acos(w) |
| a_x = x / sin(theta/2) |
| a_y = y / sin(theta/2) |
| a_z = z / sin(theta/2) |
| |
| To create a 3x3 rotation matrix that applies the rotation |
| defined by this quaternion, the following matrix can be |
| used: |
| |
| R = [ 1 - 2y^2 - 2z^2, 2xy - 2zw, 2xz + 2yw, |
| 2xy + 2zw, 1 - 2x^2 - 2z^2, 2yz - 2xw, |
| 2xz - 2yw, 2yz + 2xw, 1 - 2x^2 - 2y^2 ] |
| |
| This matrix can then be used to apply the rotation to a |
| column vector point with |
| |
| `p' = Rp` |
| |
| where `p` is in the device sensor coordinate system, and |
| `p'` is in the camera-oriented coordinate system. |
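| |
| A non-normative Java sketch of the two formulas above, building `R` from the |
| reported coefficients `(x,y,z,w)` and computing `p' = Rp`: |
| |
|     static float[] rotate(float[] q, float[] p) { |
|         float x = q[0], y = q[1], z = q[2], w = q[3]; |
|         // Rotation matrix R built from the quaternion, as given above. |
|         float[][] r = { |
|             { 1 - 2*y*y - 2*z*z, 2*x*y - 2*z*w,     2*x*z + 2*y*w     }, |
|             { 2*x*y + 2*z*w,     1 - 2*x*x - 2*z*z, 2*y*z - 2*x*w     }, |
|             { 2*x*z - 2*y*w,     2*y*z + 2*x*w,     1 - 2*x*x - 2*y*y } |
|         }; |
|         float[] out = new float[3]; |
|         for (int i = 0; i < 3; i++) { |
|             out[i] = r[i][0]*p[0] + r[i][1]*p[1] + r[i][2]*p[2]; |
|         } |
|         return out; // p', in the camera-oriented coordinate system |
|     } |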
| |
| If android.lens.poseReference is UNDEFINED, the quaternion rotation cannot |
| be accurately represented by the camera device, and will be represented by |
| default values matching its default facing. |
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="poseTranslation" type="float" visibility="public" |
| container="array" permission_needed="true"> |
| <array> |
| <size>3</size> |
| </array> |
| <description>Position of the camera optical center.</description> |
| <units>Meters</units> |
| <details> |
| The position of the camera device's lens optical center, |
| as a three-dimensional vector `(x,y,z)`. |
| |
| Prior to Android P, or when android.lens.poseReference is PRIMARY_CAMERA, this position |
| is relative to the optical center of the largest camera device facing in the same |
| direction as this camera, in the {@link android.hardware.SensorEvent Android sensor |
| coordinate axes}. Note that only the axis definitions are shared with the sensor |
| coordinate system, but not the origin. |
| |
| If this device is the largest or only camera device with a given facing, then this |
| position will be `(0, 0, 0)`; a camera device with a lens optical center located 3 cm |
| from the main sensor along the +X axis (to the right from the user's perspective) will |
| report `(0.03, 0, 0)`. Note that this means that, for many computer vision |
| applications, the position needs to be negated to convert it to a translation from the |
| camera to the origin. |
| |
| To transform pixel coordinates between two cameras facing the same direction, first |
| the source camera android.lens.distortion must be corrected for. Then the source |
| camera android.lens.intrinsicCalibration needs to be applied, followed by the |
| android.lens.poseRotation of the source camera, the translation of the source camera |
| relative to the destination camera, the android.lens.poseRotation of the destination |
| camera, and finally the inverse of android.lens.intrinsicCalibration of the destination |
| camera. This obtains a radial-distortion-free coordinate in the destination camera pixel |
| coordinates. |
| |
| To compare this against a real image from the destination camera, the destination camera |
| image then needs to be corrected for radial distortion before comparison or sampling. |
| |
| When android.lens.poseReference is GYROSCOPE, then this position is relative to |
| the center of the primary gyroscope on the device. The axis definitions are the same as |
| with PRIMARY_CAMERA. |
| |
| When android.lens.poseReference is UNDEFINED, this position cannot be accurately |
| represented by the camera device, and will be represented as `(0, 0, 0)`. |
| |
| When android.lens.poseReference is AUTOMOTIVE, then this position is relative to the |
| origin of the automotive sensor coordinate system, which is at the center of the rear |
| axle. |
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.lens.aperture" kind="controls"> |
| <tag id="V1" /> |
| </clone> |
| <clone entry="android.lens.filterDensity" kind="controls"> |
| <tag id="V1" /> |
| </clone> |
| <clone entry="android.lens.focalLength" kind="controls"> |
| <tag id="BC" /> |
| </clone> |
| <clone entry="android.lens.focusDistance" kind="controls"> |
| <details>Should be zero for fixed-focus cameras</details> |
| <tag id="BC" /> |
| </clone> |
| <entry name="focusRange" type="float" visibility="public" |
| type_notes="Range of scene distances that are in focus" |
| container="array" typedef="pairFloatFloat" hwlevel="limited"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>The range of scene distances that are in |
| sharp focus (depth of field).</description> |
| <units>A pair of focus distances in diopters: (near, |
| far); see android.lens.info.focusDistanceCalibration for details.</units> |
| <range>&gt;=0</range> |
| <details>If variable focus is not supported, the camera device can still report a |
| fixed depth of field range.</details> |
| <tag id="BC" /> |
| </entry> |
| <clone entry="android.lens.opticalStabilizationMode" |
| kind="controls"> |
| <tag id="V1" /> |
| </clone> |
| <entry name="state" type="byte" visibility="public" enum="true" hwlevel="limited"> |
| <enum> |
| <value>STATIONARY |
| <notes> |
| The lens parameters (android.lens.focalLength, android.lens.focusDistance, |
| android.lens.filterDensity and android.lens.aperture) are not changing. |
| </notes> |
| </value> |
| <value>MOVING |
| <notes> |
| One or several of the lens parameters |
| (android.lens.focalLength, android.lens.focusDistance, |
| android.lens.filterDensity or android.lens.aperture) is |
| currently changing. |
| </notes> |
| </value> |
| </enum> |
| <description>Current lens status.</description> |
| <details> |
| For lens parameters android.lens.focalLength, android.lens.focusDistance, |
| android.lens.filterDensity and android.lens.aperture, when changes are requested, |
| they may take several frames to reach the requested values. This state indicates |
| the current status of the lens parameters. |
| |
| When the state is STATIONARY, the lens parameters are not changing. This could be |
| either because the parameters are all fixed, or because the lens has had enough |
| time to reach the most recently-requested values. |
| If all of these lens parameters are fixed for a camera device, as listed below: |
| |
| * Fixed focus (`android.lens.info.minimumFocusDistance == 0`), which means the |
| android.lens.focusDistance parameter will always be 0. |
| * Fixed focal length (android.lens.info.availableFocalLengths contains a single value), |
| which means optical zoom is not supported. |
| * No ND filter (android.lens.info.availableFilterDensities contains only 0). |
| * Fixed aperture (android.lens.info.availableApertures contains a single value). |
| |
| then this state will always be STATIONARY. |
| |
| When the state is MOVING, it indicates that at least one of the lens parameters |
| is changing. |
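| |
| As a non-normative sketch, an application could wait for the lens to settle by |
| checking this state in its capture callback: |
| |
|     // Inside a CameraCaptureSession.CaptureCallback implementation. |
|     @Override |
|     public void onCaptureCompleted(CameraCaptureSession session, |
|             CaptureRequest request, TotalCaptureResult result) { |
|         Integer state = result.get(CaptureResult.LENS_STATE); |
|         if (state != null && state == CaptureResult.LENS_STATE_STATIONARY) { |
|             // Lens parameters have reached the most recently requested values. |
|         } |
|     } |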
| </details> |
| <tag id="V1" /> |
| </entry> |
| <clone entry="android.lens.poseRotation" kind="static"> |
| </clone> |
| <clone entry="android.lens.poseTranslation" kind="static"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="intrinsicCalibration" type="float" visibility="public" |
| container="array" permission_needed="true"> |
| <array> |
| <size>5</size> |
| </array> |
| <description> |
| The parameters for this camera device's intrinsic |
| calibration. |
| </description> |
| <units> |
| Pixels in the |
| android.sensor.info.preCorrectionActiveArraySize |
| coordinate system. |
| </units> |
| <details> |
| The five calibration parameters that describe the |
| transform from camera-centric 3D coordinates to sensor |
| pixel coordinates: |
| |
| [f_x, f_y, c_x, c_y, s] |
| |
| Where `f_x` and `f_y` are the horizontal and vertical |
| focal lengths, `[c_x, c_y]` is the position of the optical |
| axis, and `s` is a skew parameter for the sensor plane not |
| being aligned with the lens plane. |
| |
| These are typically used within a transformation matrix K: |
| |
| K = [ f_x, s, c_x, |
| 0, f_y, c_y, |
| 0, 0, 1 ] |
| |
| which can then be combined with the camera pose rotation |
| `R` and translation `t` (android.lens.poseRotation and |
| android.lens.poseTranslation, respectively) to calculate the |
| complete transform from world coordinates to pixel |
| coordinates: |
| |
| P = [ K 0 * [ R -Rt |
| 0 1 ] 0 1 ] |
| |
| (Note the negation of poseTranslation when mapping from camera |
| to world coordinates, and multiplication by the rotation). |
| |
| With `p_w` being a point in the world coordinate system |
| and `p_s` being a point in the camera active pixel array |
| coordinate system, and with the mapping including the |
| homogeneous division by z: |
| |
| p_h = (x_h, y_h, z_h, w_h) = P p_w |
| p_s = p_h / z_h |
| |
| so `[x_s, y_s]` are the pixel coordinates of the world |
| point, `z_s = 1`, and `w_s` is a measurement of disparity |
| (depth) in pixel coordinates. |
| |
| Note that the coordinate system for this transform is the |
| android.sensor.info.preCorrectionActiveArraySize system, |
| where `(0,0)` is the top-left of the |
| preCorrectionActiveArraySize rectangle. Once the pose and |
| intrinsic calibration transforms have been applied to a |
| world point, then the android.lens.distortion |
| transform needs to be applied, and the result adjusted to |
| be in the android.sensor.info.activeArraySize coordinate |
| system (where `(0, 0)` is the top-left of the |
| activeArraySize rectangle), to determine the final pixel |
| coordinate of the world point for processed (non-RAW) |
| output buffers. |
| |
| For camera devices, the center of pixel `(x,y)` is located at |
| coordinate `(x + 0.5, y + 0.5)`. So on a device with a |
| precorrection active array of size `(10,10)`, the valid pixel |
| indices go from `(0,0)-(9,9)`, and a perfectly-built camera would |
| have an optical center at the exact center of the pixel grid, at |
| coordinates `(5.0, 5.0)`, which is the top-left corner of pixel |
| `(5,5)`. |
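| |
| As an illustrative (non-normative) sketch, projecting a point that is already in |
| the camera-oriented coordinate system to pre-correction pixel coordinates with |
| the five parameters `[f_x, f_y, c_x, c_y, s]` of this entry: |
| |
|     static float[] project(float[] k, float[] pCam) { |
|         float fx = k[0], fy = k[1], cx = k[2], cy = k[3], skew = k[4]; |
|         // Apply K, then the homogeneous division by z. |
|         float xs = (fx * pCam[0] + skew * pCam[1]) / pCam[2] + cx; |
|         float ys = (fy * pCam[1]) / pCam[2] + cy; |
|         return new float[] { xs, ys }; |
|     } |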
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="radialDistortion" type="float" visibility="public" |
| deprecated="true" container="array" permission_needed="true"> |
| <array> |
| <size>6</size> |
| </array> |
| <description> |
| The correction coefficients to correct for this camera device's |
| radial and tangential lens distortion. |
| </description> |
| <deprecation_description> |
| This field was inconsistently defined in terms of its |
| normalization. Use android.lens.distortion instead. |
| </deprecation_description> |
| <units> |
| Unitless coefficients. |
| </units> |
| <details> |
| Four radial distortion coefficients `[kappa_0, kappa_1, kappa_2, |
| kappa_3]` and two tangential distortion coefficients |
| `[kappa_4, kappa_5]` that can be used to correct the |
| lens's geometric distortion with the mapping equations: |
| |
| x_c = x_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + |
| kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 ) |
| y_c = y_i * ( kappa_0 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + |
| kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 ) |
| |
| Here, `[x_c, y_c]` are the coordinates to sample in the |
| input image that correspond to the pixel values in the |
| corrected image at the coordinate `[x_i, y_i]`: |
| |
| correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage) |
| |
| The pixel coordinates are defined in a normalized |
| coordinate system related to the |
| android.lens.intrinsicCalibration calibration fields. |
| Both `[x_i, y_i]` and `[x_c, y_c]` have `(0,0)` at the |
| lens optical center `[c_x, c_y]`. The maximum magnitudes |
| of both x and y coordinates are normalized to be 1 at the |
| edge further from the optical center, so the range |
| for both dimensions is `-1 <= x <= 1`. |
| |
| Finally, `r` represents the radial distance from the |
| optical center, `r^2 = x_i^2 + y_i^2`, and its magnitude |
| is therefore no larger than `|r| <= sqrt(2)`. |
| |
| The distortion model used is the Brown-Conrady model. |
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.lens.intrinsicCalibration" kind="static"> |
| </clone> |
| <clone entry="android.lens.radialDistortion" kind="static"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="poseReference" type="byte" visibility="public" enum="true" |
| permission_needed="true" hal_version="3.3" > |
| <enum> |
| <value>PRIMARY_CAMERA |
| <notes>The value of android.lens.poseTranslation is relative to the optical center of |
| the largest camera device facing the same direction as this camera. |
| |
| This is the default value for API levels before Android P. |
| </notes> |
| </value> |
| <value>GYROSCOPE |
| <notes>The value of android.lens.poseTranslation is relative to the position of the |
| primary gyroscope of this Android device. |
| </notes> |
| </value> |
| <value hal_version="3.5">UNDEFINED |
| <notes>The camera device cannot represent the values of android.lens.poseTranslation |
| and android.lens.poseRotation accurately enough. One such example is a camera device |
| on the cover of a foldable phone: in order to measure the pose translation and rotation, |
| some kind of hinge position sensor would be needed. |
| |
| The value of android.lens.poseTranslation must be all zeros, and |
| android.lens.poseRotation must be values matching its default facing. |
| </notes> |
| </value> |
| <value hal_version="3.8">AUTOMOTIVE |
| <notes>The value of android.lens.poseTranslation is relative to the origin of the |
| automotive sensor coordinate system, which is at the center of the rear axle. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| The origin for android.lens.poseTranslation, and the accuracy of |
| android.lens.poseTranslation and android.lens.poseRotation. |
| </description> |
| <details> |
| Different calibration methods and use cases can produce better or worse results |
| depending on the selected coordinate origin. |
| </details> |
| </entry> |
| <entry name="distortion" type="float" visibility="public" container="array" |
| permission_needed="true" hal_version="3.3" > |
| <array> |
| <size>5</size> |
| </array> |
| <description> |
| The correction coefficients to correct for this camera device's |
| radial and tangential lens distortion. |
| |
| Replaces the deprecated android.lens.radialDistortion field, which was |
| inconsistently defined. |
| </description> |
| <units> |
| Unitless coefficients. |
| </units> |
| <details> |
| Three radial distortion coefficients `[kappa_1, kappa_2, |
| kappa_3]` and two tangential distortion coefficients |
| `[kappa_4, kappa_5]` that can be used to correct the |
| lens's geometric distortion with the mapping equations: |
| |
| x_c = x_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + |
| kappa_4 * (2 * x_i * y_i) + kappa_5 * ( r^2 + 2 * x_i^2 ) |
| y_c = y_i * ( 1 + kappa_1 * r^2 + kappa_2 * r^4 + kappa_3 * r^6 ) + |
| kappa_5 * (2 * x_i * y_i) + kappa_4 * ( r^2 + 2 * y_i^2 ) |
| |
| Here, `[x_c, y_c]` are the coordinates to sample in the |
| input image that correspond to the pixel values in the |
| corrected image at the coordinate `[x_i, y_i]`: |
| |
| correctedImage(x_i, y_i) = sample_at(x_c, y_c, inputImage) |
| |
| The pixel coordinates are defined in a coordinate system |
| related to the android.lens.intrinsicCalibration |
| calibration fields; see that entry for details of the mapping stages. |
| Both `[x_i, y_i]` and `[x_c, y_c]` |
| have `(0,0)` at the lens optical center `[c_x, c_y]`, and |
| the range of the coordinates depends on the focal length |
| terms of the intrinsic calibration. |
| |
| Finally, `r` represents the radial distance from the |
| optical center, `r^2 = x_i^2 + y_i^2`. |
| |
| The distortion model used is the Brown-Conrady model. |
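| |
| A minimal Java sketch of the mapping equations above (the coefficient array `k` |
| is assumed to hold `[kappa_1 .. kappa_5]` from this entry): |
| |
|     static float[] distort(float[] k, float xi, float yi) { |
|         float r2 = xi * xi + yi * yi; |
|         float radial = 1 + k[0]*r2 + k[1]*r2*r2 + k[2]*r2*r2*r2; |
|         float xc = xi * radial + k[3] * (2*xi*yi) + k[4] * (r2 + 2*xi*xi); |
|         float yc = yi * radial + k[4] * (2*xi*yi) + k[3] * (r2 + 2*yi*yi); |
|         return new float[] { xc, yc }; // where to sample in the input image |
|     } |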
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="distortionMaximumResolution" type="float" visibility="public" container="array" |
| permission_needed="true" hal_version="3.6" > |
| <array> |
| <size>5</size> |
| </array> |
| <description> |
| The correction coefficients to correct for this camera device's |
| radial and tangential lens distortion for a |
| CaptureRequest with android.sensor.pixelMode set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units> |
| Unitless coefficients. |
| </units> |
| <details> |
| Analogous to android.lens.distortion, when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="intrinsicCalibrationMaximumResolution" type="float" visibility="public" |
| container="array" permission_needed="true" hal_version="3.6"> |
| <array> |
| <size>5</size> |
| </array> |
| <description> |
| The parameters for this camera device's intrinsic |
| calibration when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units> |
| Pixels in the |
| android.sensor.info.preCorrectionActiveArraySizeMaximumResolution |
| coordinate system. |
| </units> |
| <details> |
| Analogous to android.lens.intrinsicCalibration, when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.lens.distortion" kind="static"> |
| </clone> |
| </dynamic> |
| </section> |
| <section name="noiseReduction"> |
| <controls> |
| <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> |
| <enum> |
| <value>OFF |
| <notes>No noise reduction is applied.</notes></value> |
| <value>FAST |
| <notes>Noise reduction is applied without reducing frame rate relative to sensor |
| output. It may be the same as OFF if noise reduction will reduce frame rate |
| relative to sensor.</notes></value> |
| <value>HIGH_QUALITY |
| <notes>High-quality noise reduction is applied, at the cost of possibly reduced frame |
| rate relative to sensor output.</notes></value> |
| <value optional="true">MINIMAL |
| <notes>MINIMAL noise reduction is applied without reducing frame rate relative to |
| sensor output. </notes></value> |
| <value optional="true">ZERO_SHUTTER_LAG |
| |
| <notes>Noise reduction is applied at different levels for different output streams, |
| based on resolution. Streams at maximum recording resolution (see {@link |
| android.hardware.camera2.CameraDevice#createCaptureSession|ACameraDevice_createCaptureSession}) |
| or below have noise reduction applied, while higher-resolution streams have MINIMAL (if |
| supported) or no noise reduction applied (if MINIMAL is not supported.) The degree of |
| noise reduction for low-resolution streams is tuned so that frame rate is not impacted, |
| and the quality is equal to or better than FAST (since it is only applied to |
| lower-resolution outputs, quality may improve from FAST). |
| |
| This mode is intended to be used by applications operating in a zero-shutter-lag mode |
| with YUV or PRIVATE reprocessing, where the application continuously captures |
| high-resolution intermediate buffers into a circular buffer, from which a final image is |
| produced via reprocessing when a user takes a picture. For such a use case, the |
| high-resolution buffers must not have noise reduction applied to maximize efficiency of |
| preview and to avoid over-applying noise filtering when reprocessing, while |
| low-resolution buffers (used for recording or preview, generally) need noise reduction |
| applied for reasonable preview quality. |
| |
| This mode is guaranteed to be supported by devices that support either the |
| YUV_REPROCESSING or PRIVATE_REPROCESSING capabilities |
| (android.request.availableCapabilities lists either of those capabilities) and it will |
| be the default mode for CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template. |
| </notes></value> |
| </enum> |
| <description>Mode of operation for the noise reduction algorithm.</description> |
| <range>android.noiseReduction.availableNoiseReductionModes</range> |
| <details>The noise reduction algorithm attempts to improve image quality by removing |
| excessive noise added by the capture process, especially in dark conditions. |
| |
| OFF means no noise reduction will be applied by the camera device, for both raw and |
| YUV domain. |
| |
| MINIMAL means that only basic sensor raw domain noise reduction is enabled, to remove |
| demosaicing or other processing artifacts. For YUV_REPROCESSING, MINIMAL is the same as OFF. |
| This mode is optional and may not be supported by all devices. The application should check |
| android.noiseReduction.availableNoiseReductionModes before using it. |
| |
| FAST/HIGH_QUALITY both mean camera device determined noise filtering |
| will be applied. HIGH_QUALITY mode indicates that the camera device |
| will use the highest-quality noise filtering algorithms, |
| even if it slows down capture rate. FAST means the camera device will not |
| slow down capture rate when applying noise filtering. FAST may be the same as MINIMAL if |
| MINIMAL is listed, or the same as OFF if any noise filtering will slow down capture rate. |
| Every output stream will have a similar amount of enhancement applied. |
| |
| ZERO_SHUTTER_LAG is meant to be used by applications that maintain a continuous circular |
| buffer of high-resolution images during preview and reprocess image(s) from that buffer |
| into a final capture when triggered by the user. In this mode, the camera device applies |
| noise reduction to low-resolution streams (below maximum recording resolution) to maximize |
| preview quality, but does not apply noise reduction to high-resolution streams, since |
| those will be reprocessed later if necessary. |
| |
| For YUV_REPROCESSING, these FAST/HIGH_QUALITY modes both mean that the camera device |
| will apply FAST/HIGH_QUALITY YUV domain noise reduction, respectively. The camera device |
| may adjust the noise reduction parameters for best image quality based on the |
| android.reprocess.effectiveExposureFactor if it is set. |
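| |
| For illustration only, an application might select the mode as in this sketch |
| (assuming a `CaptureRequest.Builder builder` and `modes`, this device's |
| android.noiseReduction.availableNoiseReductionModes array): |
| |
|     // Prefer ZERO_SHUTTER_LAG when supported, otherwise fall back to FAST. |
|     int chosen = CaptureRequest.NOISE_REDUCTION_MODE_FAST; |
|     for (int m : modes) { |
|         if (m == CaptureRequest.NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG) chosen = m; |
|     } |
|     builder.set(CaptureRequest.NOISE_REDUCTION_MODE, chosen); |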
| </details> |
| <hal_details> |
| For YUV_REPROCESSING, the HAL can use android.reprocess.effectiveExposureFactor to |
| adjust the internal noise reduction parameters appropriately to get the best quality |
| images. |
| </hal_details> |
| <tag id="V1" /> |
| <tag id="REPROC" /> |
| </entry> |
| <entry name="strength" type="byte"> |
| <description>Control the amount of noise reduction |
| applied to the images</description> |
| <units>1-10; 10 is max noise reduction</units> |
| <range>1 - 10</range> |
| <tag id="FUTURE" /> |
| </entry> |
| </controls> |
| <static> |
| <entry name="availableNoiseReductionModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList" hwlevel="limited"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of noise reduction modes for android.noiseReduction.mode that are supported |
| by this camera device. |
| </description> |
| <range>Any value listed in android.noiseReduction.mode</range> |
| <details> |
| Full-capability camera devices will always support OFF and FAST. |
| |
| Camera devices that support YUV_REPROCESSING or PRIVATE_REPROCESSING will support |
| ZERO_SHUTTER_LAG. |
| |
| Legacy-capability camera devices will only support FAST mode. |
| </details> |
| <hal_details> |
| HAL must support both FAST and HIGH_QUALITY if noise reduction control is available |
| on the camera device, but the underlying implementation can be the same for both modes. |
| That is, if the highest quality implementation on the camera device does not slow down |
| capture rate, then FAST and HIGH_QUALITY will generate the same output. |
| </hal_details> |
| <tag id="V1" /> |
| <tag id="REPROC" /> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.noiseReduction.mode" kind="controls"> |
| <tag id="V1" /> |
| <tag id="REPROC" /> |
| </clone> |
| </dynamic> |
| </section> |
| <section name="quirks"> |
| <static> |
| <entry name="meteringCropRegion" type="byte" visibility="system" deprecated="true" optional="true"> |
| <description>If set to 1, the camera service does not |
| scale 'normalized' coordinates with respect to the crop |
| region. This applies to metering input (a{e,f,wb}Region) |
| and output (face rectangles).</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <details>Normalized coordinates refer to those in the |
| (-1000,1000) range mentioned in the |
| android.hardware.Camera API. |
| |
| HAL implementations should instead always use and emit |
| sensor array-relative coordinates for all region data. Does |
| not need to be listed in static metadata. Support will be |
| removed in future versions of camera service.</details> |
| </entry> |
| <entry name="triggerAfWithAuto" type="byte" visibility="system" deprecated="true" optional="true"> |
| <description>If set to 1, then the camera service always |
| switches to FOCUS_MODE_AUTO before issuing an AF |
| trigger.</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <details>HAL implementations should implement AF trigger |
| modes for AUTO, MACRO, CONTINUOUS_FOCUS, and |
| CONTINUOUS_PICTURE modes instead of using this flag. Does |
| not need to be listed in static metadata. Support will be |
| removed in future versions of camera service.</details> |
| </entry> |
| <entry name="useZslFormat" type="byte" visibility="system" deprecated="true" optional="true"> |
| <description>If set to 1, the camera service uses |
| CAMERA2_PIXEL_FORMAT_ZSL instead of |
| HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED for the zero |
| shutter lag stream</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <details>HAL implementations should use gralloc usage flags |
| to determine that a stream will be used for |
| zero-shutter-lag, instead of relying on an explicit |
| format setting. Does not need to be listed in static |
| metadata. Support will be removed in future versions of |
| camera service.</details> |
| </entry> |
| <entry name="usePartialResult" type="byte" visibility="hidden" deprecated="true" optional="true"> |
| <description> |
| If set to 1, the HAL will always split result |
| metadata for a single capture into multiple buffers, |
| returned using multiple process_capture_result calls. |
| </description> |
| <deprecation_description> |
| Not used in HALv3 or newer; replaced by better partials mechanism |
| </deprecation_description> |
| <details> |
| Does not need to be listed in static |
| metadata. Support for partial results will be reworked in |
| future versions of camera service. This quirk will stop |
| working at that point; DO NOT USE without careful |
| consideration of future support. |
| </details> |
| <hal_details> |
| Refer to `camera3_capture_result::partial_result` |
| for information on how to implement partial results. |
| </hal_details> |
| </entry> |
| </static> |
| <dynamic> |
| <entry name="partialResult" type="byte" visibility="hidden" deprecated="true" optional="true" enum="true" typedef="boolean"> |
| <enum> |
| <value>FINAL |
| <notes>The last or only metadata result buffer |
| for this capture.</notes> |
| </value> |
| <value>PARTIAL |
| <notes>A partial buffer of result metadata for this |
| capture. More result buffers for this capture will be sent |
| by the camera device, the last of which will be marked |
| FINAL.</notes> |
| </value> |
| </enum> |
| <description> |
| Whether a result given to the framework is the |
| final one for the capture, or only a partial that contains a |
| subset of the full set of dynamic metadata |
| values.</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <range>Optional. Default value is FINAL.</range> |
| <details> |
| The entries in the result metadata buffers for a |
| single capture may not overlap, except for this entry. The |
| FINAL buffers must retain FIFO ordering relative to the |
| requests that generate them, so the FINAL buffer for frame 3 must |
| always be sent to the framework after the FINAL buffer for frame 2, and |
| before the FINAL buffer for frame 4. PARTIAL buffers may be returned |
| in any order relative to other frames, but all PARTIAL buffers for a given |
| capture must arrive before the FINAL buffer for that capture. This entry may |
| only be used by the camera device if quirks.usePartialResult is set to 1. |
| </details> |
| <hal_details> |
| Refer to `camera3_capture_result::partial_result` |
| for information on how to implement partial results. |
| </hal_details> |
| </entry> |
| </dynamic> |
| </section> |
| <section name="request"> |
| <controls> |
| <entry name="frameCount" type="int32" visibility="system" deprecated="true"> |
| <description>A frame counter set by the framework. Must |
| be maintained unchanged in output frame. This value monotonically |
| increases with every new result (that is, each new result has a unique |
| frameCount value). |
| </description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <units>incrementing integer</units> |
| <range>Any int.</range> |
| </entry> |
| <entry name="id" type="int32" visibility="hidden"> |
| <description>An application-specified ID for the current |
| request. Must be maintained unchanged in output |
| frame</description> |
| <units>arbitrary integer assigned by application</units> |
| <range>Any int</range> |
| <tag id="V1" /> |
| </entry> |
| <entry name="inputStreams" type="int32" visibility="system" deprecated="true" |
| container="array"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>List which camera reprocess stream is used |
| for the source of reprocessing data.</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <units>List of camera reprocess stream IDs</units> |
| <range> |
| Typically only one entry is allowed; it must be a valid reprocess stream ID. |
| </range> |
| <details>Only meaningful when android.request.type == |
| REPROCESS. Ignored otherwise</details> |
| <tag id="HAL2" /> |
| </entry> |
| <entry name="metadataMode" type="byte" visibility="system" |
| enum="true"> |
| <enum> |
| <value>NONE |
| <notes>No metadata should be produced on output, except |
| for application-bound buffer data. If no |
| application-bound streams exist, no frame should be |
| placed in the output frame queue. If such streams |
| exist, a frame should be placed on the output queue |
| with null metadata but with the necessary output buffer |
| information. Timestamp information should still be |
| included with any output stream buffers</notes></value> |
| <value>FULL |
| <notes>All metadata should be produced. Statistics will |
| only be produced if they are separately |
| enabled</notes></value> |
| </enum> |
| <description>How much metadata to produce on |
| output</description> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="outputStreams" type="int32" visibility="system" deprecated="true" |
| container="array"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>Lists which camera output streams image data |
| from this capture must be sent to</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <units>List of camera stream IDs</units> |
| <range>List must only include streams that have been |
| created</range> |
| <details>If no output streams are listed, then the image |
| data should simply be discarded. The image data must |
| still be captured for metadata and statistics production, |
| and the lens and flash must operate as requested.</details> |
| <tag id="HAL2" /> |
| </entry> |
| <entry name="type" type="byte" visibility="system" deprecated="true" enum="true"> |
| <enum> |
| <value>CAPTURE |
| <notes>Capture a new image from the imaging hardware, |
| and process it according to the |
| settings</notes></value> |
| <value>REPROCESS |
| <notes>Process previously captured data; the |
| android.request.inputStreams parameter determines the |
| source reprocessing stream. TODO: Mark dynamic metadata |
| needed for reprocessing with [RP]</notes></value> |
| </enum> |
| <description>The type of the request; either CAPTURE or |
| REPROCESS. For legacy HAL3, this tag is redundant. |
| </description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <tag id="HAL2" /> |
| </entry> |
| </controls> |
| <static> |
| <entry name="maxNumOutputStreams" type="int32" visibility="ndk_public" |
| container="array" hwlevel="legacy"> |
| <array> |
| <size>3</size> |
| </array> |
| <description>The maximum numbers of different types of output streams |
| that can be configured and used simultaneously by a camera device. |
| </description> |
| <range> |
| For processed (and stalling) format streams, &gt;= 1. |
| |
| For Raw format (either stalling or non-stalling) streams, &gt;= 0. |
| |
| For processed (but not stalling) format streams, &gt;= 3 |
| for FULL mode devices (`android.info.supportedHardwareLevel == FULL`); |
| &gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`). |
| </range> |
| <details> |
| This is a 3 element tuple that contains the max number of output simultaneous |
| streams for raw sensor, processed (but not stalling), and processed (and stalling) |
| formats respectively. For example, assuming that JPEG is typically a processed and |
| stalling stream, if max raw sensor format output stream number is 1, max YUV streams |
| number is 3, and max JPEG stream number is 2, then this tuple should be `(1, 3, 2)`. |
| |
| This lists the upper bound of the number of output streams supported by |
| the camera device. Using more streams simultaneously may require more hardware and |
| CPU resources that will consume more power. The image format for an output stream can |
| be any supported format provided by android.scaler.availableStreamConfigurations. |
| The formats defined in android.scaler.availableStreamConfigurations can be categorized |
| into the 3 stream types as below: |
| |
| * Processed (and stalling): any non-RAW format with a stallDurations &gt; 0. |
| Typically {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG format}. |
| * Raw formats: {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16 |
| RAW_SENSOR}, {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10}, or |
| {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12}. |
| * Processed (but not-stalling): any non-RAW format without a stall duration. Typically |
| {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888}, |
| {@link android.graphics.ImageFormat#NV21 NV21}, {@link |
| android.graphics.ImageFormat#YV12 YV12}, or {@link |
| android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8 Y8}. |
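| |
| For the Java API, the per-category maximums are also exposed through the synthetic |
| entries below; a non-normative sketch of querying them (assuming a |
| `CameraCharacteristics chars` for this camera): |
| |
|     int maxRaw = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW); |
|     int maxProc = chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC); |
|     int maxProcStalling = |
|             chars.get(CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING); |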
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="maxNumOutputRaw" type="int32" visibility="java_public" synthetic="true" |
| hwlevel="legacy"> |
| <description>The maximum numbers of different types of output streams |
| that can be configured and used simultaneously by a camera device |
| for any `RAW` formats. |
| </description> |
| <range> |
| &gt;= 0 |
| </range> |
| <details> |
| This value contains the max number of output simultaneous |
| streams from the raw sensor. |
| |
| This lists the upper bound of the number of output streams supported by |
| the camera device. Using more streams simultaneously may require more hardware and |
| CPU resources that will consume more power. The image format for this kind of output stream can |
| be any supported `RAW` format provided by android.scaler.streamConfigurationMap. |
| |
| In particular, a `RAW` format is typically one of: |
| |
| * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16 RAW_SENSOR} |
| * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10} |
| * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12} |
| |
| LEGACY mode devices (android.info.supportedHardwareLevel `==` LEGACY) |
| never support raw streams. |
| </details> |
| </entry> |
| <entry name="maxNumOutputProc" type="int32" visibility="java_public" synthetic="true" |
| hwlevel="legacy"> |
| <description>The maximum numbers of different types of output streams |
| that can be configured and used simultaneously by a camera device |
| for any processed (but not-stalling) formats. |
| </description> |
| <range> |
| &gt;= 3 |
| for FULL mode devices (`android.info.supportedHardwareLevel == FULL`); |
| &gt;= 2 for LIMITED mode devices (`android.info.supportedHardwareLevel == LIMITED`). |
| </range> |
| <details> |
| This value contains the max number of output simultaneous |
| streams for any processed (but not-stalling) formats. |
| |
| This lists the upper bound of the number of output streams supported by |
| the camera device. Using more streams simultaneously may require more hardware and |
| CPU resources that will consume more power. The image format for this kind of output stream can |
| be any supported non-`RAW` format provided by android.scaler.streamConfigurationMap. |
| |
| Processed (but not-stalling) is defined as any non-RAW format without a stall duration. |
| Typically: |
| |
| * {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888} |
| * {@link android.graphics.ImageFormat#NV21 NV21} |
| * {@link android.graphics.ImageFormat#YV12 YV12} |
| * Implementation-defined formats, i.e. {@link |
| android.hardware.camera2.params.StreamConfigurationMap#isOutputSupportedFor(Class)} |
| * {@link android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8 Y8} |
| |
| For full guarantees, query {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a |
| processed format -- it will return 0 for a non-stalling stream. |
| |
| LEGACY devices will support at least 2 processing/non-stalling streams. |
| </details> |
| </entry> |
| <entry name="maxNumOutputProcStalling" type="int32" visibility="java_public" synthetic="true" |
| hwlevel="legacy"> |
| <description>The maximum numbers of different types of output streams |
| that can be configured and used simultaneously by a camera device |
| for any processed (and stalling) formats. |
| </description> |
| <range> |
| &gt;= 1 |
| </range> |
| <details> |
| This value contains the max number of output simultaneous |
| streams for any processed (and stalling) formats. |
| |
| This lists the upper bound of the number of output streams supported by |
| the camera device. Using more streams simultaneously may require more hardware and |
| CPU resources that will consume more power. The image format for this kind of output stream can |
| be any supported non-`RAW` format provided by android.scaler.streamConfigurationMap. |
| |
| A processed and stalling format is defined as any non-RAW format with a stallDurations |
| &gt; 0. Typically only the {@link |
| android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG format} is a stalling format. |
| |
| For full guarantees, query {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration} with a |
| processed format -- it will return a non-0 value for a stalling stream. |
| |
| LEGACY devices will support up to 1 processing/stalling stream. |
| </details> |
| </entry> |
| <entry name="maxNumReprocessStreams" type="int32" visibility="system" |
| deprecated="true" container="array"> |
| <array> |
| <size>1</size> |
| </array> |
| <description>How many reprocessing streams of any type |
| can be allocated at the same time.</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <range>&gt;= 0</range> |
| <details> |
| Only used by HAL2.x. |
| |
| When set to 0, it means no reprocess stream is supported. |
| </details> |
| <tag id="HAL2" /> |
| </entry> |
| <entry name="maxNumInputStreams" type="int32" visibility="java_public" hwlevel="full"> |
| <description> |
| The maximum numbers of any type of input streams |
| that can be configured and used simultaneously by a camera device. |
| </description> |
| <range> |
| 0 or 1. |
| </range> |
| <details>When set to 0, it means no input stream is supported. |
| |
| The image format for an input stream can be any supported format returned by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. When using an |
| input stream, there must be at least one output stream configured to receive the |
| reprocessed images. |
| |
| When an input stream and some output streams are used in a reprocessing request, |
| only the input buffer will be used to produce these output stream buffers, and a |
| new sensor image will not be captured. |
| |
| For example, for the Zero Shutter Lag (ZSL) still capture use case, the input
| stream image format will be PRIVATE, and the associated output stream image format
| should be JPEG.
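|
| As an illustrative sketch only (the surface, size, and callback names here are
| hypothetical, and checked exceptions are omitted), an application could check this
| entry and set up a PRIVATE-to-JPEG reprocessable session as follows:
|
|     import android.graphics.ImageFormat;
|     import android.hardware.camera2.CameraCharacteristics;
|     import android.hardware.camera2.params.InputConfiguration;
|     import java.util.Arrays;
|
|     // Assumes `characteristics`, `cameraDevice`, `privateSurface`, `jpegSurface`,
|     // `sessionStateCallback`, and `handler` already exist.
|     Integer maxInputs = characteristics.get(
|         CameraCharacteristics.REQUEST_MAX_NUM_INPUT_STREAMS);
|     if (maxInputs != null && maxInputs >= 1) {
|         // One PRIVATE input stream feeding a JPEG output, matching the ZSL use
|         // case described above; the input size here is an arbitrary example.
|         InputConfiguration inputConfig =
|             new InputConfiguration(4032, 3024, ImageFormat.PRIVATE);
|         cameraDevice.createReprocessableCaptureSession(inputConfig,
|             Arrays.asList(privateSurface, jpegSurface),
|             sessionStateCallback, handler);
|     }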
| </details> |
| <hal_details> |
| For the reprocessing flow and controls, see |
| hardware/libhardware/include/hardware/camera3.h Section 10 for more details. |
| </hal_details> |
| <tag id="REPROC" /> |
| </entry> |
| </static> |
| <dynamic> |
| <entry name="frameCount" type="int32" visibility="hidden" deprecated="true"> |
| <description>A frame counter set by the framework. This value monotonically |
| increases with every new result (that is, each new result has a unique |
| frameCount value).</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <units>count of frames</units> |
| <range>&gt; 0</range> |
| <details>Reset on release()</details> |
| </entry> |
| <clone entry="android.request.id" kind="controls"></clone> |
| <clone entry="android.request.metadataMode" |
| kind="controls"></clone> |
| <clone entry="android.request.outputStreams" |
| kind="controls"></clone> |
| <entry name="pipelineDepth" type="byte" visibility="public" hwlevel="legacy"> |
| <description>Specifies the number of pipeline stages the frame went |
| through from when it was exposed to when the final completed result |
| was available to the framework.</description> |
| <range>&lt;= android.request.pipelineMaxDepth</range> |
| <details>Depending on what settings are used in the request, and
| what streams are configured, the data may undergo less processing,
| and some pipeline stages may be skipped.
| |
| See android.request.pipelineMaxDepth for more details. |
| </details> |
| <hal_details> |
| This value must always represent the accurate count of how many |
| pipeline stages were actually used. |
| </hal_details> |
| </entry> |
| </dynamic> |
| <static> |
| <entry name="pipelineMaxDepth" type="byte" visibility="public" hwlevel="legacy"> |
| <description>Specifies the number of maximum pipeline stages a frame |
| has to go through from when it's exposed to when it's available |
| to the framework.</description> |
| <details>A typical minimum value for this is 2 (one stage to expose,
| one stage to read out) from the sensor. The ISP then usually adds
| its own stages to do custom HW processing. Further stages may be |
| added by SW processing. |
| |
| Depending on what settings are used (e.g. YUV, JPEG) and what |
| processing is enabled (e.g. face detection), the actual pipeline |
| depth (specified by android.request.pipelineDepth) may be less than |
| the max pipeline depth. |
| |
| A pipeline depth of X stages is equivalent to a pipeline latency of |
| X frame intervals. |
| |
| This value will normally be 8 or less; however, for a high speed capture session,
| the max pipeline depth will be up to 8 x the size of the high speed capture request list.
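|
| For example, at a 30 fps frame rate a pipeline depth of 8 corresponds to a
| worst-case result latency of roughly 8 / 30 s, or about 267 ms.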
| </details> |
| <hal_details> |
| This value should be 4 or less, except for the high speed recording session, where the
| max batch sizes may be larger than 1.
| </hal_details> |
| </entry> |
| <entry name="partialResultCount" type="int32" visibility="public" optional="true"> |
| <description>Defines how many sub-components |
| a result will be composed of. |
| </description> |
| <range>&gt;= 1</range> |
| <details>In order to combat the pipeline latency, partial results |
| may be delivered to the application layer from the camera device as |
| soon as they are available. |
| |
| Optional; defaults to 1. A value of 1 means that partial |
| results are not supported, and only the final TotalCaptureResult will |
| be produced by the camera device. |
| |
| A typical use case for this might be: after requesting an |
| auto-focus (AF) lock the new AF state might be available 50% |
| of the way through the pipeline. The camera device could |
| then immediately dispatch this state via a partial result to |
| the application, and the rest of the metadata via later |
| partial results. |
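|
| As an illustrative sketch only (variable names hypothetical), an application could
| consume partial results like this:
|
|     import android.hardware.camera2.CameraCaptureSession;
|     import android.hardware.camera2.CaptureRequest;
|     import android.hardware.camera2.CaptureResult;
|     import android.hardware.camera2.TotalCaptureResult;
|
|     CameraCaptureSession.CaptureCallback callback =
|         new CameraCaptureSession.CaptureCallback() {
|             @Override
|             public void onCaptureProgressed(CameraCaptureSession session,
|                     CaptureRequest request, CaptureResult partialResult) {
|                 // A partial result: the AF state, for example, may already be
|                 // present here, well before the final result arrives.
|                 Integer afState = partialResult.get(CaptureResult.CONTROL_AF_STATE);
|             }
|             @Override
|             public void onCaptureCompleted(CameraCaptureSession session,
|                     CaptureRequest request, TotalCaptureResult result) {
|                 // The final, complete metadata for this capture.
|             }
|         };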
| </details> |
| </entry> |
| <entry name="availableCapabilities" type="byte" visibility="public" |
| enum="true" container="array" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <enum> |
| <value>BACKWARD_COMPATIBLE |
| <notes>The minimal set of capabilities that every camera |
| device (regardless of android.info.supportedHardwareLevel) |
| supports. |
| |
| This capability is listed by all normal devices, and |
| indicates that the camera device has a feature set |
| that's comparable to the baseline requirements for the |
| older android.hardware.Camera API. |
| |
| Devices with the DEPTH_OUTPUT capability might not list this |
| capability, indicating that they support only depth measurement, |
| not standard color output. |
| </notes> |
| </value> |
| <value optional="true">MANUAL_SENSOR |
| <notes> |
| The camera device can be manually controlled (3A algorithms such
| as auto-exposure and auto-focus can be bypassed).
| The camera device supports basic manual control of the sensor image |
| acquisition related stages. This means the following controls are |
| guaranteed to be supported: |
| |
| * Manual frame duration control
|     * android.sensor.frameDuration
|     * android.sensor.info.maxFrameDuration
| * Manual exposure control
|     * android.sensor.exposureTime
|     * android.sensor.info.exposureTimeRange
| * Manual sensitivity control
|     * android.sensor.sensitivity
|     * android.sensor.info.sensitivityRange
| * Manual lens control (if the lens is adjustable)
|     * android.lens.*
| * Manual flash control (if a flash unit is present)
|     * android.flash.*
| * Manual black level locking
|     * android.blackLevel.lock
| * Auto exposure lock
|     * android.control.aeLock
| |
| If any of the above 3A algorithms are enabled, then the camera |
| device will accurately report the values applied by 3A in the |
| result. |
| |
| A given camera device may also support additional manual sensor controls, |
| but this capability only covers the above list of controls. |
| |
| If this is supported, android.scaler.streamConfigurationMap will |
| additionally return a min frame duration that is greater than |
| zero for each supported size-format combination. |
| |
| For camera devices with LOGICAL_MULTI_CAMERA capability, when the underlying active |
| physical camera switches, exposureTime, sensitivity, and lens properties may change |
| even if AE/AF is locked. However, the overall auto exposure and auto focus experience |
| for users will be consistent. Refer to LOGICAL_MULTI_CAMERA capability for details. |
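|
| As an illustrative sketch (the builder and the specific values are hypothetical),
| manual sensor control under this capability could look like:
|
|     import android.hardware.camera2.CameraMetadata;
|     import android.hardware.camera2.CaptureRequest;
|
|     // Assumes `builder` is a CaptureRequest.Builder from this camera device.
|     builder.set(CaptureRequest.CONTROL_AE_MODE, CameraMetadata.CONTROL_AE_MODE_OFF);
|     builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 10000000L); // 10 ms, in ns
|     builder.set(CaptureRequest.SENSOR_SENSITIVITY, 400);         // ISO 400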
| </notes> |
| </value> |
| <value optional="true">MANUAL_POST_PROCESSING |
| <notes> |
| The camera device post-processing stages can be manually controlled. |
| The camera device supports basic manual control of the image post-processing |
| stages. This means the following controls are guaranteed to be supported: |
| |
| * Manual tonemap control
|     * android.tonemap.curve
|     * android.tonemap.mode
|     * android.tonemap.maxCurvePoints
|     * android.tonemap.gamma
|     * android.tonemap.presetCurve
|
| * Manual white balance control
|     * android.colorCorrection.transform
|     * android.colorCorrection.gains
| * Manual lens shading map control
|     * android.shading.mode
|     * android.statistics.lensShadingMapMode
|     * android.statistics.lensShadingMap
|     * android.lens.info.shadingMapSize
| * Manual aberration correction control (if aberration correction is supported)
|     * android.colorCorrection.aberrationMode
|     * android.colorCorrection.availableAberrationModes
| * Auto white balance lock
|     * android.control.awbLock
| |
| If auto white balance is enabled, then the camera device |
| will accurately report the values applied by AWB in the result. |
| |
| A given camera device may also support additional post-processing |
| controls, but this capability only covers the above list of controls. |
| |
| For camera devices with LOGICAL_MULTI_CAMERA capability, when the underlying active
| physical camera switches, tonemap, white balance, and shading map may change even if
| awb is locked. However, the overall post-processing experience for users will be
| consistent. Refer to LOGICAL_MULTI_CAMERA capability for details.
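|
| A minimal illustrative sketch (builder name hypothetical) of manual tonemap control
| under this capability:
|
|     import android.hardware.camera2.CameraMetadata;
|     import android.hardware.camera2.CaptureRequest;
|     import android.hardware.camera2.params.TonemapCurve;
|
|     // Assumes `builder` is a CaptureRequest.Builder from this camera device.
|     float[] linear = {0.0f, 0.0f, 1.0f, 1.0f}; // (in, out) control points
|     builder.set(CaptureRequest.TONEMAP_MODE,
|         CameraMetadata.TONEMAP_MODE_CONTRAST_CURVE);
|     builder.set(CaptureRequest.TONEMAP_CURVE,
|         new TonemapCurve(linear, linear, linear));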
| </notes> |
| </value> |
| <value optional="true">RAW |
| <notes> |
| The camera device supports outputting RAW buffers and |
| metadata for interpreting them. |
| |
| Devices supporting the RAW capability allow both for |
| saving DNG files, and for direct application processing of |
| raw sensor images. |
| |
| * RAW_SENSOR is supported as an output format. |
| * The maximum available resolution for RAW_SENSOR streams |
| will match either the value in |
| android.sensor.info.pixelArraySize or |
| android.sensor.info.preCorrectionActiveArraySize. |
| * All DNG-related optional metadata entries are provided |
| by the camera device. |
| </notes> |
| </value> |
| <value optional="true" visibility="java_public">PRIVATE_REPROCESSING |
| <notes> |
| The camera device supports the Zero Shutter Lag reprocessing use case. |
| |
| * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`. |
| * {@link android.graphics.ImageFormat#PRIVATE} is supported as an output/input format, |
| that is, {@link android.graphics.ImageFormat#PRIVATE} is included in the lists of |
| formats returned by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}. |
| * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput} |
| returns non-empty int[] for each supported input format returned by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. |
| * Each size returned by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getInputSizes |
| getInputSizes(ImageFormat.PRIVATE)} is also included in {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes |
| getOutputSizes(ImageFormat.PRIVATE)} |
| * Using {@link android.graphics.ImageFormat#PRIVATE} does not cause a frame rate drop |
| relative to the sensor's maximum capture rate (at that resolution). |
| * {@link android.graphics.ImageFormat#PRIVATE} will be reprocessable into both |
| {@link android.graphics.ImageFormat#YUV_420_888} and |
| {@link android.graphics.ImageFormat#JPEG} formats. |
| * For a MONOCHROME camera supporting Y8 format, {@link |
| android.graphics.ImageFormat#PRIVATE} will be reprocessable into |
| {@link android.graphics.ImageFormat#Y8}. |
| * The maximum available resolution for PRIVATE streams |
| (both input/output) will match the maximum available |
| resolution of JPEG streams. |
| * Static metadata android.reprocess.maxCaptureStall. |
| * Only the below controls are effective for reprocessing requests and
| will be present in capture results; other controls in reprocess
| requests will be ignored by the camera device.
|     * android.jpeg.*
|     * android.noiseReduction.mode
|     * android.edge.mode
| * android.noiseReduction.availableNoiseReductionModes and
| android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode.
| </notes> |
| </value> |
| <value optional="true">READ_SENSOR_SETTINGS |
| <notes> |
| The camera device supports accurately reporting the sensor settings for many of |
| the sensor controls while the built-in 3A algorithm is running. This allows |
| reporting of sensor settings even when these settings cannot be manually changed. |
| |
| The values reported for the following controls are guaranteed to be available |
| in the CaptureResult, including when 3A is enabled: |
| |
| * Exposure control
|     * android.sensor.exposureTime
| * Sensitivity control
|     * android.sensor.sensitivity
| * Lens controls (if the lens is adjustable)
|     * android.lens.focusDistance
|     * android.lens.aperture
| |
| This capability is a subset of the MANUAL_SENSOR control capability, and will |
| always be included if the MANUAL_SENSOR capability is available. |
| </notes> |
| </value> |
| <value optional="true">BURST_CAPTURE |
| <notes> |
| The camera device supports capturing high-resolution images at >= 20 frames per |
| second, in at least the uncompressed YUV format, when post-processing settings are |
| set to FAST. Additionally, all image resolutions less than 24 megapixels can be |
| captured at >= 10 frames per second. Here, 'high resolution' means at least 8 |
| megapixels, or the maximum resolution of the device, whichever is smaller. |
| </notes> |
| <sdk_notes> |
| More specifically, this means that a size matching the camera device's active array |
| size is listed as a supported size for the {@link |
| android.graphics.ImageFormat#YUV_420_888} format in either {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} or {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes}, |
| with a minimum frame duration for that format and size of either <= 1/20 s, or |
| <= 1/10 s if the image size is less than 24 megapixels, respectively; and |
| the android.control.aeAvailableTargetFpsRanges entry lists at least one FPS range |
| where the minimum FPS is >= 1 / minimumFrameDuration for the maximum-size |
| YUV_420_888 format. If that maximum size is listed in {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getHighResolutionOutputSizes}, |
| then the list of resolutions for YUV_420_888 from {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} contains at |
| least one resolution >= 8 megapixels, with a minimum frame duration of <= 1/20 |
| s. |
| |
| If the device supports the {@link
| android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}, {@link
| android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}, or {@link
| android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8} formats, then those can also be
| captured at the same rate as the maximum-size YUV_420_888 resolution.
| |
| If the device supports the PRIVATE_REPROCESSING capability, then the same guarantees |
| as for the YUV_420_888 format also apply to the {@link |
| android.graphics.ImageFormat#PRIVATE} format. |
| |
| In addition, the android.sync.maxLatency field is guaranteed to have a value between 0 |
| and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable |
| are also guaranteed to be `true` so burst capture with these two locks ON yields |
| consistent image output. |
| </sdk_notes> |
| <ndk_notes> |
| More specifically, this means that at least one output {@link |
| android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} size listed in |
| {@link |
| android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} |
| is larger than or equal to the 'high resolution' defined above, and can be captured at at
| least 20 fps. For the largest {@link
| android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} size listed in
| {@link
| android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS},
| the camera device can capture this size at at least 10 frames per second if the size is
| less than 24 megapixels. Also, the android.control.aeAvailableTargetFpsRanges entry
| lists at least one FPS range where the minimum FPS is >= 1 / minimumFrameDuration
| for the largest YUV_420_888 size.
| |
| If the device supports the {@link
| android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10}, {@link
| android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12}, or {@link
| android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8} formats, then those can also be
| captured at the same rate as the maximum-size YUV_420_888 resolution.
| |
| In addition, the android.sync.maxLatency field is guaranteed to have a value between 0 |
| and 4, inclusive. android.control.aeLockAvailable and android.control.awbLockAvailable |
| are also guaranteed to be `true` so burst capture with these two locks ON yields |
| consistent image output. |
| </ndk_notes> |
| </value> |
| <value optional="true" visibility="java_public">YUV_REPROCESSING |
| <notes> |
| The camera device supports the YUV_420_888 reprocessing use case, similar to
| PRIVATE_REPROCESSING. This capability requires the camera device to support the
| following:
| |
| * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`. |
| * {@link android.graphics.ImageFormat#YUV_420_888} is supported as an output/input |
| format, that is, YUV_420_888 is included in the lists of formats returned by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}. |
| * {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput} |
| returns non-empty int[] for each supported input format returned by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. |
| * Each size returned by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getInputSizes |
| getInputSizes(YUV_420_888)} is also included in {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes |
| getOutputSizes(YUV_420_888)} |
| * Using {@link android.graphics.ImageFormat#YUV_420_888} does not cause a frame rate |
| drop relative to the sensor's maximum capture rate (at that resolution). |
| * {@link android.graphics.ImageFormat#YUV_420_888} will be reprocessable into both |
| {@link android.graphics.ImageFormat#YUV_420_888} and {@link |
| android.graphics.ImageFormat#JPEG} formats. |
| * The maximum available resolution for {@link |
| android.graphics.ImageFormat#YUV_420_888} streams (both input/output) will match the |
| maximum available resolution of {@link android.graphics.ImageFormat#JPEG} streams. |
| * For a MONOCHROME camera with Y8 format support, all the requirements mentioned |
| above for YUV_420_888 apply for Y8 format as well. |
| * Static metadata android.reprocess.maxCaptureStall. |
| * Only the below controls are effective for reprocessing requests and will be present |
| in capture results. The reprocess requests are from the original capture results |
| that are associated with the intermediate {@link |
| android.graphics.ImageFormat#YUV_420_888} output buffers. All other controls in the |
| reprocess requests will be ignored by the camera device. |
|     * android.jpeg.*
|     * android.noiseReduction.mode
|     * android.edge.mode
|     * android.reprocess.effectiveExposureFactor
| * android.noiseReduction.availableNoiseReductionModes and |
| android.edge.availableEdgeModes will both list ZERO_SHUTTER_LAG as a supported mode. |
| </notes> |
| </value> |
| <value optional="true">DEPTH_OUTPUT |
| <notes> |
| The camera device can produce depth measurements from its field of view. |
| |
| This capability requires the camera device to support the following: |
| |
| * {@link android.graphics.ImageFormat#DEPTH16|AIMAGE_FORMAT_DEPTH16} is supported as |
| an output format. |
| * {@link |
| android.graphics.ImageFormat#DEPTH_POINT_CLOUD|AIMAGE_FORMAT_DEPTH_POINT_CLOUD} is |
| optionally supported as an output format. |
| * This camera device, and all camera devices with the same android.lens.facing, will |
| list the following calibration metadata entries in both {@link |
| android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics} |
| and {@link |
| android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result}: |
|     - android.lens.poseTranslation
|     - android.lens.poseRotation
|     - android.lens.intrinsicCalibration
|     - android.lens.distortion
| * The android.depth.depthIsExclusive entry is listed by this device. |
| * As of Android P, the android.lens.poseReference entry is listed by this device. |
| * A LIMITED camera with only the DEPTH_OUTPUT capability does not have to support |
| normal YUV_420_888, Y8, JPEG, and PRIV-format outputs. It only has to support the |
| DEPTH16 format. |
| |
| Generally, depth output operates at a slower frame rate than standard color capture, |
| so the DEPTH16 and DEPTH_POINT_CLOUD formats will commonly have a stall duration that |
| should be accounted for (see {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS}). |
| On a device that supports both depth and color-based output, to enable smooth preview, |
| using a repeating burst is recommended, where a depth-output target is only included |
| once every N frames, where N is the ratio between preview output rate and depth output |
| rate, including depth stall time. |
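|
| As a small illustrative sketch (the size and variable names are hypothetical), a
| DEPTH16 output could be set up with a standard ImageReader:
|
|     import android.graphics.ImageFormat;
|     import android.media.ImageReader;
|
|     // DEPTH16 is guaranteed as an output format under this capability; the
|     // size below is an arbitrary example and must come from the depth stream
|     // configurations.
|     ImageReader depthReader = ImageReader.newInstance(
|         320, 240, ImageFormat.DEPTH16, /*maxImages*/ 2);
|     // depthReader.getSurface() can then be used as a capture target, ideally
|     // included only once every N frames to keep preview smooth, as noted above.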
| </notes> |
| </value> |
| <value optional="true" visibility="java_public">CONSTRAINED_HIGH_SPEED_VIDEO |
| <notes> |
| The device supports the constrained high speed video recording (frame rate >= 120fps)
| use case. The camera device will support high speed capture sessions created by {@link
| android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}, which |
| only accepts high speed request lists created by {@link |
| android.hardware.camera2.CameraConstrainedHighSpeedCaptureSession#createHighSpeedRequestList}. |
| |
| A camera device can still support high speed video streaming by advertising the high
| speed FPS ranges in android.control.aeAvailableTargetFpsRanges. In that case, all
| normal per-frame capture request control and synchronization requirements apply
| to the high speed fps ranges, the same as to all other fps ranges. This capability,
| in contrast, describes a specialized operating mode with many limitations (see
| below), which is only targeted at high speed video recording.
| |
| The supported high speed video sizes and fps ranges are specified in {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}. |
| To get desired output frame rates, the application is only allowed to select video |
| size and FPS range combinations provided by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}. The |
| fps range can be controlled via android.control.aeTargetFpsRange. |
| |
| In this capability, the camera device will override aeMode, awbMode, and afMode to |
| ON, AUTO, and CONTINUOUS_VIDEO, respectively. All post-processing block mode |
| controls will be overridden to be FAST. Therefore, no manual control of capture |
| and post-processing parameters is possible. All other controls operate the |
| same as when android.control.mode == AUTO. This means that all other |
| android.control.* fields continue to work, such as |
| |
| * android.control.aeTargetFpsRange |
| * android.control.aeExposureCompensation |
| * android.control.aeLock |
| * android.control.awbLock |
| * android.control.effectMode |
| * android.control.aeRegions |
| * android.control.afRegions |
| * android.control.awbRegions |
| * android.control.afTrigger |
| * android.control.aePrecaptureTrigger |
| * android.control.zoomRatio |
| |
| Outside of android.control.*, the following controls will work: |
| |
| * android.flash.mode (TORCH mode only, automatic flash for still capture will not |
| work since aeMode is ON) |
| * android.lens.opticalStabilizationMode (if it is supported) |
| * android.scaler.cropRegion |
| * android.statistics.faceDetectMode (if it is supported) |
| |
| For the high speed recording use case, the actual maximum supported frame rate may
| be lower than what the camera can output, depending on the destination Surfaces for
| the image data. For example, if the destination surface is from a video encoder,
| the application needs to check if the video encoder is capable of supporting the
| high frame rate for a given video size, or it will end up with a lower recording
| frame rate. If the destination surface is from a preview window, the actual preview frame
| rate will be bounded by the screen refresh rate.
| |
| The camera device will only support up to 2 simultaneous high speed output surfaces
| (preview and recording surfaces) in this mode. The above controls will be effective only
| if all of the below conditions are true:
| |
| * The application creates a camera capture session with no more than 2 surfaces via |
| {@link |
| android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. The |
| targeted surfaces must be a preview surface (either from {@link
| android.view.SurfaceView} or {@link android.graphics.SurfaceTexture}) or a recording
| surface (either from {@link android.media.MediaRecorder#getSurface} or {@link
| android.media.MediaCodec#createInputSurface}).
| * The stream sizes are selected from the sizes reported by |
| {@link android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoSizes}. |
| * The FPS ranges are selected from {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getHighSpeedVideoFpsRanges}. |
| |
| When the above conditions are NOT satisfied,
| {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession} |
| will fail. |
| |
| Switching to an FPS range that has a different maximum FPS may trigger some camera
| device reconfigurations, which may introduce extra latency. It is recommended that
| the application avoids unnecessary maximum target FPS changes as much as possible
| during high speed streaming.
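|
| As an illustrative sketch (surface and callback names hypothetical, checked
| exceptions omitted) of the session and request-list flow described above:
|
|     import java.util.Arrays;
|
|     // Assumes `cameraDevice`, `previewSurface`, `recorderSurface`, `stateCallback`,
|     // and `handler` already exist; sizes and FPS ranges must come from
|     // getHighSpeedVideoSizes() and getHighSpeedVideoFpsRanges().
|     cameraDevice.createConstrainedHighSpeedCaptureSession(
|         Arrays.asList(previewSurface, recorderSurface), stateCallback, handler);
|
|     // Later, in stateCallback.onConfigured(session):
|     //   CameraConstrainedHighSpeedCaptureSession hsSession =
|     //       (CameraConstrainedHighSpeedCaptureSession) session;
|     //   List<CaptureRequest> burst =
|     //       hsSession.createHighSpeedRequestList(requestBuilder.build());
|     //   hsSession.setRepeatingBurst(burst, captureCallback, handler);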
| </notes> |
| </value> |
| <value optional="true" hal_version="3.3" >MOTION_TRACKING |
| <notes> |
| The camera device supports the MOTION_TRACKING value for |
| android.control.captureIntent, which limits maximum exposure time to 20 ms. |
| |
| This limits the motion blur of captured images, resulting in better image tracking
| results for use cases such as image stabilization or augmented reality.
| </notes> |
| </value> |
| <value optional="true" hal_version="3.3">LOGICAL_MULTI_CAMERA |
| <notes> |
| The camera device is a logical camera backed by two or more physical cameras. |
| |
| In API level 28, the physical cameras must also be exposed to the application via |
| {@link android.hardware.camera2.CameraManager#getCameraIdList}. |
| |
| Starting from API level 29: |
| |
| * Some or all physical cameras may not be independently exposed to the application, |
| in which case the physical camera IDs will not be available in |
| {@link android.hardware.camera2.CameraManager#getCameraIdList}. But the |
| application can still query the physical cameras' characteristics by calling |
| {@link android.hardware.camera2.CameraManager#getCameraCharacteristics}. |
| * If a physical camera is hidden from the camera ID list, the mandatory stream
| combinations for that physical camera must be supported through the logical camera
| using physical streams. One exception is that, in API level 30, a physical camera
| may become unavailable via the
| {@link CameraManager.AvailabilityCallback#onPhysicalCameraUnavailable|ACameraManager_PhysicalCameraAvailabilityCallback}
| callback.
| |
| Combinations of logical and physical streams, or physical streams from different
| physical cameras, are not guaranteed. However, if the camera device supports
| {@link CameraDevice#isSessionConfigurationSupported|ACameraDevice_isSessionConfigurationSupported},
| the application must be able to query whether a stream combination involving physical
| streams is supported by calling
| {@link CameraDevice#isSessionConfigurationSupported|ACameraDevice_isSessionConfigurationSupported}.
| |
| A camera application shouldn't assume that there is at most 1 rear camera and 1 front
| camera in the system. For an application that switches between front and back cameras,
| the recommendation is to switch between the first rear camera and the first front
| camera in the list of supported camera devices.
| |
| This capability requires the camera device to support the following: |
| |
| * The IDs of underlying physical cameras are returned via |
| {@link android.hardware.camera2.CameraCharacteristics#getPhysicalCameraIds}. |
| * This camera device must list static metadata |
| android.logicalMultiCamera.sensorSyncType in |
| {@link android.hardware.camera2.CameraCharacteristics}. |
| * The underlying physical cameras' static metadata must list the following entries, |
| so that the application can correlate pixels from the physical streams: |
|     - android.lens.poseReference
|     - android.lens.poseRotation
|     - android.lens.poseTranslation
|     - android.lens.intrinsicCalibration
|     - android.lens.distortion
| * The SENSOR_INFO_TIMESTAMP_SOURCE of the logical device and physical devices must be |
| the same. |
| * The logical camera must be a LIMITED or higher device.
| |
| A logical camera device's dynamic metadata may contain
| android.logicalMultiCamera.activePhysicalId to notify the application of the current
| active physical camera Id. An active physical camera is the physical camera from which
| the logical camera's main image data outputs (YUV or RAW) and metadata originate.
| In addition, this serves as an indication of which physical camera is used to output to
| a RAW stream, or, in case only physical cameras support RAW, which physical RAW stream
| the application should request.
| |
| The logical camera's static metadata tags below describe the default active physical
| camera. An active physical camera is the default if it's used when the application
| directly uses requests built from a template. All templates will default to the same
| active physical camera.
| |
| - android.sensor.info.sensitivityRange |
| - android.sensor.info.colorFilterArrangement |
| - android.sensor.info.exposureTimeRange |
| - android.sensor.info.maxFrameDuration |
| - android.sensor.info.physicalSize |
| - android.sensor.info.whiteLevel |
| - android.sensor.info.lensShadingApplied |
| - android.sensor.referenceIlluminant1 |
| - android.sensor.referenceIlluminant2 |
| - android.sensor.calibrationTransform1 |
| - android.sensor.calibrationTransform2 |
| - android.sensor.colorTransform1 |
| - android.sensor.colorTransform2 |
| - android.sensor.forwardMatrix1 |
| - android.sensor.forwardMatrix2 |
| - android.sensor.blackLevelPattern |
| - android.sensor.maxAnalogSensitivity |
| - android.sensor.opticalBlackRegions |
| - android.sensor.availableTestPatternModes |
| - android.lens.info.hyperfocalDistance |
| - android.lens.info.minimumFocusDistance |
| - android.lens.info.focusDistanceCalibration |
| - android.lens.poseRotation |
| - android.lens.poseTranslation |
| - android.lens.intrinsicCalibration |
| - android.lens.poseReference |
| - android.lens.distortion |
| |
| The field of view of non-RAW physical streams must not be smaller than that of the |
| non-RAW logical streams, or the maximum field-of-view of the physical camera, |
| whichever is smaller. The application should check the physical capture result |
| metadata for how the physical streams are cropped or zoomed. More specifically, given |
| the physical camera result metadata, the effective horizontal field-of-view of the |
| physical camera is: |
| |
|     fov = 2 * atan2(cropW * sensorW / (2 * zoomRatio * activeArrayW), focalLength)
| |
| where the equation parameters are the physical camera's crop region width, physical |
| sensor width, zoom ratio, active array width, and focal length respectively. Typically |
| the physical stream of active physical camera has the same field-of-view as the |
| logical streams. However, the same may not be true for physical streams from |
| non-active physical cameras. For example, if the logical camera has a wide-ultrawide |
| configuration where the wide lens is the default, when the crop region is set to the |
| logical camera's active array size, (and the zoom ratio set to 1.0 starting from |
| Android 11), a physical stream for the ultrawide camera may prefer outputting images |
| with larger field-of-view than that of the wide camera for better stereo matching |
| margin or more robust motion tracking. At the same time, the physical non-RAW streams' |
| field of view must not be smaller than the requested crop region and zoom ratio, as |
| long as it's within the physical lens' capability. For example, for a logical camera |
| with wide-tele lens configuration where the wide lens is the default, if the logical |
| camera's crop region is set to maximum size, and zoom ratio set to 1.0, the physical |
| stream for the tele lens will be configured to its maximum size crop region (no zoom). |
| |
| *Deprecated:* Prior to Android 11, the field of view of all non-RAW physical streams |
| cannot be larger than that of non-RAW logical streams. If the logical camera has a |
| wide-ultrawide lens configuration where the wide lens is the default, when the logical |
| camera's crop region is set to maximum size, the FOV of the physical streams for the |
| ultrawide lens will be the same as the logical stream, by making the crop region |
| smaller than its active array size to compensate for the smaller focal length. |
| |
| For a logical camera, typically the underlying physical cameras have different RAW |
| capabilities (such as resolution or CFA pattern). There are two ways for the |
| application to capture RAW images from the logical camera: |
| |
| * If the logical camera has RAW capability, the application can create and use RAW |
| streams in the same way as before. In case a RAW stream is configured, to maintain |
| backward compatibility, the camera device makes sure the default active physical |
| camera remains active and does not switch to other physical cameras. (One exception |
| is that, if the logical camera consists of identical image sensors and advertises |
| multiple focalLength due to different lenses, the camera device may generate RAW |
| images from different physical cameras based on the focalLength being set by the |
| application.) This backward-compatible approach usually results in the loss of
| optical zoom to the telephoto or ultrawide lens.
| * Alternatively, if supported by the device, |
| {@link android.hardware.camera2.MultiResolutionImageReader} |
| can be used to capture RAW images from one of the underlying physical cameras ( |
| depending on current zoom level). Because different physical cameras may have |
| different RAW characteristics, the application needs to use the characteristics |
| and result metadata of the active physical camera for the relevant RAW metadata. |
| |
| The capture request and result metadata tags required for backward compatible camera |
| functionalities will be solely based on the logical camera capability. On the other |
| hand, the use of manual capture controls (sensor or post-processing) with a |
| logical camera may result in unexpected behavior when the HAL decides to switch |
| between physical cameras with different characteristics under the hood. For example, |
| when the application manually sets exposure time and sensitivity while zooming in, |
| the brightness of the camera images may suddenly change because HAL switches from one |
| physical camera to the other. |
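|
| A brief illustrative sketch (manager and id variable names hypothetical, checked
| exceptions omitted) of enumerating the physical cameras behind a logical camera:
|
|     import android.hardware.camera2.CameraCharacteristics;
|     import android.hardware.camera2.CameraManager;
|     import java.util.Set;
|
|     // Assumes `cameraManager` and `logicalId` already exist.
|     CameraCharacteristics logicalChars =
|         cameraManager.getCameraCharacteristics(logicalId);
|     Set<String> physicalIds = logicalChars.getPhysicalCameraIds();
|     for (String physicalId : physicalIds) {
|         // From API level 29 this works even for physical cameras that are
|         // hidden from getCameraIdList().
|         CameraCharacteristics physicalChars =
|             cameraManager.getCameraCharacteristics(physicalId);
|     }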
| </notes> |
| </value> |
| <value optional="true" hal_version="3.3" >MONOCHROME |
| <notes> |
| The camera device is a monochrome camera that doesn't contain a color filter array,
| and for a YUV_420_888 stream, the pixel values on the U and V planes are all 128.
| |
| A MONOCHROME camera must support the guaranteed stream combinations required for |
| its device level and capabilities. Additionally, if the monochrome camera device |
| supports Y8 format, all mandatory stream combination requirements related to {@link |
| android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888 YUV_420_888} apply |
| to {@link android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8 Y8} as well. There are no |
| mandatory stream combination requirements with regard to |
| {@link android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8 Y8} for Bayer camera devices. |
| |
| Starting from Android Q, the SENSOR_INFO_COLOR_FILTER_ARRANGEMENT of a MONOCHROME |
| camera will be either MONO or NIR. |
| </notes> |
| </value> |
| <value optional="true" hal_version="3.4" >SECURE_IMAGE_DATA |
| <notes> |
| The camera device is capable of writing image data into a region of memory |
| inaccessible to Android userspace or the Android kernel, and only accessible to |
| trusted execution environments (TEE). |
| </notes> |
| </value> |
| <value optional="true" hal_version="3.5" >SYSTEM_CAMERA |
| <notes> |
| The camera device is only accessible by Android's system components and privileged
| applications. Processes need to have the android.permission.SYSTEM_CAMERA permission
| in addition to android.permission.CAMERA in order to connect to this camera device.
| </notes> |
| </value> |
| <value optional="true" visibility="java_public" hal_version="3.5">OFFLINE_PROCESSING |
| <notes> |
| The camera device supports the OFFLINE_PROCESSING use case. |
| |
| With the OFFLINE_PROCESSING capability, the application can switch an ongoing
| capture session to offline mode by calling the
| CameraCaptureSession#switchToOffline method and specifying the streams to be kept in
| offline mode. The camera will then stop the currently active repeating requests,
| prepare for some requests to go into offline mode, and return an offline session
| object. After the switchToOffline call returns, the original capture session is in the
| closed state, as if the CameraCaptureSession#close method had been called.
| In offline mode, all in-flight requests will continue to be processed in the
| background, and the application can immediately close the camera or create a new
| capture session without losing those requests' output images and capture results.
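|
| An illustrative sketch (surface, executor, and callback names hypothetical, checked
| exceptions omitted) of the switch-to-offline flow just described:
|
|     import android.hardware.camera2.CameraOfflineSession;
|     import java.util.Arrays;
|
|     // Assumes `session` is the active CameraCaptureSession, `jpegSurface` is one
|     // of its configured outputs, and `executor`/`offlineCallback` already exist.
|     CameraOfflineSession offlineSession = session.switchToOffline(
|         Arrays.asList(jpegSurface), executor, offlineCallback);
|     // The original `session` is now closed; in-flight requests targeting
|     // `jpegSurface` continue in the background via `offlineSession`.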
| |
| While the camera device is processing offline requests, it |
| might not be able to support all stream configurations it can support |
| without offline requests. When that happens, the createCaptureSession |
| method call will fail. The following stream configurations are guaranteed to work |
| without hitting the resource busy exception: |
| |
| * One ongoing offline session: target one output surface of YUV or |
| JPEG format, any resolution. |
| * The active camera capture session:
|     1. One preview surface (SurfaceView or SurfaceTexture) up to 1920 width
|     1. One YUV ImageReader surface up to 1920 width
|     1. One JPEG ImageReader, any resolution: the camera device is
|     allowed to slow down JPEG output speed by 50% if there is any ongoing offline
|     session.
|     1. If the device supports PRIVATE_REPROCESSING, one pair of ImageWriter/ImageReader
|     surfaces of private format, with the same resolution that is larger or equal to
|     the JPEG ImageReader resolution above.
| * Alternatively, the active camera session above can be replaced by a legacy
| {@link android.hardware.Camera Camera} with the following parameter settings:
|     1. Preview size up to 1920 width
|     1. Preview callback size up to 1920 width
|     1. Video size up to 1920 width
|     1. Picture size, any resolution: the camera device is
|     allowed to slow down JPEG output speed by 50% if there is any ongoing offline
|     session.
| </notes> |
| </value> |
| <value optional="true" hal_version="3.6" >ULTRA_HIGH_RESOLUTION_SENSOR |
| <notes> |
| This camera device is capable of producing ultra high resolution images in |
| addition to the image sizes described in the |
| android.scaler.streamConfigurationMap. |
| It can operate in 'default' mode and 'max resolution' mode. It generally does this |
| by binning pixels in 'default' mode and not binning them in 'max resolution' mode. |
| `android.scaler.streamConfigurationMap` describes the streams supported in 'default' |
| mode. |
| The stream configurations supported in 'max resolution' mode are described by |
| `android.scaler.streamConfigurationMapMaximumResolution`. |
| The maximum resolution mode pixel array size of a camera device |
| (`android.sensor.info.pixelArraySize`) with this capability, |
| will be at least 24 megapixels. |
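|
| An illustrative sketch (builder name hypothetical) of requesting 'max resolution'
| mode for a capture:
|
|     import android.hardware.camera2.CameraMetadata;
|     import android.hardware.camera2.CaptureRequest;
|
|     // Assumes `builder` is a CaptureRequest.Builder targeting a stream from
|     // android.scaler.streamConfigurationMapMaximumResolution.
|     builder.set(CaptureRequest.SENSOR_PIXEL_MODE,
|         CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);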
| </notes> |
| </value> |
| <value optional="true" visibility="java_public" hal_version="3.6">REMOSAIC_REPROCESSING |
| <notes> |
| The device supports reprocessing from the `RAW_SENSOR` format with a bayer pattern |
| given by android.sensor.info.binningFactor (m x n group of pixels with the same |
| color filter) to a remosaiced regular bayer pattern. |
| |
| This capability will only be present for devices with |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability. When |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| devices do not advertise this capability, |
| {@link android.graphics.ImageFormat#RAW_SENSOR} images will already have a |
| regular bayer pattern. |
| |
| If a `RAW_SENSOR` stream is requested along with another non-RAW stream in a |
| {@link android.hardware.camera2.CaptureRequest} (if multiple streams are supported |
| when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}), |
| the `RAW_SENSOR` stream will have a regular bayer pattern. |
| |
| This capability requires the camera device to support the following:
| |
| * The {@link android.hardware.camera2.params.StreamConfigurationMap} mentioned below |
| refers to the one, described by |
| `android.scaler.streamConfigurationMapMaximumResolution`. |
| * One input stream is supported, that is, `android.request.maxNumInputStreams == 1`. |
| * {@link android.graphics.ImageFormat#RAW_SENSOR} is supported as an output/input |
| format, that is, {@link android.graphics.ImageFormat#RAW_SENSOR} is included in the |
| lists of formats returned by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getInputFormats} and {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputFormats}. |
| * {@link android.hardware.camera2.params.StreamConfigurationMap#getValidOutputFormatsForInput} |
| returns non-empty int[] for each supported input format returned by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getInputFormats}. |
| * Each size returned by {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getInputSizes |
| getInputSizes(ImageFormat.RAW_SENSOR)} is also included in {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes |
| getOutputSizes(ImageFormat.RAW_SENSOR)} |
| * Using {@link android.graphics.ImageFormat#RAW_SENSOR} does not cause a frame rate |
| drop relative to the sensor's maximum capture rate (at that resolution). |
| * No CaptureRequest controls will be applicable when a request has an input target |
| with {@link android.graphics.ImageFormat#RAW_SENSOR} format. |
| </notes> |
| </value> |
| <value optional="true" visibility="java_public" hal_version="3.8">DYNAMIC_RANGE_TEN_BIT |
| <notes> |
| The device supports one or more 10-bit camera outputs according to the dynamic range |
| profiles specified in |
| {@link android.hardware.camera2.params.DynamicRangeProfiles#getSupportedProfiles}. |
| They can be configured as part of the capture session initialization via |
| {@link android.hardware.camera2.params.OutputConfiguration#setDynamicRangeProfile}. |
| Cameras that enable this capability must also support the following: |
| |
| * Profile {@link android.hardware.camera2.params.DynamicRangeProfiles#HLG10} |
| * All mandatory stream combinations for this specific capability as per |
| [documentation](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#10-bit-output-additional-guaranteed-configurations) |
| * In case the device is not able to capture some combination of supported |
| standard 8-bit and/or 10-bit dynamic range profiles within the same capture request, |
| then those constraints must be listed in |
| {@link android.hardware.camera2.params.DynamicRangeProfiles#getProfileCaptureRequestConstraints} |
| * Recommended dynamic range profile listed in |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_RECOMMENDED_TEN_BIT_DYNAMIC_RANGE_PROFILE}. |
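|
| An illustrative sketch (surface name hypothetical) of selecting a 10-bit profile
| during session setup:
|
|     import android.hardware.camera2.params.DynamicRangeProfiles;
|     import android.hardware.camera2.params.OutputConfiguration;
|
|     // Assumes `surface` already exists and that HLG10 support was verified via
|     // DynamicRangeProfiles#getSupportedProfiles.
|     OutputConfiguration outputConfig = new OutputConfiguration(surface);
|     outputConfig.setDynamicRangeProfile(DynamicRangeProfiles.HLG10);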
| </notes> |
| </value> |
| <value optional="true" hal_version="3.8">STREAM_USE_CASE |
| <notes> |
| The camera device supports selecting a per-stream use case via |
| {@link android.hardware.camera2.params.OutputConfiguration#setStreamUseCase} |
| so that the device can optimize camera pipeline parameters such as tuning, sensor |
| mode, or ISP settings for a specific user scenario. |
| Some sample usages of this capability are: |
| |
| * Distinguish high quality YUV captures from a regular YUV stream where |
| the image quality may not be as good as the JPEG stream, or |
| * Use one stream to serve multiple purposes: viewfinder, video recording and |
| still capture. This is common with applications that wish to apply edits equally |
| to preview, saved images, and saved videos. |
| |
| This capability requires the camera device to support the following |
| stream use cases: |
| |
| * DEFAULT for backward compatibility where the application doesn't set |
| a stream use case |
| * PREVIEW for live viewfinder and in-app image analysis |
| * STILL_CAPTURE for still photo capture |
| * VIDEO_RECORD for recording video clips |
| * PREVIEW_VIDEO_STILL for one single stream used for viewfinder, video |
| recording, and still capture. |
| * VIDEO_CALL for long running video calls |
| |
| {@link android.hardware.camera2.CameraCharacteristics#SCALER_AVAILABLE_STREAM_USE_CASES} |
| lists all of the supported stream use cases. |
| |
| Refer to the |
| [guideline](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#stream-use-case-capability-additional-guaranteed-configurations) |
| for the mandatory stream combinations involving stream use cases, which can also be |
| queried via {@link android.hardware.camera2.params.MandatoryStreamCombination}. |
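|
| An illustrative sketch (surface name hypothetical) of tagging a stream with a use
| case during session configuration:
|
|     import android.hardware.camera2.CameraMetadata;
|     import android.hardware.camera2.params.OutputConfiguration;
|
|     // Assumes `previewSurface` already exists and STREAM_USE_CASE is advertised.
|     OutputConfiguration previewConfig = new OutputConfiguration(previewSurface);
|     previewConfig.setStreamUseCase(
|         CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);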
| </notes> |
| </value> |
| <value optional="true" visibility="java_public" hal_version="3.9">COLOR_SPACE_PROFILES |
| <notes> |
| The device supports querying the possible combinations of color spaces, image |
| formats, and dynamic range profiles supported by the camera and requesting a |
| particular color space for a session via |
| {@link android.hardware.camera2.params.SessionConfiguration#setColorSpace}. |
| |
| Cameras that enable this capability may or may not also implement dynamic range |
| profiles. If they don't, |
| {@link android.hardware.camera2.params.ColorSpaceProfiles#getSupportedDynamicRangeProfiles} |
| will return only |
| {@link android.hardware.camera2.params.DynamicRangeProfiles#STANDARD} and |
| {@link android.hardware.camera2.params.ColorSpaceProfiles#getSupportedColorSpacesForDynamicRange} |
| will assume support of the |
| {@link android.hardware.camera2.params.DynamicRangeProfiles#STANDARD} |
| profile in all combinations of color spaces and image formats. |
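|
| An illustrative sketch (configuration name hypothetical) of requesting a specific
| color space for a session:
|
|     import android.graphics.ColorSpace;
|     import android.hardware.camera2.params.SessionConfiguration;
|
|     // Assumes `sessionConfig` is a SessionConfiguration being prepared, and that
|     // DISPLAY_P3 support was verified via the color space profiles query above.
|     sessionConfig.setColorSpace(ColorSpace.Named.DISPLAY_P3);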
| </notes> |
| </value> |
| </enum> |
| <description>List of capabilities that this camera device |
| advertises as fully supporting.</description> |
| <details> |
| A capability is a contract that the camera device makes in order |
| to be able to satisfy one or more use cases. |
| |
| Listing a capability guarantees that the whole set of features
| required to support a common use case will all be available.
|
| Using a subset of the functionality provided by an unsupported
| capability may be possible on a specific camera device implementation;
| to do this, query each of android.request.availableRequestKeys,
| android.request.availableResultKeys, and
| android.request.availableCharacteristicsKeys.
| |
| The following capabilities are guaranteed to be available on |
| android.info.supportedHardwareLevel `==` FULL devices: |
| |
| * MANUAL_SENSOR |
| * MANUAL_POST_PROCESSING |
| |
| Other capabilities may be available on either FULL or LIMITED |
| devices, but the application should query this key to be sure. |
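|
| An illustrative sketch (variable names hypothetical) of querying this key:
|
|     import android.hardware.camera2.CameraCharacteristics;
|     import android.hardware.camera2.CameraMetadata;
|
|     // Assumes `characteristics` already exists for this camera device.
|     int[] caps = characteristics.get(
|         CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
|     boolean hasManualSensor = false;
|     for (int cap : caps) {
|         if (cap == CameraMetadata.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR) {
|             hasManualSensor = true;
|         }
|     }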
| </details> |
| <hal_details> |
| Additional constraint details per-capability will be available |
| in the Compatibility Test Suite. |
| |
| Minimum baseline requirements required for the |
| BACKWARD_COMPATIBLE capability are not explicitly listed. |
| Instead refer to "BC" tags and the camera CTS tests in the |
| android.hardware.camera2.cts package. |
| |
| Listed controls that can be either request or result (e.g. |
| android.sensor.exposureTime) must be available both in the |
| request and the result in order to be considered to be |
| capability-compliant. |
| |
| For example, if the HAL claims to support MANUAL control, |
| then exposure time must be configurable via the request _and_ |
| the actual exposure applied must be available via |
| the result. |
| |
| If MANUAL_SENSOR is omitted, the HAL may choose to omit the |
| android.scaler.availableMinFrameDurations static property entirely. |
| |
| For PRIVATE_REPROCESSING and YUV_REPROCESSING capabilities, see |
| hardware/libhardware/include/hardware/camera3.h Section 10 for more information. |
| |
| Devices that support the MANUAL_SENSOR capability must support the |
| CAMERA3_TEMPLATE_MANUAL template defined in camera3.h. |
| |
| Devices that support the PRIVATE_REPROCESSING capability or the |
| YUV_REPROCESSING capability must support the |
| CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG template defined in camera3.h. |
| |
| For DEPTH_OUTPUT, the depth-format keys |
| android.depth.availableDepthStreamConfigurations, |
| android.depth.availableDepthMinFrameDurations, |
| android.depth.availableDepthStallDurations must be available, in |
| addition to the other keys explicitly mentioned in the DEPTH_OUTPUT |
| enum notes. The entry android.depth.maxDepthSamples must be available |
| if the DEPTH_POINT_CLOUD format is supported (HAL pixel format BLOB, dataspace |
| DEPTH). |
| |
| A camera device with LOGICAL_MULTI_CAMERA capability should operate in the
| same way as a physical camera device based on its hardware level and capabilities.
| It's recommended that its feature set is a superset of that of the individual
| physical cameras.
| |
| * In the camera1 API, to maintain application compatibility, for each camera facing
| there may be one or more {logical_camera_id, physical_camera_1_id, physical_camera_2_id, ...}
| combinations, where logical_camera_id is composed of physical_camera_N_id. The camera
| framework will only advertise one camera id
| (within the combinations for the particular facing) that is frontmost in the HAL
| published camera id list.
| For example, suppose the HAL advertises 6 back facing camera IDs (ID0 to ID5), among
| which ID4 and ID5 are logical cameras backed by ID0+ID1 and ID2+ID3 respectively. In
| this case, only ID0 will be available for the camera1 API to use.
| |
| * The camera HAL is strongly recommended to advertise camera devices with the best
| feature, power, performance, and latency tradeoffs at the front of the camera id list.
|
| * The camera HAL may switch between physical cameras depending on focalLength, cropRegion,
| or zoomRatio. If physical cameras have different sizes, the HAL must maintain a single
| logical camera activeArraySize/pixelArraySize/preCorrectionActiveArraySize, and must do
| proper mapping between the logical camera and underlying physical cameras for all related
| metadata tags, such as crop region, zoomRatio, 3A regions, and intrinsicCalibration.
| |
| * Starting from HIDL ICameraDevice version 3.5, the camera HAL must support
| isStreamCombinationSupported so that the application can query whether a particular
| combination of logical and physical streams is supported.
| |
| A MONOCHROME camera device must also advertise BACKWARD_COMPATIBLE capability, and must |
| not advertise MANUAL_POST_PROCESSING capability. |
| |
| * To maintain backward compatibility, the camera device must support all |
| BACKWARD_COMPATIBLE required keys. The android.control.awbAvailableModes key only contains |
| AUTO, and android.control.awbState is either CONVERGED or LOCKED depending on
| android.control.awbLock. |
| |
| * android.colorCorrection.mode, android.colorCorrection.transform, and |
| android.colorCorrection.gains must not be in available request and result keys. |
| As a result, the camera device cannot be a FULL device. However, the HAL can |
| still advertise other individual capabilities. |
| |
| * If the device supports tonemap control, only android.tonemap.curveRed is used. |
| CurveGreen and curveBlue are no-ops. |
| |
| In Android API level 28, a MONOCHROME camera device must not have RAW capability. From |
| API level 29, a camera is allowed to have both MONOCHROME and RAW capabilities. |
| |
| To support the legacy API through the ICameraDevice 3.x shim layer, devices advertising
| the OFFLINE_PROCESSING capability must also support configuring an input stream of the
| same size as the picture size if:
| |
| * The device supports PRIVATE_REPROCESSING capability |
| * The device's maximal JPEG resolution can reach 30 FPS min frame duration |
| * The device does not support HAL based ZSL (android.control.enableZsl) |
| |
| For devices which support SYSTEM_CAMERA and LOGICAL_MULTI_CAMERA capabilities: |
| |
| Hidden physical camera ids[1] must not be shared[2] between public camera devices
| and camera devices advertising SYSTEM_CAMERA capability.
| |
| [1] - Camera device ids which are advertised in the |
| ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS list, and not available through |
| ICameraProvider.getCameraIdList(). |
| |
| [2] - The ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS lists must not have common
| camera ids.
| </hal_details> |
| </entry> |
| <entry name="availableRequestKeys" type="int32" visibility="ndk_public" |
| container="array" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>A list of all keys that the camera device has available |
| to use with {@link android.hardware.camera2.CaptureRequest|ACaptureRequest}.</description> |
| |
| <details>Attempting to set a key into a CaptureRequest that is not |
| listed here will result in an invalid request and will be rejected |
| by the camera device. |
| |
| This field can be used to query the feature set of a camera device |
| at a more granular level than capabilities. This is especially |
| important for optional keys that are not listed under any capability |
| in android.request.availableCapabilities. |
| </details> |
| <hal_details> |
| Vendor tags can be listed here. Vendor tag metadata should also |
| use the extensions C api (refer to camera3.h for more details). |
| |
| Setting/getting vendor tags will be checked against the metadata |
| vendor extensions API and not against this field. |
| |
| The HAL must not consume any request tags that are not listed either |
| here or in the vendor tag list. |
| |
| The public camera2 API will always make the vendor tags visible |
| via |
| {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureRequestKeys}. |
| </hal_details> |
| </entry> |
| <entry name="availableResultKeys" type="int32" visibility="ndk_public" |
| container="array" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>A list of all keys that the camera device has available to use with {@link |
| android.hardware.camera2.CaptureResult|ACameraCaptureSession_captureCallback_result}. |
| </description> |
| |
| <details>Attempting to get a key from a CaptureResult that is not |
| listed here will always return a `null` value. Getting a key from |
| a CaptureResult that is listed here will generally never return a `null` |
| value. |
| |
| The following keys may return `null` unless they are enabled: |
| |
| * android.statistics.lensShadingMap (non-null iff android.statistics.lensShadingMapMode == ON) |
| |
| (Those sometimes-null keys will nevertheless be listed here |
| if they are available.) |
| |
| This field can be used to query the feature set of a camera device |
| at a more granular level than capabilities. This is especially |
| important for optional keys that are not listed under any capability |
| in android.request.availableCapabilities. |
| </details> |
| <hal_details> |
| Tags listed here must always have an entry in the result metadata, |
| even if the entry contains 0 elements. Only array-type tags (e.g. lists, |
| matrices, strings) are allowed to have 0 elements. |
| |
| Vendor tags can be listed here. Vendor tag metadata should also |
| use the extensions C API (refer to camera3.h for more details). |
| |
| Setting/getting vendor tags will be checked against the metadata |
| vendor extensions API and not against this field. |
| |
| The HAL must not produce any result tags that are not listed either |
| here or in the vendor tag list. |
| |
| The public camera2 API will always make the vendor tags visible via {@link |
| android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}. |
| </hal_details> |
| </entry> |
| <entry name="availableCharacteristicsKeys" type="int32" visibility="ndk_public" |
| container="array" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>A list of all keys that the camera device has available to use with {@link |
| android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}. |
| </description> |
| <details>This entry follows the same rules as |
| android.request.availableResultKeys (except that it applies for |
| CameraCharacteristics instead of CaptureResult). See above for more |
| details. |
| </details> |
| <hal_details> |
| Keys listed here must always have an entry in the static info metadata, |
| even if the entry contains 0 elements. Only array-type tags (e.g. lists, |
| matrices, strings) are allowed to have 0 elements. |
| |
| Vendor tags can be listed here. Vendor tag metadata should also use |
| the extensions C API (refer to camera3.h for more details). |
| |
| Setting/getting vendor tags will be checked against the metadata |
| vendor extensions API and not against this field. |
| |
| The HAL must not have any tags in its static info that are not listed |
| either here or in the vendor tag list. |
| |
| The public camera2 API will always make the vendor tags visible |
| via {@link android.hardware.camera2.CameraCharacteristics#getKeys}. |
| </hal_details> |
| </entry> |
| <entry name="availableSessionKeys" type="int32" visibility="ndk_public" |
| container="array" hwlevel="legacy" hal_version="3.3"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>A subset of the available request keys that the camera device |
| can pass as part of the capture session initialization.</description> |
| |
| <details> This is a subset of android.request.availableRequestKeys which |
| contains a list of keys that are difficult to apply per-frame and |
| can result in unexpected delays when modified during the capture session |
| lifetime. Typical examples include parameters that require a |
| time-consuming hardware re-configuration or internal camera pipeline |
| change. For performance reasons we advise clients to pass their initial |
| values as part of |
| {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}. |
| Once the camera capture session is enabled it is also recommended to avoid |
| changing them from their initial values set in |
| {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}. |
| Control over session parameters can still be exerted in capture requests, |
| but clients should expect delays while such changes are applied. |
| An example usage scenario could look like this: |
| |
| * The camera client starts by querying the session parameter key list via |
| {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys|ACameraManager_getCameraCharacteristics}. |
| * Before triggering the capture session creation sequence, a capture request |
| must be built via |
| {@link CameraDevice#createCaptureRequest|ACameraDevice_createCaptureRequest} |
| using an appropriate template matching the particular use case. |
| * The client should go over the list of session parameters and check |
| whether any of the listed keys match the parameters that |
| they intend to modify as part of the first capture request. |
| * If there is no such match, the capture request can be passed |
| unmodified to |
| {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}. |
| * If matches do exist, the client should update the respective values |
| and pass the request to |
| {@link SessionConfiguration#setSessionParameters|ACameraDevice_createCaptureSessionWithSessionParameters}. |
| * After the capture session initialization completes the session parameter |
| key list can continue to serve as reference when posting or updating |
| further requests. As mentioned above, further changes to session |
| parameters should ideally be avoided; if updates are necessary, |
| however, clients should expect a delay/glitch during the |
| parameter switch. |
| |
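| A minimal code sketch of this scenario (`device`, `outputs`, `executor`, and |
| `stateCallback` are assumed to be set up already): |
| |
|     CaptureRequest.Builder builder = |
|             device.createCaptureRequest(CameraDevice.TEMPLATE_RECORD); |
|     // Assume the AE target FPS range key was found in the session key list. |
|     builder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, new Range<>(30, 30)); |
|     SessionConfiguration config = new SessionConfiguration( |
|             SessionConfiguration.SESSION_REGULAR, outputs, executor, stateCallback); |
|     config.setSessionParameters(builder.build()); |
|     device.createCaptureSession(config); |
| |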
| </details> |
| <hal_details> |
| If android.control.aeTargetFpsRange is part of the session parameters and constrained high |
| speed mode is enabled, then only modifications of the maximum framerate value will be |
| monitored by the framework and can trigger camera re-configuration. For more information |
| about framerate ranges during constrained high speed sessions see |
| {@link android.hardware.camera2.CameraDevice#createConstrainedHighSpeedCaptureSession}. |
| Vendor tags can be listed here. Vendor tag metadata should also |
| use the extensions C API (refer to |
| android.hardware.camera.device.V3_4.StreamConfiguration.sessionParams for more details). |
| |
| Setting/getting vendor tags will be checked against the metadata |
| vendor extensions API and not against this field. |
| |
| The HAL must not consume any request tags in the session parameters that |
| are not listed either here or in the vendor tag list. |
| |
| The public camera2 API will always make the vendor tags visible |
| via |
| {@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys}. |
| </hal_details> |
| </entry> |
| <entry name="availablePhysicalCameraRequestKeys" type="int32" visibility="ndk_public" |
| container="array" hwlevel="limited" hal_version="3.3"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>A subset of the available request keys that can be overridden for |
| physical devices backing a logical multi-camera.</description> |
| <details> |
| This is a subset of android.request.availableRequestKeys which contains a list |
| of keys that can be overridden using |
| {@link android.hardware.camera2.CaptureRequest.Builder#setPhysicalCameraKey}. |
| The respective value of such request key can be obtained by calling |
| {@link android.hardware.camera2.CaptureRequest.Builder#getPhysicalCameraKey}. |
| Capture requests that contain individual physical device requests must be built via |
| {@link android.hardware.camera2.CameraDevice#createCaptureRequest(int, Set)}. |
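| |
| A minimal sketch (assuming `device` is a logical multi-camera and `physicalId` is one |
| of its physical camera ids): |
| |
|     Set<String> physicalIds = Collections.singleton(physicalId); |
|     CaptureRequest.Builder builder = device.createCaptureRequest( |
|             CameraDevice.TEMPLATE_PREVIEW, physicalIds); |
|     // Override exposure compensation for this physical device only. |
|     builder.setPhysicalCameraKey( |
|             CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 1, physicalId); |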
| </details> |
| <hal_details> |
| Vendor tags can be listed here. Vendor tag metadata should also |
| use the extensions C API (refer to |
| android.hardware.camera.device.V3_4.CaptureRequest.physicalCameraSettings for more |
| details). |
| |
| Setting/getting vendor tags will be checked against the metadata |
| vendor extensions API and not against this field. |
| |
| The HAL must not consume any request tags in the session parameters that |
| are not listed either here or in the vendor tag list. |
| |
| There should be no overlap between this set of keys and the available session keys |
| ({@link android.hardware.camera2.CameraCharacteristics#getAvailableSessionKeys}), nor |
| with any other controls that can impact the dual-camera sync. |
| |
| The public camera2 API will always make the vendor tags visible |
| via |
| {@link android.hardware.camera2.CameraCharacteristics#getAvailablePhysicalCameraRequestKeys}. |
| </hal_details> |
| </entry> |
| <entry name="characteristicKeysNeedingPermission" type="int32" visibility="hidden" |
| container="array" hwlevel="legacy" hal_version="3.4"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>A list of camera characteristics keys that are only available |
| in case the camera client has camera permission.</description> |
| |
| <details>The entry contains a subset of |
| {@link android.hardware.camera2.CameraCharacteristics#getKeys} that require camera clients |
| to acquire the {@link android.Manifest.permission#CAMERA} permission before calling |
| {@link android.hardware.camera2.CameraManager#getCameraCharacteristics}. If the |
| permission is not held by the camera client, then the values of the respective properties |
| will not be present in {@link android.hardware.camera2.CameraCharacteristics}. |
| </details> |
| <hal_details> |
| Do not set this property directly; the camera service will overwrite any previous values. |
| </hal_details> |
| </entry> |
| <entry name="availableDynamicRangeProfiles" type="int32" visibility="java_public" |
| synthetic="true" optional="true" typedef="dynamicRangeProfiles"> |
| <description>Devices supporting the 10-bit output capability |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT} |
| must list their supported dynamic range profiles along with capture request |
| constraints for specific profile combinations. |
| </description> |
| <details> |
| Camera clients can retrieve the list of supported 10-bit dynamic range profiles by calling |
| {@link android.hardware.camera2.params.DynamicRangeProfiles#getSupportedProfiles}. |
| Any of these profiles can be configured by setting the OutputConfiguration dynamic range |
| profile via {@link android.hardware.camera2.params.OutputConfiguration#setDynamicRangeProfile}. |
| Clients can also check if there are any constraints that limit the combination |
| of supported profiles that can be referenced within a single capture request by calling |
| {@link android.hardware.camera2.params.DynamicRangeProfiles#getProfileCaptureRequestConstraints}. |
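| |
| For example, a client could prefer HLG10 when the device reports it. This is a minimal |
| sketch; `characteristics` and `outputConfiguration` are assumed: |
| |
|     DynamicRangeProfiles profiles = characteristics.get( |
|             CameraCharacteristics.REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES); |
|     if (profiles != null |
|             && profiles.getSupportedProfiles().contains(DynamicRangeProfiles.HLG10)) { |
|         outputConfiguration.setDynamicRangeProfile(DynamicRangeProfiles.HLG10); |
|     } |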
| </details> |
| </entry> |
| <entry name="availableDynamicRangeProfilesMap" type="int64" visibility="ndk_public" |
| optional="true" enum="true" container="array" hal_version="3.8"> |
| <array> |
| <size>n</size> |
| <size>3</size> |
| </array> |
| <enum> |
| <value id="0x1">STANDARD |
| <notes> |
| 8-bit SDR profile, which is the default for all devices without 10-bit output capability. |
| </notes> |
| </value> |
| <value id="0x2">HLG10 |
| <notes> |
| 10-bit pixel samples encoded using the Hybrid Log-Gamma (HLG) transfer function. |
| </notes> |
| </value> |
| <value id="0x4">HDR10 |
| <notes> |
| 10-bit pixel samples encoded using the SMPTE ST 2084 transfer function. |
| This profile utilizes internal static metadata to increase the quality |
| of the capture. |
| </notes> |
| </value> |
| <value id="0x8">HDR10_PLUS |
| <notes> |
| 10-bit pixel samples encoded using the SMPTE ST 2084 transfer function. |
| In contrast to HDR10, this profile uses internal per-frame metadata |
| to further enhance the quality of the capture. |
| </notes> |
| </value> |
| <value id="0x10">DOLBY_VISION_10B_HDR_REF |
| <notes> |
| This is a camera mode for Dolby Vision capture optimized for a more |
| scene-accurate capture. This would typically differ from what a specific device |
| might want to tune for a consumer-optimized Dolby Vision general capture. |
| </notes> |
| </value> |
| <value id="0x20">DOLBY_VISION_10B_HDR_REF_PO |
| <notes> |
| This is the power optimized mode for 10-bit Dolby Vision HDR Reference Mode. |
| </notes> |
| </value> |
| <value id="0x40">DOLBY_VISION_10B_HDR_OEM |
| <notes> |
| This is the camera mode for the default Dolby Vision capture mode for the |
| specific device. This would be tuned by each specific device for |
| consumer-pleasing results that resonate with their particular audience. We expect |
| that each specific device would have a different look for their default |
| Dolby Vision capture. |
| </notes> |
| </value> |
| <value id="0x80">DOLBY_VISION_10B_HDR_OEM_PO |
| <notes> |
| This is the power optimized mode for 10-bit Dolby Vision HDR device specific |
| capture Mode. |
| </notes> |
| </value> |
| <value id="0x100">DOLBY_VISION_8B_HDR_REF |
| <notes> |
| This is the 8-bit version of the Dolby Vision reference capture mode optimized |
| for scene accuracy. |
| </notes> |
| </value> |
| <value id="0x200">DOLBY_VISION_8B_HDR_REF_PO |
| <notes> |
| This is the power optimized mode for 8-bit Dolby Vision HDR Reference Mode. |
| </notes> |
| </value> |
| <value id="0x400">DOLBY_VISION_8B_HDR_OEM |
| <notes> |
| This is the 8-bit version of device specific tuned and optimized Dolby Vision |
| capture mode. |
| </notes> |
| </value> |
| <value id="0x800">DOLBY_VISION_8B_HDR_OEM_PO |
| <notes> |
| This is the power optimized mode for 8-bit Dolby Vision HDR device specific |
| capture Mode. |
| </notes> |
| </value> |
| <value id="0x1000">MAX |
| <notes> |
| </notes> |
| </value> |
| </enum> |
| <description>A map of all available 10-bit dynamic range profiles along with their |
| capture request constraints. |
| </description> |
| <details>Devices supporting the 10-bit output capability |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT} |
| must list their supported dynamic range profiles. If the camera is not able to |
| support every possible profile combination within a single capture request, the |
| constraints must be listed here as well. |
| </details> |
| <hal_details> |
| The array contains three entries per supported profile: |
| 1) The supported dynamic range profile value. Note that |
| ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_STANDARD is assumed to be always |
| present and must not be listed. |
| 2) A bitmap combination of all supported profiles that can be referenced at the same |
| time within a single capture request. Note that a value of 0 means that there are |
| no constraints and all combinations are supported. |
| 3) A flag indicating the presence of an internal lookahead functionality that |
| can increase the streaming latency by more than 3 buffers. The value 0 indicates |
| that the latency doesn't exceed 3 buffers; any non-zero value indicates |
| latency beyond 3 buffers. When the flag is set, camera clients are |
| advised to avoid configuring this profile for latency-sensitive outputs such as |
| preview. Note that such extra latency must not be present for the HLG10 profile. |
| |
| For example, assume a device that can only support HLG10, HDR10, and |
| HDR10_PLUS from the possible 10-bit profiles, with the following capture constraints: |
| 1) HLG10 can be included in any capture request without constraints. |
| 2) HDR10 and HDR10_PLUS can only be referenced together and/or with HLG10, but not with |
| STANDARD. |
| In the same example, HLG10 and HDR10 will not have additional lookahead latency, and |
| HDR10_PLUS will have latency that exceeds 3 buffers. |
| The resulting array should look like this: |
| [ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_HLG10, 0, 0, |
| ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_HDR10, |
| (ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_HDR10 | |
| ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_HLG10 | |
| ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_HDR10_PLUS), 0, |
| ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_HDR10_PLUS, |
| (ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_HDR10_PLUS | |
| ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_HLG10 | |
| ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_HDR10), 1] |
| |
| Camera providers must ensure that each processed buffer from a stream configured with the |
| HDR10 dynamic range profile includes SMPTE ST 2086 static metadata by calling |
| 'android::hardware::graphics::mapper::V4_0::IMapper::set' before returning the buffer. |
| |
| Camera providers must ensure that each processed buffer from a stream configured |
| with HDR10_PLUS dynamic range profile includes SMPTE ST 2094-40 dynamic |
| metadata by calling 'android::hardware::graphics::mapper::V4_0::IMapper::set' before |
| returning the buffer. |
| |
| Camera providers must ensure that each processed buffer from a stream configured |
| with any of the 10-bit Dolby Vision dynamic range profiles includes SMPTE ST 2094-10 |
| dynamic metadata by calling 'android::hardware::graphics::mapper::V4_0::IMapper::set' |
| before returning the buffer. |
| </hal_details> |
| </entry> |
| <entry name="recommendedTenBitDynamicRangeProfile" type="int64" visibility="java_public" |
| optional="true" hal_version="3.8"> |
| <description>Recommended 10-bit dynamic range profile.</description> |
| <details>Devices supporting the 10-bit output capability |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT} |
| must list a 10-bit supported dynamic range profile that is expected to perform |
| optimally in terms of image quality, power and performance. |
| The value advertised can be used as a hint by camera clients when configuring the dynamic |
| range profile when calling |
| {@link android.hardware.camera2.params.OutputConfiguration#setDynamicRangeProfile}. |
| </details> |
| </entry> |
| <entry name="availableColorSpaceProfiles" type="int32" visibility="java_public" |
| synthetic="true" optional="true" typedef="colorSpaceProfiles" hal_version="3.9"> |
| <description> |
| An interface for querying the color space profiles supported by a camera device. |
| </description> |
| <details> |
| A color space profile is a combination of a color space, an image format, and a dynamic |
| range profile. Camera clients can retrieve the list of supported color spaces by calling |
| {@link android.hardware.camera2.params.ColorSpaceProfiles#getSupportedColorSpaces} or |
| {@link android.hardware.camera2.params.ColorSpaceProfiles#getSupportedColorSpacesForDynamicRange}. |
| If a camera does not support the |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT} |
| capability, the dynamic range profile will always be |
| {@link android.hardware.camera2.params.DynamicRangeProfiles#STANDARD}. Color space |
| capabilities are queried in combination with an {@link android.graphics.ImageFormat}. |
| If a camera client wants to know the general color space capabilities of a camera device |
| regardless of image format, it can specify {@link android.graphics.ImageFormat#UNKNOWN}. |
| The color space for a session can be configured by setting the SessionConfiguration |
| color space via {@link android.hardware.camera2.params.SessionConfiguration#setColorSpace}. |
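| |
| For example (a minimal sketch; `characteristics` and `sessionConfiguration` are |
| assumed): |
| |
|     ColorSpaceProfiles profiles = characteristics.get( |
|             CameraCharacteristics.REQUEST_AVAILABLE_COLOR_SPACE_PROFILES); |
|     if (profiles != null |
|             && profiles.getSupportedColorSpaces(ImageFormat.UNKNOWN) |
|                     .contains(ColorSpace.Named.DISPLAY_P3)) { |
|         sessionConfiguration.setColorSpace(ColorSpace.Named.DISPLAY_P3); |
|     } |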
| </details> |
| </entry> |
| <entry name="availableColorSpaceProfilesMap" type="int64" visibility="ndk_public" |
| optional="true" enum="true" container="array" hal_version="3.9"> |
| <array> |
| <size>n</size> |
| <size>3</size> |
| </array> |
| <enum> |
| <value id="-1">UNSPECIFIED |
| <notes> |
| Default value, when not explicitly specified. The camera device will choose the color |
| space to employ. |
| </notes> |
| </value> |
| <value visibility="system" id="0">SRGB |
| <notes> |
| RGB color space sRGB standardized as IEC 61966-2.1:1999. |
| </notes> |
| </value> |
| <value visibility="system" id="7">DISPLAY_P3 |
| <notes> |
| RGB color space Display P3 based on SMPTE RP 431-2-2007 and IEC 61966-2.1:1999. |
| </notes> |
| </value> |
| <value visibility="system" id="16">BT2020_HLG |
| <notes> |
| RGB color space BT.2100 standardized as Hybrid Log Gamma encoding. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| A list of all possible color space profiles supported by a camera device. |
| </description> |
| <details> |
| A color space profile is a combination of a color space, an image format, and a dynamic range |
| profile. If a camera does not support the |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT} |
| capability, the dynamic range profile will always be |
| {@link android.hardware.camera2.params.DynamicRangeProfiles#STANDARD}. Camera clients can |
| use {@link android.hardware.camera2.params.SessionConfiguration#setColorSpace} to select |
| a color space. |
| </details> |
| <hal_details> |
| The array contains three entries per supported profile: |
| |
| 1) The supported color space. |
| 2) An image format which can be used with this color space. |
| 3) A bitmap of all compatible dynamic range profiles, if the device is HDR-capable. |
| |
| The possible values for #1 are the positive values of the |
| ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_* enum, which is equivalent to |
| {@link android.graphics.ColorSpace.Named} and its ordinals. UNSPECIFIED should not be |
| used here. It should be noted that not all {@link android.graphics.ColorSpace.Named} |
| values are supported, only those in the |
| ANDROID_REQUEST_AVAILABLE_COLOR_SPACE_PROFILES_MAP_* enum. |
| |
| The possible values for #2 consist of the public-facing image/pixel formats, found at |
| {@link android.graphics.ImageFormat} and {@link android.graphics.PixelFormat}. Each maps |
| to a HAL pixel format except for {@link android.graphics.ImageFormat#JPEG}, |
| {@link android.graphics.ImageFormat#HEIC}, and |
| {@link android.graphics.ImageFormat#DEPTH_JPEG}. Depth formats besides DEPTH_JPEG are |
| not applicable and should not be specified. If there are no constraints on the type of |
| image format a color space is compatible with, this can be |
| {@link android.graphics.ImageFormat#UNKNOWN}. |
| |
| If the device is not HDR-capable, #3 should always be |
| ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_STANDARD. Otherwise, #3 should be a |
| bitmap of the compatible ANDROID_REQUEST_AVAILABLE_DYNAMIC_RANGE_PROFILES_* values. |
| </hal_details> |
| </entry> |
| </static> |
| </section> |
| <section name="scaler"> |
| <controls> |
| <entry name="cropRegion" type="int32" visibility="public" |
| container="array" typedef="rectangle" hwlevel="legacy"> |
| <array> |
| <size>4</size> |
| </array> |
| <description>The desired region of the sensor to read out for this capture.</description> |
| <units>Pixel coordinates relative to |
| android.sensor.info.activeArraySize or |
| android.sensor.info.preCorrectionActiveArraySize depending on distortion correction |
| capability and mode</units> |
| <details> |
| This control can be used to implement digital zoom. |
| |
| For devices not supporting android.distortionCorrection.mode control, the coordinate |
| system always follows that of android.sensor.info.activeArraySize, with `(0, 0)` being |
| the top-left pixel of the active array. |
| |
| For devices supporting android.distortionCorrection.mode control, the coordinate system |
| depends on the mode being set. When the distortion correction mode is OFF, the |
| coordinate system follows android.sensor.info.preCorrectionActiveArraySize, with `(0, |
| 0)` being the top-left pixel of the pre-correction active array. When the distortion |
| correction mode is not OFF, the coordinate system follows |
| android.sensor.info.activeArraySize, with `(0, 0)` being the top-left pixel of the |
| active array. |
| |
| Output streams use this rectangle to produce their output, cropping to a smaller region |
| if necessary to maintain the stream's aspect ratio, then scaling the sensor input to |
| match the output's configured resolution. |
| |
| The crop region is usually applied after the RAW to other color space (e.g. YUV) |
| conversion. As a result RAW streams are not croppable unless supported by the |
| camera device. See android.scaler.availableStreamUseCases#CROPPED_RAW for details. |
| |
| For non-raw streams, any additional per-stream cropping will be done to maximize the |
| final pixel area of the stream. |
| |
| For example, if the crop region is set to a 4:3 aspect ratio, then 4:3 streams will use |
| the exact crop region. 16:9 streams will further crop vertically (letterbox). |
| |
| Conversely, if the crop region is set to a 16:9 aspect ratio, then 4:3 outputs will crop |
| horizontally (pillarbox), and 16:9 streams will match exactly. These additional crops will be |
| centered within the crop region. |
| |
| To illustrate, here are several scenarios of different crop regions and output streams, |
| for a hypothetical camera device with an active array of size `(2000,1500)`. Note that |
| several of these examples use non-centered crop regions for ease of illustration; such |
| regions are only supported on devices with FREEFORM capability |
| (android.scaler.croppingType `== FREEFORM`), but this does not affect the way the crop |
| rules work otherwise. |
| |
| * Camera Configuration: |
| * Active array size: `2000x1500` (3 MP, 4:3 aspect ratio) |
| * Output stream #1: `640x480` (VGA, 4:3 aspect ratio) |
| * Output stream #2: `1280x720` (720p, 16:9 aspect ratio) |
| * Case #1: 4:3 crop region with 2x digital zoom |
| * Crop region: `Rect(500, 375, 1500, 1125) // (left, top, right, bottom)` |
| *  |
| * `640x480` stream source area: `(500, 375, 1500, 1125)` (equal to crop region) |
| * `1280x720` stream source area: `(500, 469, 1500, 1031)` (letterboxed) |
| * Case #2: 16:9 crop region with ~1.5x digital zoom. |
| * Crop region: `Rect(500, 375, 1833, 1125)` |
| *  |
| * `640x480` stream source area: `(666, 375, 1666, 1125)` (pillarboxed) |
| * `1280x720` stream source area: `(500, 375, 1833, 1125)` (equal to crop region) |
| * Case #3: 1:1 crop region with ~2.6x digital zoom. |
| * Crop region: `Rect(500, 375, 1250, 1125)` |
| *  |
| * `640x480` stream source area: `(500, 469, 1250, 1031)` (letterboxed) |
| * `1280x720` stream source area: `(500, 543, 1250, 957)` (letterboxed) |
| * Case #4: Replace `640x480` stream with `1024x1024` stream, with 4:3 crop region: |
| * Crop region: `Rect(500, 375, 1500, 1125)` |
| *  |
| * `1024x1024` stream source area: `(625, 375, 1375, 1125)` (pillarboxed) |
| * `1280x720` stream source area: `(500, 469, 1500, 1031)` (letterboxed) |
| * Note that in this case, neither of the two outputs is a subset of the other, with |
| each containing image data the other doesn't have. |
| |
| If the coordinate system is android.sensor.info.activeArraySize, the width and height |
| of the crop region cannot be set to be smaller than |
| `floor( activeArraySize.width / android.scaler.availableMaxDigitalZoom )` and |
| `floor( activeArraySize.height / android.scaler.availableMaxDigitalZoom )`, respectively. |
| |
| If the coordinate system is android.sensor.info.preCorrectionActiveArraySize, the width |
| and height of the crop region cannot be set to be smaller than |
| `floor( preCorrectionActiveArraySize.width / android.scaler.availableMaxDigitalZoom )` |
| and |
| `floor( preCorrectionActiveArraySize.height / android.scaler.availableMaxDigitalZoom )`, |
| respectively. |
| |
| The camera device may adjust the crop region to account for rounding and other hardware |
| requirements; the final crop region used will be included in the output capture result. |
| |
| The camera sensor output aspect ratio depends on factors such as output stream |
| combination and android.control.aeTargetFpsRange, and shouldn't be adjusted by using |
| this control. Additionally, the camera device will treat different camera sensor output sizes |
| (potentially with in-sensor crop) as the same crop of |
| android.sensor.info.activeArraySize. As a result, the application shouldn't assume the |
| maximum crop region always maps to the same aspect ratio or field of view for the |
| sensor output. |
| |
| Starting from API level 30, it's strongly recommended to use android.control.zoomRatio |
| to take advantage of better support for zoom with logical multi-camera. The benefits |
| include better precision with optical-digital zoom combination, and ability to do |
| zoom-out from 1.0x. When using android.control.zoomRatio for zoom, the crop region in |
| the capture request should be left as the default activeArray size. The |
| coordinate system is post-zoom, meaning that the activeArraySize or |
| preCorrectionActiveArraySize covers the camera device's field of view "after" zoom. See |
| android.control.zoomRatio for details. |
| |
| For camera devices with the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability or devices where {@link CameraCharacteristics#getAvailableCaptureRequestKeys} |
| lists android.sensor.pixelMode, |
| android.sensor.info.activeArraySizeMaximumResolution / |
| android.sensor.info.preCorrectionActiveArraySizeMaximumResolution must be used as the |
| coordinate system for requests where android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
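| |
| As a minimal sketch, a centered 2x digital zoom in the activeArraySize coordinate |
| system could be computed as follows (`characteristics` and `builder` are assumed): |
| |
|     Rect active = characteristics.get( |
|             CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); |
|     int cropW = active.width() / 2; |
|     int cropH = active.height() / 2; |
|     // Center the half-size crop window within the active array. |
|     Rect crop = new Rect( |
|             active.centerX() - cropW / 2, active.centerY() - cropH / 2, |
|             active.centerX() + cropW / 2, active.centerY() + cropH / 2); |
|     builder.set(CaptureRequest.SCALER_CROP_REGION, crop); |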
| </details> |
| <ndk_details> |
| The data representation is int[4], which maps to (left, top, width, height). |
| </ndk_details> |
| <hal_details> |
| The output streams must maintain square pixels at all |
| times, no matter what the relative aspect ratios of the |
| crop region and the stream are. Negative values for |
| corner are allowed for raw output if full pixel array is |
| larger than active pixel array. Width and height may be |
| rounded to nearest larger supportable width, especially |
| for raw output, where only a few fixed scales may be |
| possible. |
| |
| If android.control.zoomRatio is supported by the HAL, the HAL must report the zoom |
| ratio via android.control.zoomRatio, and change the coordinate system such that |
| android.sensor.info.preCorrectionActiveArraySize or android.sensor.info.activeArraySize |
| (depending on whether android.distortionCorrection.mode is supported) is used to |
| represent the camera field-of-view after zoom. see android.control.zoomRatio for |
| details. |
| |
| HAL2.x uses only (x, y, width) |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| </controls> |
| <static> |
| <entry name="availableFormats" type="int32" |
| visibility="hidden" deprecated="true" enum="true" |
| container="array" typedef="imageFormat"> |
| <array> |
| <size>n</size> |
| </array> |
| <enum> |
| <value optional="true" id="0x20">RAW16 |
| <notes> |
| RAW16 is a standard, cross-platform format for raw image |
| buffers with 16-bit pixels. |
| |
| Buffers of this format are typically expected to have a |
| Color Filter Array (CFA) layout, which is given in |
| android.sensor.info.colorFilterArrangement. Sensors with |
| CFAs that are not representable by a format in |
| android.sensor.info.colorFilterArrangement should not |
| use this format. |
| |
| Buffers of this format will also follow the constraints given for |
| RAW_OPAQUE buffers, but with relaxed performance constraints. |
| |
| This format is intended to give users access to the full contents |
| of the buffers coming directly from the image sensor prior to any |
| cropping or scaling operations, and all coordinate systems for |
| metadata used for this format are relative to the size of the |
| active region of the image sensor before any geometric distortion |
| correction has been applied (i.e. |
| android.sensor.info.preCorrectionActiveArraySize). Supported |
| dimensions for this format are limited to the full dimensions of |
| the sensor (e.g. either android.sensor.info.pixelArraySize or |
| android.sensor.info.preCorrectionActiveArraySize will be the |
| only supported output size). |
| |
| See android.scaler.availableInputOutputFormatsMap for |
| the full set of performance guarantees. |
| </notes> |
| </value> |
| <value optional="true" id="0x24">RAW_OPAQUE |
| <notes> |
| RAW_OPAQUE (or |
| {@link android.graphics.ImageFormat#RAW_PRIVATE RAW_PRIVATE} |
| as referred in public API) is a format for raw image buffers |
| coming from an image sensor. |
| |
| The actual structure of buffers of this format is |
| platform-specific, but must follow several constraints: |
| |
| 1. No image post-processing operations may have been applied to |
| buffers of this type. These buffers contain raw image data coming |
| directly from the image sensor. |
| 1. If a buffer of this format is passed to the camera device for |
| reprocessing, the resulting images will be identical to the images |
| produced if the buffer had come directly from the sensor and was |
| processed with the same settings. |
| |
| The intended use for this format is to allow access to the native |
| raw format buffers coming directly from the camera sensor without |
| any additional conversions or decrease in framerate. |
| |
| See android.scaler.availableInputOutputFormatsMap for the full set of |
| performance guarantees. |
| </notes> |
| </value> |
| <value optional="true" id="0x32315659">YV12 |
| <notes>YCrCb 4:2:0 Planar</notes> |
| </value> |
| <value optional="true" id="0x11">YCrCb_420_SP |
| <notes>NV21</notes> |
| </value> |
| <value id="0x22">IMPLEMENTATION_DEFINED |
| <notes>System internal format, not application-accessible</notes> |
| </value> |
| <value id="0x23">YCbCr_420_888 |
| <notes>Flexible YUV420 Format</notes> |
| </value> |
| <value id="0x21">BLOB |
| <notes>JPEG format</notes> |
| </value> |
| <value id="0x25" hal_version="3.4">RAW10 |
| <notes>RAW10</notes> |
| </value> |
| <value id="0x26" hal_version="3.4">RAW12 |
| <notes>RAW12</notes> |
| </value> |
| <value id="0x20203859" hal_version="3.4">Y8 |
| <notes>Y8</notes> |
| </value> |
| </enum> |
| <description>The list of image formats that are supported by this |
| camera device for output streams.</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <details> |
| All camera devices will support JPEG and YUV_420_888 formats. |
| |
| When set to YUV_420_888, application can access the YUV420 data directly. |
| </details> |
| <hal_details> |
| These format values are from HAL_PIXEL_FORMAT_* in |
| system/core/libsystem/include/system/graphics-base.h. |
| |
| When IMPLEMENTATION_DEFINED is used, the platform |
| gralloc module will select a format based on the usage flags provided |
| by the camera HAL device and the other endpoint of the stream. It is |
| usually used by preview and recording streams, where the application doesn't |
| need to access the image data. |
| |
| YCbCr_420_888 format must be supported by the HAL. When an image stream |
| needs CPU/application direct access, this format will be used. For a MONOCHROME |
| camera device, the pixel value of Cb and Cr planes is 128. |
| |
| The BLOB format must be supported by the HAL. This is used for the JPEG stream. |
| |
| A RAW_OPAQUE buffer should contain only pixel data. It is strongly |
| recommended that any information used by the camera device when |
| processing images is fully expressed by the result metadata |
| for that image buffer. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="availableJpegMinDurations" type="int64" visibility="hidden" deprecated="true" |
| container="array"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>The minimum frame duration that is supported |
| for each resolution in android.scaler.availableJpegSizes. |
| </description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <units>Nanoseconds</units> |
| <range>TODO: Remove property.</range> |
| <details> |
| This corresponds to the minimum steady-state frame duration when only |
| that JPEG stream is active and captured in a burst, with all |
| processing (typically in android.*.mode) set to FAST. |
| |
| When multiple streams are configured, the minimum |
| frame duration will be &gt;= max(individual stream min |
| durations)</details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="availableJpegSizes" type="int32" visibility="hidden" |
| deprecated="true" container="array" typedef="size"> |
| <array> |
| <size>n</size> |
| <size>2</size> |
| </array> |
| <description>The JPEG resolutions that are supported by this camera device.</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <range>TODO: Remove property.</range> |
| <details> |
| The resolutions are listed as `(width, height)` pairs. All camera devices will support |
| sensor maximum resolution (defined by android.sensor.info.activeArraySize). |
| </details> |
| <hal_details> |
| The HAL must include sensor maximum resolution |
| (defined by android.sensor.info.activeArraySize), |
| and should include half/quarter of sensor maximum resolution. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="availableMaxDigitalZoom" type="float" visibility="public" |
| hwlevel="legacy" session_characteristics_key_since="35"> |
| <description>The maximum ratio between both active area width |
| and crop region width, and active area height and |
| crop region height, for android.scaler.cropRegion. |
| </description> |
| <units>Zoom scale factor</units> |
| <range>&gt;=1</range> |
| <details> |
| This represents the maximum amount of zooming possible by |
| the camera device, or equivalently, the minimum cropping |
| window size. |
| |
| Crop regions that have a width or height that is smaller |
| than this ratio allows will be rounded up to the minimum |
| allowed size by the camera device. |
| |
| Starting from API level 30, when using android.control.zoomRatio to zoom in or out, |
| the application must use android.control.zoomRatioRange to query both the minimum and |
| maximum zoom ratio. |
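| |
| For example, the smallest legal crop dimensions under this ratio can be derived as |
| follows (a minimal sketch; `characteristics` is assumed): |
| |
|     Rect active = characteristics.get( |
|             CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE); |
|     float maxZoom = characteristics.get( |
|             CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM); |
|     int minCropWidth = (int) Math.floor(active.width() / maxZoom); |
|     int minCropHeight = (int) Math.floor(active.height() / maxZoom); |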
| </details> |
| <hal_details> |
| If the HAL supports android.control.zoomRatio, this value must be equal to or less than |
| the maximum supported zoomRatio specified in android.control.zoomRatioRange. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="availableProcessedMinDurations" type="int64" visibility="hidden" deprecated="true" |
| container="array"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>For each available processed output size (defined in |
| android.scaler.availableProcessedSizes), this property lists the |
| minimum supportable frame duration for that size. |
| </description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <units>Nanoseconds</units> |
| <details> |
| This should correspond to the frame duration when only that processed |
| stream is active, with all processing (typically in android.*.mode) |
| set to FAST. |
| |
| When multiple streams are configured, the minimum frame duration will |
| be &gt;= max(individual stream min durations). |
| </details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="availableProcessedSizes" type="int32" visibility="hidden" |
| deprecated="true" container="array" typedef="size"> |
| <array> |
| <size>n</size> |
| <size>2</size> |
| </array> |
| <description>The resolutions available for use with |
| processed output streams, such as YV12, NV12, and |
| platform opaque YUV/RGB streams to the GPU or video |
| encoders.</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <details> |
| The resolutions are listed as `(width, height)` pairs. |
| |
| For a given use case, the actual maximum supported resolution |
| may be lower than what is listed here, depending on the destination |
| Surface for the image data. For example, for recording video, |
| the video encoder chosen may have a maximum size limit (e.g. 1080p) |
| smaller than what the camera (e.g. maximum resolution is 3264x2448) |
| can provide. |
| |
| Please reference the documentation for the image data destination to |
| check if it limits the maximum size for image data. |
| </details> |
| <hal_details> |
| For FULL capability devices (`android.info.supportedHardwareLevel == FULL`), |
| the HAL must include all JPEG sizes listed in android.scaler.availableJpegSizes |
| and each below resolution if it is smaller than or equal to the sensor |
| maximum resolution (if they are not listed in JPEG sizes already): |
| |
| * 240p (320 x 240) |
| * 480p (640 x 480) |
| * 720p (1280 x 720) |
| * 1080p (1920 x 1080) |
| |
| For LIMITED capability devices (`android.info.supportedHardwareLevel == LIMITED`), |
| the HAL only has to list up to the maximum video size supported by the device. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="availableRawMinDurations" type="int64" deprecated="true" |
| container="array"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| For each available raw output size (defined in |
| android.scaler.availableRawSizes), this property lists the minimum |
| supportable frame duration for that size. |
| </description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| <units>Nanoseconds</units> |
| <details> |
| Should correspond to the frame duration when only the raw stream is |
| active. |
| |
| When multiple streams are configured, the minimum |
| frame duration will be &gt;= max(individual stream min |
| durations)</details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="availableRawSizes" type="int32" deprecated="true" |
| container="array" typedef="size"> |
| <array> |
| <size>n</size> |
| <size>2</size> |
| </array> |
| <description>The resolutions available for use with raw |
| sensor output streams, listed as width, |
| height</description> |
| <deprecation_description> |
| Not used in HALv3 or newer |
| </deprecation_description> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.scaler.cropRegion" kind="controls"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="availableInputOutputFormatsMap" type="int32" visibility="hidden" |
| typedef="reprocessFormatsMap"> |
| <description>The mapping of image formats that are supported by this |
| camera device for input streams, to their corresponding output formats. |
| </description> |
| <details> |
| All camera devices with at least 1 |
| android.request.maxNumInputStreams will have at least one |
| available input format. |
| |
| The camera device will support the following map of formats, |
| if its dependent capability (android.request.availableCapabilities) is supported: |
| |
| Input Format | Output Format | Capability |
| :-------------------------------------------------|:--------------------------------------------------|:---------- |
| {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#JPEG} | PRIVATE_REPROCESSING |
| {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#YUV_420_888} | PRIVATE_REPROCESSING |
| {@link android.graphics.ImageFormat#YUV_420_888} | {@link android.graphics.ImageFormat#JPEG} | YUV_REPROCESSING |
| {@link android.graphics.ImageFormat#YUV_420_888} | {@link android.graphics.ImageFormat#YUV_420_888} | YUV_REPROCESSING |
| |
| PRIVATE refers to a device-internal format that is not directly application-visible. A |
| PRIVATE input surface can be acquired by {@link android.media.ImageReader#newInstance} |
| with {@link android.graphics.ImageFormat#PRIVATE} as the format. |
| |
| For a PRIVATE_REPROCESSING-capable camera device, using the PRIVATE format as either input |
| or output will never hurt maximum frame rate (i.e. {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration |
| getOutputStallDuration(ImageFormat.PRIVATE, size)} is always 0). |
| |
| Attempting to configure an input stream with output streams not |
| listed as available in this map is not valid. |
| |
| Additionally, if the camera device is MONOCHROME with Y8 support, it will also support |
| the following map of formats if its dependent capability |
| (android.request.availableCapabilities) is supported: |
| |
| Input Format | Output Format | Capability |
| :-------------------------------------------------|:--------------------------------------------------|:---------- |
| {@link android.graphics.ImageFormat#PRIVATE} | {@link android.graphics.ImageFormat#Y8} | PRIVATE_REPROCESSING |
| {@link android.graphics.ImageFormat#Y8} | {@link android.graphics.ImageFormat#JPEG} | YUV_REPROCESSING |
| {@link android.graphics.ImageFormat#Y8} | {@link android.graphics.ImageFormat#Y8} | YUV_REPROCESSING |
| |
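| On the application side, a PRIVATE reprocessing input can be wired up roughly as |
| follows (a minimal sketch; `session`, `width`, and `height` are assumptions): |
| |
|     // Output side: a PRIVATE ImageReader that captures reprocessing sources. |
|     ImageReader privateReader = ImageReader.newInstance( |
|             width, height, ImageFormat.PRIVATE, /*maxImages*/ 2); |
|     // Input side: an ImageWriter that queues images back into the session. |
|     ImageWriter inputWriter = ImageWriter.newInstance( |
|             session.getInputSurface(), /*maxImages*/ 2); |
| |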
| </details> |
| <hal_details> |
| For the formats, see `system/core/libsystem/include/system/graphics-base.h` for a |
| definition of the image format enumerations. The PRIVATE format refers to the |
| HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED format. The HAL could determine |
| the actual format by using the gralloc usage flags. |
| For ZSL use case in particular, the HAL could choose appropriate format (partially |
| processed YUV or RAW based format) by checking the format and GRALLOC_USAGE_HW_CAMERA_ZSL. |
| See camera3.h for more details. |
| |
| This value is encoded as a variable-size array-of-arrays. |
| The inner array always contains `[format, length, ...]` where |
| `...` has `length` elements. An inner array is followed by another |
| inner array if the total metadata entry size hasn't yet been exceeded. |
| |
| A code sample to read/write this encoding (with a device that |
| supports reprocessing IMPLEMENTATION_DEFINED to YUV_420_888 and JPEG, |
| and reprocessing YUV_420_888 to YUV_420_888 and JPEG): |
| |
| // reading ('entry' is the camera metadata entry containing this encoding) |
| int32_t* contents = &entry.i32[0]; |
| for (size_t i = 0; i < entry.count; ) { |
| int32_t format = contents[i++]; |
| int32_t length = contents[i++]; |
| int32_t output_formats[length]; |
| memcpy(&output_formats[0], &contents[i], |
| length * sizeof(int32_t)); |
| i += length; |
| } |
| |
| // writing (static example, PRIVATE_REPROCESSING + YUV_REPROCESSING) |
| int32_t contents[] = { |
| IMPLEMENTATION_DEFINED, 2, YUV_420_888, BLOB, |
| YUV_420_888, 2, YUV_420_888, BLOB, |
| }; |
| update_camera_metadata_entry(metadata, index, &contents[0], |
| sizeof(contents)/sizeof(contents[0]), &updated_entry); |
| |
| If the HAL claims to support any of the capabilities listed in the |
| above details, then it must also support all the input-output |
| combinations listed for that capability. It can optionally support |
| additional formats if it so chooses. |
| </hal_details> |
| <tag id="REPROC" /> |
| </entry> |
| <entry name="availableStreamConfigurations" type="int32" visibility="ndk_public" |
| enum="true" container="array" typedef="streamConfiguration" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available stream configurations that this |
| camera device supports |
| (i.e. format, width, height, output/input stream). |
| </description> |
| <details> |
| The configurations are listed as `(format, width, height, input?)` |
| tuples. |
| |
| For a given use case, the actual maximum supported resolution |
| may be lower than what is listed here, depending on the destination |
| Surface for the image data. For example, for recording video, |
| the video encoder chosen may have a maximum size limit (e.g. 1080p) |
| smaller than what the camera (e.g. maximum resolution is 3264x2448) |
| can provide. |
| |
| Please reference the documentation for the image data destination to |
| check if it limits the maximum size for image data. |
| |
| Not all output formats may be supported in a configuration with |
| an input stream of a particular format. For more details, see |
| android.scaler.availableInputOutputFormatsMap. |
| |
| For applications targeting SDK version older than 31, the following table |
| describes the minimum required output stream configurations based on the hardware level |
| (android.info.supportedHardwareLevel): |
| |
| Format | Size | Hardware Level | Notes |
| :-------------:|:--------------------------------------------:|:--------------:|:--------------: |
| JPEG | android.sensor.info.activeArraySize | Any | |
| JPEG | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize |
| JPEG | 1280x720 (720p) | Any | if 720p <= activeArraySize |
| JPEG | 640x480 (480p) | Any | if 480p <= activeArraySize |
| JPEG | 320x240 (240p) | Any | if 240p <= activeArraySize |
| YUV_420_888 | all output sizes available for JPEG | FULL | |
| YUV_420_888 | all output sizes available for JPEG, up to the maximum video size | LIMITED | |
| IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any | |
| |
| For applications targeting SDK version 31 or newer, if the mobile device declares to be |
| media performance class 12 or higher by setting |
| {@link android.os.Build.VERSION#MEDIA_PERFORMANCE_CLASS} to be 31 or larger, |
| the primary camera devices (first rear/front camera in the camera ID list) will not |
| support JPEG sizes smaller than 1080p. If the application configures a JPEG stream |
| smaller than 1080p, the camera device will round up the JPEG image size to at least |
| 1080p. The requirements for IMPLEMENTATION_DEFINED and YUV_420_888 stay the same. |
| These new minimum required output stream configurations are illustrated in the table below: |
| |
| Format | Size | Hardware Level | Notes |
| :-------------:|:--------------------------------------------:|:--------------:|:--------------: |
| JPEG | android.sensor.info.activeArraySize | Any | |
| JPEG | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize |
| YUV_420_888 | android.sensor.info.activeArraySize | FULL | |
| YUV_420_888 | 1920x1080 (1080p) | FULL | if 1080p <= activeArraySize |
| YUV_420_888 | 1280x720 (720p) | FULL | if 720p <= activeArraySize |
| YUV_420_888 | 640x480 (480p) | FULL | if 480p <= activeArraySize |
| YUV_420_888 | 320x240 (240p) | FULL | if 240p <= activeArraySize |
| YUV_420_888 | all output sizes available for FULL hardware level, up to the maximum video size | LIMITED | |
| IMPLEMENTATION_DEFINED | same as YUV_420_888 | Any | |
| |
| For applications targeting SDK version 31 or newer, if the mobile device doesn't declare |
| to be media performance class 12 or better by setting |
| {@link android.os.Build.VERSION#MEDIA_PERFORMANCE_CLASS} to be 31 or larger, |
| or if the camera device isn't a primary rear/front camera, the minimum required output |
| stream configurations are the same as for applications targeting SDK version older than |
| 31. |
| |
| Refer to android.request.availableCapabilities for additional |
| mandatory stream configurations on a per-capability basis. |
| |
| Exception on 176x144 (QCIF) resolution: camera devices usually have a fixed capability for |
| downscaling from larger resolution to smaller, and the QCIF resolution sometimes is not |
| fully supported due to this limitation on devices with high-resolution image sensors. |
| Therefore, trying to configure a QCIF resolution stream together with any other |
| stream larger than 1920x1080 resolution (either width or height) might not be supported, |
| and capture session creation will fail if it is not. |
| |
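| On the Java side these configurations surface through |
| android.hardware.camera2.params.StreamConfigurationMap; for example (a minimal sketch; |
| `characteristics` is assumed): |
| |
|     StreamConfigurationMap map = characteristics.get( |
|             CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); |
|     // All sizes the device can output in the JPEG format. |
|     Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG); |
| |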
| </details> |
| <hal_details> |
| It is recommended (but not mandatory) to also include half/quarter |
| of sensor maximum resolution for JPEG formats (regardless of hardware |
| level). |
| |
| (The following is a rewording of the above required table): |
| |
| For the JPEG format, the sizes may be restricted by the conditions below: |
| |
| * The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones |
| (e.g. 4:3, 16:9, 3:2 etc.). If the sensor maximum resolution |
| (defined by android.sensor.info.activeArraySize) has an aspect ratio other than these, |
| it does not have to be included in the supported JPEG sizes. |
| * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as |
| the dimensions being a multiple of 16. |
| |
| Therefore, the maximum JPEG size may be smaller than sensor maximum resolution. |
| However, the largest JPEG size must be as close as possible to the sensor maximum |
| resolution given the above constraints. It is required that, after aspect ratio adjustments, |
| any additional size reduction due to other issues is less than 3% in area. For example, |
| if the sensor maximum resolution is 3280x2464, the maximum JPEG size has a 4:3 aspect |
| ratio, and the JPEG encoder alignment requirement is 16, then the maximum JPEG size will be |
| 3264x2448. |
| |
| For FULL capability devices (`android.info.supportedHardwareLevel == FULL`), |
| the HAL must include all YUV_420_888 sizes that have JPEG sizes listed |
| here as output streams. |
| |
| It must also include each below resolution if it is smaller than or |
| equal to the sensor maximum resolution (for both YUV_420_888 and JPEG |
| formats), as output streams: |
| |
| * 240p (320 x 240) |
| * 480p (640 x 480) |
| * 720p (1280 x 720) |
| * 1080p (1920 x 1080) |
| |
| Note that for primary cameras (first rear/front facing camera in the camera ID list) |
| on a device with {@link android.os.Build.VERSION#MEDIA_PERFORMANCE_CLASS} set to |
| 31 or larger, the camera framework filters out JPEG sizes smaller than 1080p depending on |
| applications' targetSdkLevel. The camera HAL must still support the smaller JPEG sizes |
| to maintain backward compatibility. |
| |
| For LIMITED capability devices |
| (`android.info.supportedHardwareLevel == LIMITED`), |
| the HAL only has to list up to the maximum video size |
| supported by the device. |
| |
| Regardless of hardware level, every output resolution available for |
| YUV_420_888 must also be available for IMPLEMENTATION_DEFINED. |
| |
| This supersedes the following fields, which are now deprecated: |
| |
| * availableFormats |
| * available[Processed,Raw,Jpeg]Sizes |
| </hal_details> |
| </entry> |
| <entry name="availableMinFrameDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hwlevel="legacy"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| This should correspond to the frame duration when only that |
| stream is active, with all processing (typically in android.*.mode) |
| set to either OFF or FAST. |
| |
| When multiple streams are used in a request, the minimum frame |
| duration will be max(individual stream min durations). |
| |
| See android.sensor.frameDuration and |
| android.scaler.availableStallDurations for more details about |
| calculating the max frame rate. |
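| |
| For example, the maximum frame rate for a single stream follows directly from its |
| minimum frame duration (a minimal sketch; `map` is the device's StreamConfigurationMap |
| and `size` an assumed supported output size): |
| |
|     long minDurationNs = |
|             map.getOutputMinFrameDuration(ImageFormat.YUV_420_888, size); |
|     // 1e9 nanoseconds per second divided by the per-frame duration. |
|     double maxFps = 1e9 / minDurationNs; |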
| </details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="availableStallDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hwlevel="legacy"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| A stall duration is how much extra time would get added |
| to the normal minimum frame duration for a repeating request |
| that has streams with non-zero stall. |
| |
| For example, consider JPEG captures which have the following |
| characteristics: |
| |
| * JPEG streams act like processed YUV streams in requests for which |
| they are not included; in requests in which they are directly |
| referenced, they act as JPEG streams. This is because supporting a |
| JPEG stream requires the underlying YUV data to always be ready for |
| use by a JPEG encoder, but the encoder will only be used (and impact |
| frame duration) on requests that actually reference a JPEG stream. |
| * The JPEG processor can run concurrently to the rest of the camera |
| pipeline, but cannot process more than 1 capture at a time. |
| |
| In other words, using a repeating YUV request would result |
| in a steady frame rate (let's say it's 30 FPS). If a single |
| JPEG request is submitted periodically, the frame rate will stay |
| at 30 FPS (as long as we wait for the previous JPEG to return each |
| time). If we try to submit a repeating YUV + JPEG request, then |
| the frame rate will drop from 30 FPS. |
| |
| In general, submitting a new request with a non-zero stall time |
| stream will _not_ cause a frame rate drop unless there are still |
| outstanding buffers for that stream from previous requests. |
| |
| Submitting a repeating request with streams (call this `S`) |
| is equivalent to setting the minimum frame duration to |
| the normal minimum frame duration corresponding to `S` plus |
| the maximum stall duration for `S`. |
| |
| If interleaving requests with and without a stall duration, |
| a request will stall by the maximum of the remaining times |
| for each can-stall stream with outstanding buffers. |
| |
| This means that a stalling request will not have an exposure start |
| until the stall has completed. |
| |
| This should correspond to the stall duration when only that stream is |
| active, with all processing (typically in android.*.mode) set to FAST |
| or OFF. Setting any of the processing modes to HIGH_QUALITY |
| effectively results in an indeterminate stall duration for all |
| streams in a request (the regular stall calculation rules are |
| ignored). |
| |
| The following formats may always have a stall duration: |
| |
| * {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG} |
| * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16} |
| |
| The following formats will never have a stall duration: |
| |
| * {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888} |
| * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10} |
| * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12} |
| * {@link android.graphics.ImageFormat#Y8|AIMAGE_FORMAT_Y8} |
| |
| All other formats may or may not have an allowed stall duration on |
| a per-capability basis; refer to android.request.availableCapabilities |
| for more details. |
| |
| See android.sensor.frameDuration for more information about |
| calculating the max frame rate (absent stalls). |
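
As a hedged Java sketch of the rule above (hypothetical helper names; assumes a
`StreamConfigurationMap` and parallel `formats`/`sizes` arrays describing the
repeating request's streams):

    // Sketch only: the worst-case frame duration, when every stalling stream
    // has outstanding buffers, is max(per-stream min durations) plus
    // max(per-stream stall durations).
    static long worstCaseFrameDuration(StreamConfigurationMap map,
            int[] formats, android.util.Size[] sizes) {
        long minDuration = 0;
        long maxStall = 0;
        for (int i = 0; i < formats.length; i++) {
            minDuration = Math.max(minDuration,
                    map.getOutputMinFrameDuration(formats[i], sizes[i]));
            maxStall = Math.max(maxStall,
                    map.getOutputStallDuration(formats[i], sizes[i]));
        }
        return minDuration + maxStall; // nanoseconds
    }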
| </details> |
| <hal_details> |
| If possible, it is recommended that all non-JPEG formats |
| (such as RAW16) should not have a stall duration. RAW10, RAW12, RAW_OPAQUE |
| and IMPLEMENTATION_DEFINED must not have stall durations. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="streamConfigurationMap" type="int32" visibility="java_public" |
| synthetic="true" typedef="streamConfigurationMap" |
| hwlevel="legacy"> |
| <description>The available stream configurations that this |
| camera device supports; also includes the minimum frame durations |
| and the stall durations for each format/size combination. |
| </description> |
| <details> |
| All camera devices will support sensor maximum resolution (defined by |
| android.sensor.info.activeArraySize) for the JPEG format. |
| |
| For a given use case, the actual maximum supported resolution |
| may be lower than what is listed here, depending on the destination |
| Surface for the image data. For example, for recording video, |
| the video encoder chosen may have a maximum size limit (e.g. 1080p) |
| smaller than what the camera (e.g. maximum resolution is 3264x2448) |
| can provide. |
| |
| Please reference the documentation for the image data destination to |
| check if it limits the maximum size for image data. |
| |
| For applications targeting SDK version older than 31, the following table |
| describes the minimum required output stream configurations based on the |
| hardware level (android.info.supportedHardwareLevel): |
| |
| Format | Size | Hardware Level | Notes |
| :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------: |
| {@link android.graphics.ImageFormat#JPEG} | android.sensor.info.activeArraySize (*1) | Any | |
| {@link android.graphics.ImageFormat#JPEG} | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize |
| {@link android.graphics.ImageFormat#JPEG} | 1280x720 (720p) | Any | if 720p <= activeArraySize |
| {@link android.graphics.ImageFormat#JPEG} | 640x480 (480p) | Any | if 480p <= activeArraySize |
| {@link android.graphics.ImageFormat#JPEG} | 320x240 (240p) | Any | if 240p <= activeArraySize |
| {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for JPEG | FULL | |
| {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for JPEG, up to the maximum video size | LIMITED | |
| {@link android.graphics.ImageFormat#PRIVATE} | same as YUV_420_888 | Any | |
| |
For applications targeting SDK version 31 or newer, if the mobile device declares
media performance class 12 or higher by setting
{@link android.os.Build.VERSION#MEDIA_PERFORMANCE_CLASS} to 31 or larger,
the primary camera devices (first rear/front camera in the camera ID list) will not
| support JPEG sizes smaller than 1080p. If the application configures a JPEG stream |
| smaller than 1080p, the camera device will round up the JPEG image size to at least |
| 1080p. The requirements for IMPLEMENTATION_DEFINED and YUV_420_888 stay the same. |
These new minimum required output stream configurations are illustrated in the table below:
| |
| Format | Size | Hardware Level | Notes |
| :-------------------------------------------------:|:--------------------------------------------:|:--------------:|:--------------: |
| {@link android.graphics.ImageFormat#JPEG} | android.sensor.info.activeArraySize (*1) | Any | |
| {@link android.graphics.ImageFormat#JPEG} | 1920x1080 (1080p) | Any | if 1080p <= activeArraySize |
| {@link android.graphics.ImageFormat#YUV_420_888} | android.sensor.info.activeArraySize | FULL | |
| {@link android.graphics.ImageFormat#YUV_420_888} | 1920x1080 (1080p) | FULL | if 1080p <= activeArraySize |
{@link android.graphics.ImageFormat#YUV_420_888}   | 1280x720 (720p)                               | FULL           | if 720p <= activeArraySize
| {@link android.graphics.ImageFormat#YUV_420_888} | 640x480 (480p) | FULL | if 480p <= activeArraySize |
| {@link android.graphics.ImageFormat#YUV_420_888} | 320x240 (240p) | FULL | if 240p <= activeArraySize |
| {@link android.graphics.ImageFormat#YUV_420_888} | all output sizes available for FULL hardware level, up to the maximum video size | LIMITED | |
| {@link android.graphics.ImageFormat#PRIVATE} | same as YUV_420_888 | Any | |
| |
For applications targeting SDK version 31 or newer, if the mobile device doesn't declare
media performance class 12 or higher by setting
{@link android.os.Build.VERSION#MEDIA_PERFORMANCE_CLASS} to 31 or larger,
| or if the camera device isn't a primary rear/front camera, the minimum required output |
| stream configurations are the same as for applications targeting SDK version older than |
| 31. |
| |
| Refer to android.request.availableCapabilities and |
| [the table](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#legacy-level-guaranteed-configurations) |
| for additional mandatory stream configurations on a per-capability basis. |
| |
*1: For the JPEG format, the sizes may be restricted by the conditions below:
| |
* The HAL may choose the aspect ratio of each JPEG size to be one of the well-known ones
(e.g. 4:3, 16:9, 3:2, etc.). If the sensor maximum resolution
(defined by android.sensor.info.activeArraySize) has an aspect ratio other than these,
it does not have to be included in the supported JPEG sizes.
| * Some hardware JPEG encoders may have pixel boundary alignment requirements, such as |
| the dimensions being a multiple of 16. |
Therefore, the maximum JPEG size may be smaller than the sensor maximum resolution.
However, the largest JPEG size must be as close as possible to the sensor maximum
resolution given the above constraints: after aspect ratio adjustments, any additional
size reduction due to other issues must be less than 3% in area. For example,
if the sensor maximum resolution is 3280x2464, the maximum JPEG size has a 4:3 aspect
ratio, and the JPEG encoder alignment requirement is 16, then the maximum JPEG size will be
3264x2448.
| |
Exception for the 176x144 (QCIF) resolution: camera devices usually have a fixed limit on
how far they can downscale from larger resolutions, and on devices with high-resolution
image sensors the QCIF resolution can therefore sometimes not be fully supported.
As a result, configuring a QCIF stream together with any other stream larger than
1920x1080 (in either width or height) might not be supported, and capture session
creation will fail if it is not.
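
For reference, a minimal Java sketch of querying this map from the public API
(assuming an already-obtained `cameraManager` and `cameraId`; the
characteristics call throws CameraAccessException):

    // Sketch: list the guaranteed JPEG output sizes for this camera device.
    CameraCharacteristics chars =
            cameraManager.getCameraCharacteristics(cameraId);
    StreamConfigurationMap map =
            chars.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    android.util.Size[] jpegSizes = map.getOutputSizes(ImageFormat.JPEG);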
| |
| </details> |
| <hal_details> |
| Do not set this property directly |
| (it is synthetic and will not be available at the HAL layer); |
| set the android.scaler.availableStreamConfigurations instead. |
| |
| Not all output formats may be supported in a configuration with |
| an input stream of a particular format. For more details, see |
| android.scaler.availableInputOutputFormatsMap. |
| |
| It is recommended (but not mandatory) to also include half/quarter |
| of sensor maximum resolution for JPEG formats (regardless of hardware |
| level). |
| |
| (The following is a rewording of the above required table): |
| |
| The HAL must include sensor maximum resolution (defined by |
| android.sensor.info.activeArraySize). |
| |
| For FULL capability devices (`android.info.supportedHardwareLevel == FULL`), |
| the HAL must include all YUV_420_888 sizes that have JPEG sizes listed |
| here as output streams. |
| |
| It must also include each below resolution if it is smaller than or |
| equal to the sensor maximum resolution (for both YUV_420_888 and JPEG |
| formats), as output streams: |
| |
| * 240p (320 x 240) |
| * 480p (640 x 480) |
| * 720p (1280 x 720) |
| * 1080p (1920 x 1080) |
| |
| Note that for Performance Class 12 or higher primary cameras (first rear/front facing |
| camera in the camera ID list), camera framework filters out JPEG sizes smaller than |
| 1080p depending on applications' targetSdkLevel. The camera HAL must still support the |
| smaller JPEG sizes to maintain backward compatibility. |
| |
| For LIMITED capability devices |
| (`android.info.supportedHardwareLevel == LIMITED`), |
| the HAL only has to list up to the maximum video size |
| supported by the device. |
| |
| Regardless of hardware level, every output resolution available for |
| YUV_420_888 must also be available for IMPLEMENTATION_DEFINED. |
| |
| This supersedes the following fields, which are now deprecated: |
| |
| * availableFormats |
| * available[Processed,Raw,Jpeg]Sizes |
| </hal_details> |
| </entry> |
| <entry name="croppingType" type="byte" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value>CENTER_ONLY |
| <notes> |
| The camera device only supports centered crop regions. |
| </notes> |
| </value> |
| <value>FREEFORM |
| <notes> |
| The camera device supports arbitrarily chosen crop regions. |
| </notes> |
| </value> |
| </enum> |
| <description>The crop type that this camera device supports.</description> |
| <details> |
| When passing a non-centered crop region (android.scaler.cropRegion) to a camera |
| device that only supports CENTER_ONLY cropping, the camera device will move the |
| crop region to the center of the sensor active array (android.sensor.info.activeArraySize) |
| and keep the crop region width and height unchanged. The camera device will return the |
| final used crop region in metadata result android.scaler.cropRegion. |
| |
| Camera devices that support FREEFORM cropping will support any crop region that |
| is inside of the active array. The camera device will apply the same crop region and |
| return the final used crop region in capture result metadata android.scaler.cropRegion. |
| |
| Starting from API level 30, |
| |
| * If the camera device supports FREEFORM cropping, in order to do FREEFORM cropping, the |
| application must set android.control.zoomRatio to 1.0, and use android.scaler.cropRegion |
| for zoom. |
| * To do CENTER_ONLY zoom, the application has below 2 options: |
| 1. Set android.control.zoomRatio to 1.0; adjust zoom by android.scaler.cropRegion. |
| 2. Adjust zoom by android.control.zoomRatio; use android.scaler.cropRegion to crop |
| the field of view vertically (letterboxing) or horizontally (pillarboxing), but not |
| windowboxing. |
| * Setting android.control.zoomRatio to values different than 1.0 and |
| android.scaler.cropRegion to be windowboxing at the same time are not supported. In this |
| case, the camera framework will override the android.scaler.cropRegion to be the active |
| array. |
| |
| LEGACY capability devices will only support CENTER_ONLY cropping. |
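
A minimal Java sketch of choosing a zoom mechanism based on this key (the
`builder` request builder and `desiredCrop` rectangle are hypothetical; `chars`
is the device's CameraCharacteristics):

    // Sketch: use cropRegion for freeform pan/zoom, zoomRatio otherwise.
    Integer croppingType =
            chars.get(CameraCharacteristics.SCALER_CROPPING_TYPE);
    if (croppingType != null
            && croppingType == CameraMetadata.SCALER_CROPPING_TYPE_FREEFORM) {
        // FREEFORM: keep zoomRatio at 1.0 and zoom via the crop region.
        builder.set(CaptureRequest.CONTROL_ZOOM_RATIO, 1.0f);
        builder.set(CaptureRequest.SCALER_CROP_REGION, desiredCrop);
    } else {
        // CENTER_ONLY: zoom via zoomRatio; any crop region will be centered.
        builder.set(CaptureRequest.CONTROL_ZOOM_RATIO, 2.0f);
    }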
| </details> |
| <hal_details> |
| If the HAL supports android.control.zoomRatio, this tag must be set to CENTER_ONLY. |
| </hal_details> |
| </entry> |
| <entry name="availableRecommendedStreamConfigurations" type="int32" visibility="ndk_public" |
| optional="true" enum="true" container="array" typedef="recommendedStreamConfiguration" |
| hal_version="3.4"> |
| <array> |
| <size>n</size> |
| <size>5</size> |
| </array> |
| <enum> |
| <value id="0x0">PREVIEW |
| <notes> |
| Preview must only include non-stalling processed stream configurations with |
| output formats like |
| {@link android.graphics.ImageFormat#YUV_420_888|AIMAGE_FORMAT_YUV_420_888}, |
| {@link android.graphics.ImageFormat#PRIVATE|AIMAGE_FORMAT_PRIVATE}, etc. |
| </notes> |
| </value> |
| <value id="0x1">RECORD |
| <notes> |
| Video record must include stream configurations that match the advertised |
| supported media profiles {@link android.media.CamcorderProfile} with |
| IMPLEMENTATION_DEFINED format. |
| </notes> |
| </value> |
| <value id="0x2">VIDEO_SNAPSHOT |
| <notes> |
| Video snapshot must include stream configurations at least as big as |
| the maximum RECORD resolutions and only with |
| {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG output format}. |
Additionally, the configurations shouldn't cause preview glitches and should be able to
run at 30 fps.
| </notes> |
| </value> |
| <value id="0x3">SNAPSHOT |
| <notes> |
| Recommended snapshot stream configurations must include at least one with |
| size close to android.sensor.info.activeArraySize and |
| {@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG output format}. |
Taking into account restrictions on aspect ratio, alignment, etc., the area of the
maximum suggested size shouldn't be less than 97% of the sensor array size area.
| </notes> |
| </value> |
| <value id="0x4">ZSL |
| <notes> |
| If supported, recommended input stream configurations must only be advertised with |
| ZSL along with other processed and/or stalling output formats. |
| </notes> |
| </value> |
| <value id="0x5">RAW |
| <notes> |
| If supported, recommended raw stream configurations must only include RAW based |
| output formats. |
| </notes> |
| </value> |
| <value id="0x6">LOW_LATENCY_SNAPSHOT |
| <notes> |
If supported, the recommended low latency stream configurations must have
end-to-end latency that does not exceed 200 ms under standard operating conditions
(reasonable light levels, unloaded system) and using the TEMPLATE_STILL_CAPTURE
template. This is primarily for listing configurations for the
{@link android.graphics.ImageFormat#JPEG|AIMAGE_FORMAT_JPEG JPEG output format};
however, other supported output formats can be added as well.
| </notes> |
| </value> |
| <value id="0x7">PUBLIC_END |
| </value> |
| <value id="0x8" hal_version="3.8">10BIT_OUTPUT |
| <notes> |
| If supported, the recommended 10-bit output stream configurations must include |
| a subset of the advertised {@link android.graphics.ImageFormat#YCBCR_P010} and |
| {@link android.graphics.ImageFormat#PRIVATE} outputs that are optimized for power |
| and performance when registered along with a supported 10-bit dynamic range profile. |
See android.hardware.camera2.params.OutputConfiguration#setDynamicRangeProfile for
| details. |
| </notes> |
| </value> |
| <value id="0x9" hal_version="3.8">PUBLIC_END_3_8 |
| </value> |
| <value id="0x18">VENDOR_START |
| <notes> |
| Vendor defined use cases. These depend on the vendor implementation. |
| </notes> |
| </value> |
| </enum> |
| <description>Recommended stream configurations for common client use cases. |
| </description> |
<details>Optional subset of the android.scaler.availableStreamConfigurations list that
contains similar tuples, extended with a usecase bit field
(i.e. width, height, format, output/input stream, usecase bit field).
| Camera devices will be able to suggest particular stream configurations which are |
| power and performance efficient for specific use cases. For more information about |
| retrieving the suggestions see |
| {@link android.hardware.camera2.CameraCharacteristics#getRecommendedStreamConfigurationMap}. |
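
A brief Java usage sketch (assuming `chars` is the device's
CameraCharacteristics; the map is null when the HAL publishes no
recommendations):

    // Sketch: query the recommended configurations for the preview use case.
    RecommendedStreamConfigurationMap previewConfigs =
            chars.getRecommendedStreamConfigurationMap(
                    RecommendedStreamConfigurationMap.USECASE_PREVIEW);
    if (previewConfigs != null) {
        Set<Size> privateSizes =
                previewConfigs.getOutputSizes(ImageFormat.PRIVATE);
    }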
| </details> |
| <ndk_details> |
| The data representation is int[5], which maps to |
| (width, height, format, output/input stream, usecase bit field). The array can be |
| parsed using the following pseudo code: |
| |
    struct StreamConfiguration {
        int32_t format;
        int32_t width;
        int32_t height;
        int32_t isInput;
    };

    void getPreferredStreamConfigurations(
            int32_t *array, size_t count, int32_t usecaseId,
            Vector<StreamConfiguration> *scs) {
        const size_t STREAM_CONFIGURATION_SIZE = 5;
        const size_t STREAM_WIDTH_OFFSET = 0;
        const size_t STREAM_HEIGHT_OFFSET = 1;
        const size_t STREAM_FORMAT_OFFSET = 2;
        const size_t STREAM_IS_INPUT_OFFSET = 3;
        const size_t STREAM_USECASE_BITMAP_OFFSET = 4;

        for (size_t i = 0; i < count; i += STREAM_CONFIGURATION_SIZE) {
            int32_t width = array[i + STREAM_WIDTH_OFFSET];
            int32_t height = array[i + STREAM_HEIGHT_OFFSET];
            int32_t format = array[i + STREAM_FORMAT_OFFSET];
            int32_t isInput = array[i + STREAM_IS_INPUT_OFFSET];
            int32_t supportedUsecases = array[i + STREAM_USECASE_BITMAP_OFFSET];
            // Collect configurations that advertise the requested use case bit.
            if (supportedUsecases & (1 << usecaseId)) {
                StreamConfiguration sc = {format, width, height, isInput};
                scs->add(sc);
            }
        }
    }
| |
| </ndk_details> |
| <hal_details> |
| There are some requirements that need to be considered regarding the usecases and the |
| suggested configurations: |
| |
* If android.scaler.availableRecommendedStreamConfigurations is set, then recommended
stream configurations must be present for all mandatory usecases: PREVIEW,
SNAPSHOT, RECORD, and VIDEO_SNAPSHOT. ZSL and RAW are
required depending on device capabilities; see android.request.availableCapabilities.
* Non-existing usecases and non-vendor usecases within the range
(RAW : VENDOR_START] are prohibited, as are stream configurations not
present in the exhaustive android.scaler.availableStreamConfigurations list.
| |
For example, suppose the camera device supports only 4K and 1080p, and both resolutions
are recommended for all mandatory usecases except preview, which can run efficiently only
at 1080p. The array may then look like this:
| |
| [3840, 2160, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, |
| ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, |
| (1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD | |
| 1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT | |
| 1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT), |
| |
| 1920, 1080, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, |
| ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT, |
| (1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_PREVIEW | |
| 1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_RECORD | |
| 1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_SNAPSHOT | |
| 1<< ANDROID_SCALER_AVAILABLE_RECOMMENDED_STREAM_CONFIGURATIONS_VIDEO_SNAPSHOT)] |
| |
| </hal_details> |
| </entry> |
| <entry name="availableRecommendedInputOutputFormatsMap" type="int32" visibility="ndk_public" |
| optional="true" typedef="reprocessFormatsMap" hal_version="3.4"> |
| <description>Recommended mappings of image formats that are supported by this |
| camera device for input streams, to their corresponding output formats. |
| </description> |
| <details> |
| This is a recommended subset of the complete list of mappings found in |
| android.scaler.availableInputOutputFormatsMap. The same requirements apply here as well. |
The list, however, doesn't need to contain all available and supported mappings. Instead,
developers must list only recommended and efficient entries.
If set, the information will be available in the ZERO_SHUTTER_LAG recommended stream
configuration; see
{@link android.hardware.camera2.CameraCharacteristics#getRecommendedStreamConfigurationMap}.
| </details> |
| <hal_details> |
| For a code sample of the required data encoding please check |
| android.scaler.availableInputOutputFormatsMap. |
| </hal_details> |
| <tag id="REPROC" /> |
| </entry> |
| <entry name="mandatoryStreamCombinations" type="int32" visibility="java_public" |
| synthetic="true" container="array" typedef="mandatoryStreamCombination" hwlevel="limited"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| An array of mandatory stream combinations generated according to the camera device |
| {@link android.hardware.camera2.CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL} |
| and {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES}. |
| This is an app-readable conversion of the mandatory stream combination |
| [tables](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#legacy-level-guaranteed-configurations). |
| </description> |
| <details> |
| The array of |
| {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is |
| generated according to the documented |
[guideline](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#legacy-level-guaranteed-configurations)
based on the specific device level and capabilities.
| Clients can use the array as a quick reference to find an appropriate camera stream |
| combination. |
| As per documentation, the stream combinations with given PREVIEW, RECORD and |
| MAXIMUM resolutions and anything smaller from the list given by |
| {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputSizes} are |
| guaranteed to work. |
| For a physical camera not independently exposed in |
| {@link android.hardware.camera2.CameraManager#getCameraIdList}, the mandatory stream |
| combinations for that physical camera Id are also generated, so that the application can |
| configure them as physical streams via the logical camera. |
| The mandatory stream combination array will be {@code null} in case the device is not |
| backward compatible. |
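
A short Java sketch of scanning this array (assuming `chars` is the device's
CameraCharacteristics):

    // Sketch: enumerate the mandatory combinations and their streams.
    MandatoryStreamCombination[] combinations = chars.get(
            CameraCharacteristics.SCALER_MANDATORY_STREAM_COMBINATIONS);
    if (combinations != null) {
        for (MandatoryStreamCombination combo : combinations) {
            for (MandatoryStreamCombination.MandatoryStreamInformation info :
                    combo.getStreamsInformation()) {
                int format = info.getFormat();
                List<Size> sizes = info.getAvailableSizes();
                // Pick surfaces matching these formats/sizes, then create
                // the capture session as usual.
            }
        }
    }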
| </details> |
| <hal_details> |
| Do not set this property directly |
| (it is synthetic and will not be available at the HAL layer). |
| </hal_details> |
| </entry> |
| <entry name="mandatoryConcurrentStreamCombinations" type="int32" visibility="java_public" |
| synthetic="true" container="array" typedef="mandatoryStreamCombination"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| An array of mandatory concurrent stream combinations. |
| This is an app-readable conversion of the concurrent mandatory stream combination |
| [tables](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#concurrent-stream-guaranteed-configurations). |
| </description> |
| <details> |
| The array of |
| {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is |
| generated according to the documented |
| [guideline](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#concurrent-stream-guaranteed-configurations) |
| for each device which has its Id present in the set returned by |
| {@link android.hardware.camera2.CameraManager#getConcurrentCameraIds}. |
| Clients can use the array as a quick reference to find an appropriate camera stream |
| combination. |
| The mandatory stream combination array will be {@code null} in case the device is not a |
| part of at least one set of combinations returned by |
| {@link android.hardware.camera2.CameraManager#getConcurrentCameraIds}. |
| </details> |
| <hal_details> |
| Do not set this property directly |
| (it is synthetic and will not be available at the HAL layer). |
| </hal_details> |
| </entry> |
| <entry name="availableRotateAndCropModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList" |
| hal_version="3.5"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of rotate-and-crop modes for android.scaler.rotateAndCrop that are supported by this camera device. |
| </description> |
| <range>Any value listed in android.scaler.rotateAndCrop</range> |
| <details> |
| This entry lists the valid modes for android.scaler.rotateAndCrop for this camera device. |
| |
| Starting with API level 30, all devices will list at least `ROTATE_AND_CROP_NONE`. |
| Devices with support for rotate-and-crop will additionally list at least |
| `ROTATE_AND_CROP_AUTO` and `ROTATE_AND_CROP_90`. |
| </details> |
| </entry> |
| </static> |
| <controls> |
| <entry name="rotateAndCrop" type="byte" visibility="public" enum="true" |
| hal_version="3.5"> |
| <enum> |
| <value>NONE |
<notes>No rotate-and-crop is applied. Processed outputs are in the sensor orientation.
| </notes> |
| </value> |
| <value>90 |
| <notes>Processed images are rotated by 90 degrees clockwise, and then cropped |
| to the original aspect ratio.</notes> |
| </value> |
| <value>180 |
| <notes>Processed images are rotated by 180 degrees. Since the aspect ratio does not |
| change, no cropping is performed.</notes> |
| </value> |
| <value>270 |
| <notes>Processed images are rotated by 270 degrees clockwise, and then cropped |
| to the original aspect ratio.</notes> |
| </value> |
| <value>AUTO |
| <notes>The camera API automatically selects the best concrete value for |
| rotate-and-crop based on the application's support for resizability and the current |
| multi-window mode. |
| |
| If the application does not support resizing but the display mode for its main |
| Activity is not in a typical orientation, the camera API will set `ROTATE_AND_CROP_90` |
| or some other supported rotation value, depending on device configuration, |
| to ensure preview and captured images are correctly shown to the user. Otherwise, |
| `ROTATE_AND_CROP_NONE` will be selected. |
| |
| When a value other than NONE is selected, several metadata fields will also be parsed |
| differently to ensure that coordinates are correctly handled for features like drawing |
| face detection boxes or passing in tap-to-focus coordinates. The camera API will |
| convert positions in the active array coordinate system to/from the cropped-and-rotated |
| coordinate system to make the operation transparent for applications. |
| |
| No coordinate mapping will be done when the application selects a non-AUTO mode. |
| </notes> |
| </value> |
| </enum> |
| <description>Whether a rotation-and-crop operation is applied to processed |
| outputs from the camera.</description> |
| <range>android.scaler.availableRotateAndCropModes</range> |
| <details> |
| This control is primarily intended to help camera applications with no support for |
| multi-window modes to work correctly on devices where multi-window scenarios are |
| unavoidable, such as foldables or other devices with variable display geometry or more |
| free-form window placement (such as laptops, which often place portrait-orientation apps |
| in landscape with pillarboxing). |
| |
| If supported, the default value is `ROTATE_AND_CROP_AUTO`, which allows the camera API |
| to enable backwards-compatibility support for applications that do not support resizing |
| / multi-window modes, when the device is in fact in a multi-window mode (such as inset |
| portrait on laptops, or on a foldable device in some fold states). In addition, |
| `ROTATE_AND_CROP_NONE` and `ROTATE_AND_CROP_90` will always be available if this control |
is supported by the device. If not supported, devices at API level 30 or higher will always
list only `ROTATE_AND_CROP_NONE`.
| |
When `ROTATE_AND_CROP_AUTO` is in use, and the camera API activates backward-compatibility mode,
| several metadata fields will also be parsed differently to ensure that coordinates are |
| correctly handled for features like drawing face detection boxes or passing in |
| tap-to-focus coordinates. The camera API will convert positions in the active array |
| coordinate system to/from the cropped-and-rotated coordinate system to make the |
| operation transparent for applications. The following controls are affected: |
| |
| * android.control.aeRegions |
| * android.control.afRegions |
| * android.control.awbRegions |
| * android.statistics.faces |
| |
| Capture results will contain the actual value selected by the API; |
| `ROTATE_AND_CROP_AUTO` will never be seen in a capture result. |
| |
| Applications can also select their preferred cropping mode, either to opt out of the |
| backwards-compatibility treatment, or to use the cropping feature themselves as needed. |
| In this case, no coordinate translation will be done automatically, and all controls |
| will continue to use the normal active array coordinates. |
| |
| Cropping and rotating is done after the application of digital zoom (via either |
| android.scaler.cropRegion or android.control.zoomRatio), but before each individual |
| output is further cropped and scaled. It only affects processed outputs such as |
| YUV, PRIVATE, and JPEG. It has no effect on RAW outputs. |
| |
When `ROTATE_AND_CROP_90` or `ROTATE_AND_CROP_270` is selected, there is a significant loss to the field of
view. For example, with a 4:3 aspect ratio output of 1600x1200, `ROTATE_AND_CROP_90` will still
| produce 1600x1200 output, but these buffers are cropped from a vertical 3:4 slice at the |
| center of the 4:3 area, then rotated to be 4:3, and then upscaled to 1600x1200. Only |
| 56.25% of the original FOV is still visible. In general, for an aspect ratio of `w:h`, |
| the crop and rotate operation leaves `(h/w)^2` of the field of view visible. For 16:9, |
| this is ~31.6%. |
| |
| As a visual example, the figure below shows the effect of `ROTATE_AND_CROP_90` on the |
| outputs for the following parameters: |
| |
| * Sensor active array: `2000x1500` |
| * Crop region: top-left: `(500, 375)`, size: `(1000, 750)` (4:3 aspect ratio) |
| * Output streams: YUV `640x480` and YUV `1280x720` |
| * `ROTATE_AND_CROP_90` |
| |
|  |
| |
| With these settings, the regions of the active array covered by the output streams are: |
| |
| * 640x480 stream crop: top-left: `(219, 375)`, size: `(562, 750)` |
| * 1280x720 stream crop: top-left: `(289, 375)`, size: `(422, 750)` |
| |
| Since the buffers are rotated, the buffers as seen by the application are: |
| |
| * 640x480 stream: top-left: `(781, 375)` on active array, size: `(640, 480)`, downscaled 1.17x from sensor pixels |
| * 1280x720 stream: top-left: `(711, 375)` on active array, size: `(1280, 720)`, upscaled 1.71x from sensor pixels |
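
A minimal Java sketch for applications that prefer to handle rotation
themselves (the `builder` request builder is hypothetical):

    // Sketch: opt out of AUTO; no coordinate mapping is performed for
    // non-AUTO modes, so all controls stay in active array coordinates.
    builder.set(CaptureRequest.SCALER_ROTATE_AND_CROP,
            CameraMetadata.SCALER_ROTATE_AND_CROP_NONE);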
| </details> |
| <hal_details> |
| ROTATE_AND_CROP_AUTO will never be sent to the HAL, though it must be set as the default |
| value in all the capture request templates by the HAL. The camera service will |
| translate AUTO to a specific rotation value based on the current application's |
| multi-window state and its support of resizability. |
| |
| The HAL also does not need to consider coordinate transforms for ROTATE_AND_CROP - all |
| capture request and result fields should be kept in the active array coordinate frame. |
| Any translation required to implement ROTATE_AND_CROP_AUTO will be handled by the camera |
| service. |
| </hal_details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.scaler.rotateAndCrop" kind="controls" hal_version="3.5"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="defaultSecureImageSize" type="int32" visibility="public" |
| type_notes="width/height for the default secure image data size" container="array" |
| typedef="size" hal_version="3.6"> |
| <array> |
| <size>2</size> |
| </array> |
| <description> |
| Default YUV/PRIVATE size to use for requesting secure image buffers. |
| </description> |
| <units>Pixels</units> |
| <details> |
This entry lists the default size supported in the secure camera mode. This entry is
optional on devices that support the SECURE_IMAGE_DATA capability. This entry will be null
if the camera device does not list the SECURE_IMAGE_DATA capability.
| |
| When the key is present, only a PRIVATE/YUV output of the specified size is guaranteed |
| to be supported by the camera HAL in the secure camera mode. Any other format or |
resolutions might not be supported. Use the
{@link CameraDevice#isSessionConfigurationSupported|ACameraDevice_isSessionConfigurationSupported}
API to query whether a secure session configuration is supported, if the device supports
this API.
| |
| If this key returns null on a device with SECURE_IMAGE_DATA capability, the application |
| can assume all output sizes listed in the |
| {@link |
| android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS} |
| are supported. |
| </details> |
| </entry> |
| <entry name="physicalCameraMultiResolutionStreamConfigurations" type="int32" |
| visibility="ndk_public" optional="true" enum="true" container="array" |
| typedef="streamConfiguration" hwlevel="limited" hal_version="3.6"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available multi-resolution stream configurations that this |
| physical camera device supports |
| (i.e. format, width, height, output/input stream). |
| </description> |
| <details> |
This list contains a subset of the parent logical camera's multi-resolution stream
configurations which belong to this physical camera, and it will only advertise the
maximum supported resolutions for a particular format.
| |
| If this camera device isn't a physical camera device constituting a logical camera, |
| but a standalone {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| camera, this field represents the multi-resolution input/output stream configurations of |
| default mode and max resolution modes. The sizes will be the maximum resolution of a |
| particular format for default mode and max resolution mode. |
| |
| This field will only be advertised if the device is a physical camera of a |
| logical multi-camera device or an ultra high resolution sensor camera. For a logical |
multi-camera, the camera API will derive the logical camera's multi-resolution stream
configurations from all physical cameras. For an ultra high resolution sensor camera, this
is used directly as the camera's multi-resolution stream configurations.
| </details> |
| <hal_details> |
| If this field contains input stream configurations, and the camera device is a physical |
| camera (not a standalone ultra-high resolution camera), the |
| android.logicalMultiCamera.activePhysicalId tag must be set to the physical camera Id in |
| the physical camera result metadata. This is to make sure during multi-resolution |
| reprocessing, the camera HAL is notified of which physical camera the reprocessing |
| request comes from. |
| </hal_details> |
| </entry> |
| <entry name="multiResolutionStreamConfigurationMap" type="int32" visibility="java_public" |
| synthetic="true" optional="true" typedef="multiResolutionStreamConfigurationMap"> |
| <description>The multi-resolution stream configurations supported by this logical camera |
| or ultra high resolution sensor camera device. |
| </description> |
| <details> |
| Multi-resolution streams can be used by a LOGICAL_MULTI_CAMERA or an |
| ULTRA_HIGH_RESOLUTION_SENSOR camera where the images sent or received can vary in |
| resolution per frame. This is useful in cases where the camera device's effective full |
| resolution changes depending on factors such as the current zoom level, lighting |
| condition, focus distance, or pixel mode. |
| |
| * For a logical multi-camera implementing optical zoom, at different zoom level, a |
| different physical camera may be active, resulting in different full-resolution image |
| sizes. |
| * For an ultra high resolution camera, depending on whether the camera operates in default |
| mode, or maximum resolution mode, the output full-size images may be of either binned |
| resolution or maximum resolution. |
| |
| To use multi-resolution output streams, the supported formats can be queried by {@link |
| android.hardware.camera2.params.MultiResolutionStreamConfigurationMap#getOutputFormats}. |
| A {@link android.hardware.camera2.MultiResolutionImageReader} can then be created for a |
| supported format with the MultiResolutionStreamInfo group queried by {@link |
| android.hardware.camera2.params.MultiResolutionStreamConfigurationMap#getOutputInfo}. |
| |
| If a camera device supports multi-resolution output streams for a particular format, for |
| each of its mandatory stream combinations, the camera device will support using a |
| MultiResolutionImageReader for the MAXIMUM stream of supported formats. Refer to |
| [the table](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#legacy-level-additional-guaranteed-combinations-with-multiresolutionoutputs) |
| for additional details. |
| |
| To use multi-resolution input streams, the supported formats can be queried by {@link |
| android.hardware.camera2.params.MultiResolutionStreamConfigurationMap#getInputFormats}. |
| A reprocessable CameraCaptureSession can then be created using an {@link |
| android.hardware.camera2.params.InputConfiguration InputConfiguration} constructed with |
| the input MultiResolutionStreamInfo group, queried by {@link |
| android.hardware.camera2.params.MultiResolutionStreamConfigurationMap#getInputInfo}. |
| |
| If a camera device supports multi-resolution {@code YUV} input and multi-resolution |
| {@code YUV} output, or multi-resolution {@code PRIVATE} input and multi-resolution |
| {@code PRIVATE} output, {@code JPEG} and {@code YUV} are guaranteed to be supported |
| multi-resolution output stream formats. Refer to |
| [the table](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#legacy-level-additional-guaranteed-combinations-with-multiresolutionoutputs) |
| for details about the additional mandatory stream combinations in this case. |
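
A hedged Java sketch of the output path described above (assumes API level
31+ and that `chars` is the device's CameraCharacteristics):

    // Sketch: create a multi-resolution JPEG reader and its output configs.
    MultiResolutionStreamConfigurationMap mrMap = chars.get(
            CameraCharacteristics.SCALER_MULTI_RESOLUTION_STREAM_CONFIGURATION_MAP);
    if (mrMap != null) {
        Collection<MultiResolutionStreamInfo> infos =
                mrMap.getOutputInfo(ImageFormat.JPEG);
        MultiResolutionImageReader reader = new MultiResolutionImageReader(
                infos, ImageFormat.JPEG, /*maxImages*/ 2);
        Collection<OutputConfiguration> outputs =
                OutputConfiguration.createInstancesForMultiResolutionOutput(reader);
        // Use 'outputs' when creating the capture session.
    }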
| </details> |
| <hal_details> |
| Do not set this property directly |
| (it is synthetic and will not be available at the HAL layer). |
| </hal_details> |
| </entry> |
| <entry name="availableStreamConfigurationsMaximumResolution" type="int32" |
| visibility="ndk_public" enum="true" container="array" typedef="streamConfiguration" |
| hal_version="3.6"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available stream configurations that this |
| camera device supports (i.e. format, width, height, output/input stream) for a |
| CaptureRequest with android.sensor.pixelMode set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <details> |
| Analogous to android.scaler.availableStreamConfigurations, for configurations |
| which are applicable when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| |
| Not all output formats may be supported in a configuration with |
| an input stream of a particular format. For more details, see |
| android.scaler.availableInputOutputFormatsMapMaximumResolution. |
| </details> |
| <hal_details> |
| Refer to hal_details for android.scaler.availableStreamConfigurations. |
| </hal_details> |
| </entry> |
| <entry name="availableMinFrameDurationsMaximumResolution" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hal_version="3.6"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination when the camera device is sent a CaptureRequest with |
| android.sensor.pixelMode set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Analogous to android.scaler.availableMinFrameDurations, for configurations |
| which are applicable when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| |
| When multiple streams are used in a request (if supported, when android.sensor.pixelMode |
| is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}), the |
| minimum frame duration will be max(individual stream min durations). |
| |
| See android.sensor.frameDuration and |
| android.scaler.availableStallDurationsMaximumResolution for more details about |
| calculating the max frame rate. |
| </details> |
| </entry> |
| <entry name="availableStallDurationsMaximumResolution" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hal_version="3.6"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination when CaptureRequests are submitted with |
| android.sensor.pixelMode set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
Analogous to android.scaler.availableStallDurations, for configurations
| which are applicable when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <hal_details> |
| If possible, it is recommended that all non-JPEG formats |
| (such as RAW16) should not have a stall duration. RAW10, RAW12, RAW_OPAQUE |
| and IMPLEMENTATION_DEFINED must not have stall durations. |
| </hal_details> |
| </entry> |
| <entry name="streamConfigurationMapMaximumResolution" type="int32" visibility="java_public" |
| synthetic="true" typedef="streamConfigurationMap"> |
| <description>The available stream configurations that this |
| camera device supports when given a CaptureRequest with android.sensor.pixelMode |
| set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}; |
| also includes the minimum frame durations |
| and the stall durations for each format/size combination. |
| </description> |
| <details> |
| Analogous to android.scaler.streamConfigurationMap for CaptureRequests where |
| android.sensor.pixelMode is |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
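
A short Java sketch tying the two pieces together (the `builder` request
builder is hypothetical; assumes an ULTRA_HIGH_RESOLUTION_SENSOR device and
API level 31+):

    // Sketch: query the max-resolution map, then request that pixel mode.
    StreamConfigurationMap maxResMap = chars.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION);
    builder.set(CaptureRequest.SENSOR_PIXEL_MODE,
            CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION);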
| </details> |
| <hal_details> |
| Do not set this property directly |
| (it is synthetic and will not be available at the HAL layer); |
| set the android.scaler.availableStreamConfigurationsMaximumResolution instead. |
| |
| Not all output formats may be supported in a configuration with |
| an input stream of a particular format. For more details, see |
| android.scaler.availableInputOutputFormatsMapMaximumResolution. |
| </hal_details> |
| </entry> |
| <entry name="availableInputOutputFormatsMapMaximumResolution" type="int32" |
| visibility="hidden" typedef="reprocessFormatsMap" hal_version="3.6"> |
| <description>The mapping of image formats that are supported by this |
| camera device for input streams, to their corresponding output formats, when |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <details> |
| Analogous to android.scaler.availableInputOutputFormatsMap for CaptureRequests where |
| android.sensor.pixelMode is |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <hal_details> |
Refer to the hal_details for android.scaler.availableInputOutputFormatsMap.
| </hal_details> |
| <tag id="REPROC" /> |
| </entry> |
| <entry name="mandatoryMaximumResolutionStreamCombinations" type="int32" |
| visibility="java_public" synthetic="true" container="array" |
| typedef="mandatoryStreamCombination"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| An array of mandatory stream combinations which are applicable when |
| {@link android.hardware.camera2.CaptureRequest} has android.sensor.pixelMode set |
| to {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| This is an app-readable conversion of the maximum resolution mandatory stream combination |
| [tables](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#additional-guaranteed-combinations-for-ultra-high-resolution-sensors). |
| </description> |
| <details> |
| The array of |
| {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is |
| generated according to the documented |
| [guideline](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#additional-guaranteed-combinations-for-ultra-high-resolution-sensors) |
| for each device which has the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability. |
| Clients can use the array as a quick reference to find an appropriate camera stream |
| combination. |
| The mandatory stream combination array will be {@code null} in case the device is not an |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| device. |
| </details> |
| <hal_details> |
| Do not set this property directly |
| (it is synthetic and will not be available at the HAL layer). |
| </hal_details> |
| </entry> |
| <entry name="mandatoryTenBitOutputStreamCombinations" type="int32" |
| visibility="java_public" synthetic="true" container="array" |
| typedef="mandatoryStreamCombination"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
An array of mandatory stream combinations which are applicable when the device supports the
10-bit output capability
{@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT}.
This is an app-readable conversion of the 10-bit output mandatory stream combination
| [tables](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#10-bit-output-additional-guaranteed-configurations). |
| </description> |
| <details> |
| The array of |
| {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is |
| generated according to the documented |
| [guideline](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#10-bit-output-additional-guaranteed-configurations) |
| for each device which has the |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT} |
| capability. |
| Clients can use the array as a quick reference to find an appropriate camera stream |
| combination. |
| The mandatory stream combination array will be {@code null} in case the device is not an |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_DYNAMIC_RANGE_TEN_BIT} |
| device. |
| </details> |
| <hal_details> |
| Do not set this property directly |
| (it is synthetic and will not be available at the HAL layer). |
| </hal_details> |
| </entry> |
| <entry name="mandatoryPreviewStabilizationOutputStreamCombinations" type="int32" |
| visibility="java_public" synthetic="true" container="array" |
| typedef="mandatoryStreamCombination"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
An array of mandatory stream combinations which are applicable when the device lists
| {@code PREVIEW_STABILIZATION} in android.control.availableVideoStabilizationModes. |
| This is an app-readable conversion of the preview stabilization mandatory stream |
| combination |
| [tables](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#preview-stabilization-guaranteed-stream-configurations). |
| </description> |
| <details> |
| The array of |
| {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is |
| generated according to the documented |
| [guideline](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#preview-stabilization-guaranteed-stream-configurations) |
for each device which supports {@code PREVIEW_STABILIZATION}.
| Clients can use the array as a quick reference to find an appropriate camera stream |
| combination. |
| The mandatory stream combination array will be {@code null} in case the device does not |
| list {@code PREVIEW_STABILIZATION} in android.control.availableVideoStabilizationModes. |
| </details> |
| <hal_details> |
| Do not set this property directly |
| (it is synthetic and will not be available at the HAL layer). |
| </hal_details> |
| </entry> |
| |
| <entry name="multiResolutionStreamSupported" type="byte" visibility="ndk_public" enum="true" |
| typedef="boolean" hwlevel="limited" hal_version="3.6"> |
| <enum> |
| <value>FALSE</value> |
| <value>TRUE</value> |
| </enum> |
<description>Whether the camera device supports multi-resolution input or output streams.
| </description> |
| <details> |
| A logical multi-camera or an ultra high resolution camera may support multi-resolution |
| input or output streams. With multi-resolution output streams, the camera device is able |
| to output different resolution images depending on the current active physical camera or |
| pixel mode. With multi-resolution input streams, the camera device can reprocess images |
| of different resolutions from different physical cameras or sensor pixel modes. |
| |
| When set to TRUE: |
| |
| * For a logical multi-camera, the camera framework derives |
| android.scaler.multiResolutionStreamConfigurationMap by combining the |
| android.scaler.physicalCameraMultiResolutionStreamConfigurations from its physical |
| cameras. |
| * For an ultra-high resolution sensor camera, the camera framework directly copies |
| the value of android.scaler.physicalCameraMultiResolutionStreamConfigurations to |
| android.scaler.multiResolutionStreamConfigurationMap. |
| </details> |
| <hal_details> |
| For the HAL to claim support for multi-resolution streams: |
| |
| * The HAL must support the buffer management API by setting |
| supportedBufferManagementVersion to HIDL_DEVICE_3_5. |
| * For a logical multi-camera, when combined from all its physical cameras, there must be |
| at a minimum one input or output stream format with at least two different |
| physicalCameraMultiResolutionStreamConfigurations entries for that format. |
| * For an ultra high resolution sensor camera, for each supported multi-resolution format, |
| the physicalCameraMultiResolutionStreamConfigurations must contain both the largest stream |
| configuration within the android.scaler.streamConfigurationMap and the largest stream |
| configuration within the android.scaler.streamConfigurationMapMaximumResolution. |
| * If the HAL advertises multi-resolution input stream support for a particular format |
| (namely PRIVATE, or YUV), the logical multi-camera or ultra high resolution sensor camera |
| must have the corresponding reprocessing capabilities (PRIVATE_REPROCESSING, |
| or YUV_REPROCESSING respectively). The camera HAL must support reprocessing the |
| multi-resolution input stream to the output formats specified in the camera's |
| android.scaler.availableInputOutputFormatsMap. |
| </hal_details> |
| </entry> |
| </static> |
| <controls> |
| <entry name="cropRegionSet" type="byte" visibility="fwk_only" |
| enum="true" typedef="boolean"> |
| <enum> |
| <value>FALSE |
| <notes>Crop region (android.scaler.cropRegion) has not been set by the |
| camera client. |
| </notes> |
| </value> |
| <value>TRUE |
| <notes> |
Scaler crop region (android.scaler.cropRegion) has been set by the camera
client.
| </notes> |
| </value> |
| </enum> |
| <description> |
Framework-only private key which informs the camera framework that the scaler crop region
(android.scaler.cropRegion) has been set by the client and need not be corrected
when android.sensor.pixelMode is set to MAXIMUM_RESOLUTION.
| </description> |
| <details> |
| This must be set to TRUE by the camera2 java fwk when the camera client sets |
| android.scaler.cropRegion. |
| </details> |
| </entry> |
| </controls> |
| <static> |
| <entry name="availableStreamUseCases" type="int64" visibility="public" |
| enum="true" container="array" hal_version="3.8"> |
| <array> |
| <size>n</size> |
| </array> |
| <enum> |
| <value optional="true" id="0x0">DEFAULT |
| <notes> |
| Default stream use case. |
| |
| This use case is the same as when the application doesn't set any use case for |
| the stream. The camera device uses the properties of the output target, such as |
| format, dataSpace, or surface class type, to optimize the image processing pipeline. |
| </notes> |
| </value> |
| <value optional="true" id="0x1">PREVIEW |
| <notes> |
| Live stream shown to the user. |
| |
| Optimized for performance and usability as a viewfinder, but not necessarily for |
| image quality. The output is not meant to be persisted as saved images or video. |
| |
There is no stall if android.control.* settings are set to FAST. There may be a stall if
they are set to HIGH_QUALITY. This use case has the same behavior as the
| default SurfaceView and SurfaceTexture targets. Additionally, this use case can be |
| used for in-app image analysis. |
| </notes> |
| </value> |
| <value optional="true" id="0x2">STILL_CAPTURE |
| <notes> |
| Still photo capture. |
| |
| Optimized for high-quality high-resolution capture, and not expected to maintain |
| preview-like frame rates. |
| |
| The stream may have stalls regardless of whether android.control.* is HIGH_QUALITY. |
| This use case has the same behavior as the default JPEG and RAW related formats. |
| </notes> |
| </value> |
| <value optional="true" id="0x3">VIDEO_RECORD |
| <notes> |
| Recording video clips. |
| |
| Optimized for high-quality video capture, including high-quality image stabilization |
if supported by the device and enabled by the application. As a result, it may produce
output frames with a substantial lag from real time, to allow for the highest-quality
stabilization or other processing. Such an output is not suitable for drawing
to screen directly; it is expected to be persisted to disk or similar for later
playback or processing. Only streams that set the VIDEO_RECORD use case are guaranteed
| to have video stabilization applied when the video stabilization control is set |
| to ON, as opposed to PREVIEW_STABILIZATION. |
| |
| This use case has the same behavior as the default MediaRecorder and MediaCodec |
| targets. |
| </notes> |
| </value> |
| <value optional="true" id="0x4">PREVIEW_VIDEO_STILL |
| <notes> |
| One single stream used for combined purposes of preview, video, and still capture. |
| |
| For such multi-purpose streams, the camera device aims to make the best tradeoff |
| between the individual use cases. For example, the STILL_CAPTURE use case by itself |
| may have stalls for achieving best image quality. But if combined with PREVIEW and |
| VIDEO_RECORD, the camera device needs to trade off the additional image processing |
| for speed so that preview and video recording aren't slowed down. |
| |
| Similarly, VIDEO_RECORD may produce frames with a substantial lag, but |
| PREVIEW_VIDEO_STILL must have minimal output delay. This means that to enable video |
| stabilization with this use case, the device must support and the app must select the |
| PREVIEW_STABILIZATION mode for video stabilization. |
| </notes> |
| </value> |
| <value optional="true" id="0x5">VIDEO_CALL |
| <notes> |
| Long-running video call optimized for both power efficiency and video quality. |
| |
| The camera sensor may run in a lower-resolution mode to reduce power consumption |
| at the cost of some image and digital zoom quality. Unlike VIDEO_RECORD, VIDEO_CALL |
| outputs are expected to work in dark conditions, so are usually accompanied with |
| variable frame rate settings to allow sufficient exposure time in low light. |
| </notes> |
| </value> |
| <value optional="true" id="0x6" hal_version="3.9">CROPPED_RAW |
| <notes> |
| Cropped RAW stream when the client chooses to crop the field of view. |
| |
| Certain types of image sensors can run in binned modes in order to improve signal to |
| noise ratio while capturing frames. However, at certain zoom levels and / or when |
| other scene conditions are deemed fit, the camera sub-system may choose to un-bin and |
| remosaic the sensor's output. This results in a RAW frame which is cropped in field |
| of view and yet has the same number of pixels as full field of view RAW, thereby |
| improving image detail. |
| |
| The resultant field of view of the RAW stream will be greater than or equal to |
| croppable non-RAW streams. The effective crop region for this RAW stream will be |
| reflected in the CaptureResult key android.scaler.rawCropRegion. |
| |
| If this stream use case is set on a non-RAW stream, i.e. not one of:
| |
| * {@link android.graphics.ImageFormat#RAW_SENSOR|AIMAGE_FORMAT_RAW16 RAW_SENSOR} |
| * {@link android.graphics.ImageFormat#RAW10|AIMAGE_FORMAT_RAW10 RAW10} |
| * {@link android.graphics.ImageFormat#RAW12|AIMAGE_FORMAT_RAW12 RAW12} |
| |
| the session configuration is not guaranteed to succeed.
| 
| This stream use case may not be supported on some devices. |
| </notes> |
| </value> |
| <value optional="true" visibility="hidden" id="0x10000">VENDOR_START |
| <notes> |
| Vendor defined use cases. These depend on the vendor implementation. |
| </notes> |
| </value> |
| </enum> |
| <description>The stream use cases supported by this camera device.</description> |
| <details>The stream use case indicates the purpose of a particular camera stream from |
| the end-user perspective. Some examples of camera use cases are: preview stream for |
| live viewfinder shown to the user, still capture for generating high quality photo |
| capture, video record for encoding the camera output for the purpose of future playback, |
| and video call for live realtime video conferencing. |
| |
| With this flag, the camera device can optimize the image processing pipeline |
| parameters, such as tuning, sensor mode, and ISP settings, independent of |
| the properties of the immediate camera output surface. For example, if the output |
| surface is a SurfaceTexture, the stream use case flag can be used to indicate whether |
| the camera frames eventually go to display, video encoder, |
| still image capture, or all of them combined. |
| |
| The application sets the use case of a camera stream by calling |
| {@link android.hardware.camera2.params.OutputConfiguration#setStreamUseCase}. |
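| 
| For example, a minimal Java sketch (illustrative only; `previewSurface` is an assumed,
| app-provided Surface, such as one backing a SurfaceView):
| 
|     import android.hardware.camera2.CameraMetadata;
|     import android.hardware.camera2.params.OutputConfiguration;
|     import android.view.Surface;
| 
|     // Tag an output as the PREVIEW stream before creating the capture session.
|     static OutputConfiguration makePreviewOutput(Surface previewSurface) {
|         OutputConfiguration config = new OutputConfiguration(previewSurface);
|         config.setStreamUseCase(
|                 CameraMetadata.SCALER_AVAILABLE_STREAM_USE_CASES_PREVIEW);
|         return config;
|     }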
| |
| A camera device with |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE} |
| capability must support the following stream use cases: |
| |
| * DEFAULT |
| * PREVIEW |
| * STILL_CAPTURE |
| * VIDEO_RECORD |
| * PREVIEW_VIDEO_STILL |
| * VIDEO_CALL |
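| 
| Before relying on non-DEFAULT use cases, the application can check for this capability;
| a minimal sketch using only public camera2 API:
| 
|     import android.hardware.camera2.CameraCharacteristics;
| 
|     // Returns true if the device advertises the STREAM_USE_CASE capability.
|     static boolean supportsStreamUseCases(CameraCharacteristics chars) {
|         int[] caps = chars.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
|         if (caps == null) return false;
|         for (int cap : caps) {
|             if (cap == CameraCharacteristics
|                     .REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE) {
|                 return true;
|             }
|         }
|         return false;
|     }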
| |
| The guaranteed stream combinations related to stream use case for a camera device with |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE} |
| capability are documented in the camera device
| [guideline](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#stream-use-case-capability-additional-guaranteed-configurations). |
| The application is strongly recommended to use one of the guaranteed stream combinations. |
| If the application creates a session with a stream combination not in the guaranteed |
| list, or with mixed DEFAULT and non-DEFAULT use cases within the same session, |
| the camera device may ignore some stream use cases due to hardware constraints |
| and implementation details. |
| |
| For stream combinations not covered by the stream use case mandatory lists, such as |
| reprocessable session, constrained high speed session, or RAW stream combinations, the |
| application should leave stream use cases within the session as DEFAULT. |
| </details> |
| <hal_details> |
| The camera HAL must support DEFAULT stream use case to handle scenarios where the |
| application doesn't explicitly set a stream's use case flag, in which case the camera |
| framework sets it to DEFAULT. |
| </hal_details> |
| </entry> |
| <entry name="mandatoryUseCaseStreamCombinations" type="int32" visibility="java_public" |
| synthetic="true" container="array" typedef="mandatoryStreamCombination"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| An array of mandatory stream combinations with stream use cases. |
| This is an app-readable conversion of the mandatory stream combination |
| [tables](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#stream-use-case-capability-additional-guaranteed-configurations) |
| with each stream's use case being set. |
| </description> |
| <details> |
| The array of |
| {@link android.hardware.camera2.params.MandatoryStreamCombination combinations} is |
| generated according to the documented |
| [guideline](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#stream-use-case-capability-additional-guaranteed-configurations)
| for a camera device with |
| {@link android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE} |
| capability. |
| The mandatory stream combination array will be {@code null} in case the device doesn't |
| have {@link |
| android.hardware.camera2.CameraCharacteristics#REQUEST_AVAILABLE_CAPABILITIES_STREAM_USE_CASE} |
| capability. |
| </details> |
| <hal_details> |
| Do not set this property directly |
| (it is synthetic and will not be available at the HAL layer). |
| </hal_details> |
| </entry> |
| </static> |
| <dynamic> |
| <entry name="rawCropRegion" type="int32" visibility="public" container="array" |
| typedef="rectangle" hal_version="3.9"> |
| <array> |
| <size>4</size> |
| </array> |
| <description> |
| The region of the sensor that corresponds to the RAW read out for this |
| capture when the stream use case of a RAW stream is set to CROPPED_RAW. |
| </description> |
| <units>Pixel coordinates relative to |
| android.sensor.info.activeArraySize or |
| android.sensor.info.preCorrectionActiveArraySize depending on distortion correction |
| capability and mode</units> |
| <details> |
| The coordinate system follows that of android.sensor.info.preCorrectionActiveArraySize. |
| |
| This CaptureResult key will be set when the corresponding CaptureRequest has a RAW target |
| with stream use case set to |
| {@link android.hardware.camera2.CameraMetadata#SCALER_AVAILABLE_STREAM_USE_CASES_CROPPED_RAW}, |
| otherwise it will be {@code null}. |
| The value of this key specifies the region of the sensor used for the RAW capture and can |
| be used to calculate the corresponding field of view of RAW streams. |
| This field of view will always be >= the field of view of (processed) non-RAW streams for
| the capture. Note: The region specified may not necessarily be centered.
| |
| For example: Assume a camera device has a pre-correction active array size of
| {@code {0, 0, 2000, 1500}}. If the RAW_CROP_REGION is {@code {500, 375, 1500, 1125}}, that
| corresponds to a centered crop of 1/4th of the full field of view RAW stream.
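| 
| As an illustrative sketch (assuming a capture result from a session where the RAW
| stream's use case was set to CROPPED_RAW), the crop's linear width ratio relative to
| the full pre-correction array could be computed as:
| 
|     import android.graphics.Rect;
|     import android.hardware.camera2.CaptureResult;
| 
|     // Fraction of the full pre-correction width covered by the RAW crop.
|     static float rawCropWidthRatio(CaptureResult result, Rect preCorrectionArray) {
|         Rect rawCrop = result.get(CaptureResult.SCALER_RAW_CROP_REGION);
|         if (rawCrop == null) return 1.0f; // No RAW crop for this capture.
|         return (float) rawCrop.width() / preCorrectionArray.width();
|     }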
| |
| The metadata keys which describe properties of RAW frames:
| 
| * android.statistics.hotPixelMap
| * android.statistics.lensShadingCorrectionMap
| * android.lens.distortion
| * android.lens.poseTranslation
| * android.lens.poseRotation
| * android.lens.intrinsicCalibration
| 
| should be interpreted in the effective post-raw-crop field-of-view coordinate system.
| In this coordinate system,
| {android.sensor.info.preCorrectionActiveArraySize.left,
| android.sensor.info.preCorrectionActiveArraySize.top} corresponds to the
| top left corner of the cropped RAW frame and
| {android.sensor.info.preCorrectionActiveArraySize.right, |
| android.sensor.info.preCorrectionActiveArraySize.bottom} corresponds to |
| the bottom right corner. Client applications must use the values of the keys |
| in the CaptureResult metadata if present. |
| |
| Crop regions android.scaler.cropRegion, AE/AWB/AF regions and face coordinates still |
| use the android.sensor.info.activeArraySize coordinate system as usual. |
| </details> |
| </entry> |
| </dynamic> |
| </section> |
| <section name="sensor"> |
| <controls> |
| <entry name="exposureTime" type="int64" visibility="public" hwlevel="full"> |
| <description>Duration each pixel is exposed to |
| light.</description> |
| <units>Nanoseconds</units> |
| <range>android.sensor.info.exposureTimeRange</range> |
| <details>If the sensor can't expose this exact duration, it will shorten the |
| duration exposed to the nearest possible value (rather than expose longer). |
| The final exposure time used will be available in the output capture result. |
| |
| This control is only effective if android.control.aeMode or android.control.mode is set to |
| OFF; otherwise the auto-exposure algorithm will override this value. However, in the |
| case that android.control.aePriorityMode is set to SENSOR_EXPOSURE_TIME_PRIORITY, this |
| control will be effective and not controlled by the auto-exposure algorithm. |
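| 
| For example, a hedged sketch of requesting a fixed ~1/60s exposure (assumes an existing
| CaptureRequest.Builder; the value must lie within android.sensor.info.exposureTimeRange):
| 
|     import android.hardware.camera2.CameraMetadata;
|     import android.hardware.camera2.CaptureRequest;
| 
|     // Disable auto-exposure so the manual exposure time takes effect.
|     static void setManualExposure(CaptureRequest.Builder builder) {
|         builder.set(CaptureRequest.CONTROL_AE_MODE,
|                 CameraMetadata.CONTROL_AE_MODE_OFF);
|         builder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 16_666_666L); // nanoseconds
|     }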
| </details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="frameDuration" type="int64" visibility="public" hwlevel="full"> |
| <description>Duration from start of frame readout to |
| start of next frame readout.</description> |
| <units>Nanoseconds</units> |
| <range>See android.sensor.info.maxFrameDuration, {@link |
| android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}. |
| The duration is capped to `max(duration, exposureTime + overhead)`.</range> |
| <details> |
| The maximum frame rate that can be supported by a camera subsystem is |
| a function of many factors: |
| |
| * Requested resolutions of output image streams |
| * Availability of binning / skipping modes on the imager |
| * The bandwidth of the imager interface |
| * The bandwidth of the various ISP processing blocks |
| |
| Since these factors can vary greatly between different ISPs and |
| sensors, the camera abstraction tries to represent the bandwidth |
| restrictions with as simple a model as possible. |
| |
| The model presented has the following characteristics: |
| |
| * The image sensor is always configured to output the smallest |
| resolution possible given the application's requested output stream |
| sizes. The smallest resolution is defined as being at least as large |
| as the largest requested output stream size; the camera pipeline must |
| never digitally upsample sensor data when the crop region covers the |
| whole sensor. In general, this means that if only small output stream |
| resolutions are configured, the sensor can provide a higher frame |
| rate. |
| * Since any request may use any or all the currently configured |
| output streams, the sensor and ISP must be configured to support |
| scaling a single capture to all the streams at the same time. This |
| means the camera pipeline must be ready to produce the largest |
| requested output size without any delay. Therefore, the overall |
| frame rate of a given configured stream set is governed only by the |
| largest requested stream resolution. |
| * Using more than one output stream in a request does not affect the |
| frame duration. |
| * Certain format-streams may need to do additional background processing |
| before data is consumed/produced by that stream. These processors |
| can run concurrently with the rest of the camera pipeline, but
| cannot process more than 1 capture at a time. |
| |
| The necessary information for the application, given the model above, is provided via |
| {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS}. |
| These are used to determine the maximum frame rate / minimum frame duration that is |
| possible for a given stream configuration. |
| |
| Specifically, the application can use the following rules to |
| determine the minimum frame duration it can request from the camera |
| device: |
| |
| 1. Let the set of currently configured input/output streams be called `S`. |
| 1. Find the minimum frame durations for each stream in `S`, by looking it up in {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS} |
| (with its respective size/format). Let this set of frame durations be called `F`. |
| 1. For any given request `R`, the minimum frame duration allowed for `R` is the maximum |
| out of all values in `F`. Let the streams used in `R` be called `S_r`. |
| |
| If none of the streams in `S_r` have a stall time (listed in {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS} |
| using its respective size/format), then the frame duration in `F` determines the steady |
| state frame rate that the application will get if it uses `R` as a repeating request. Let |
| this special kind of request be called `Rsimple`. |
| |
| A repeating request `Rsimple` can _occasionally_ be interleaved by a single capture of a
| new request `Rstall` (which has at least one in-use stream with a non-0 stall time). If
| `Rstall` has the same minimum frame duration, this will not cause a frame rate loss,
| provided all buffers from the previous `Rstall` have already been delivered.
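| 
| Putting the rules above together, a minimal sketch (the parallel `formats`/`sizes`
| arrays describing the configured streams are assumed inputs):
| 
|     import android.hardware.camera2.params.StreamConfigurationMap;
|     import android.util.Size;
| 
|     // Minimum frame duration for a request using all given streams:
|     // the maximum of each stream's own minimum frame duration.
|     static long minFrameDurationNs(StreamConfigurationMap map,
|                                    int[] formats, Size[] sizes) {
|         long duration = 0;
|         for (int i = 0; i &lt; formats.length; i++) {
|             duration = Math.max(duration,
|                     map.getOutputMinFrameDuration(formats[i], sizes[i]));
|         }
|         return duration;
|     }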
| |
| For more details about stalling, see {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputStallDuration|ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS}. |
| |
| This control is only effective if android.control.aeMode or android.control.mode is set to |
| OFF; otherwise the auto-exposure algorithm will override this value. |
| |
| *Note:* Prior to Android 13, this field was described as measuring the duration from
| start of frame exposure to start of next frame exposure, which does not match how
| sensor manufacturers define frame duration. A mobile image sensor defines the frame
| duration as the interval between the starts of consecutive sensor readouts.
| </details> |
| <hal_details> |
| For more details about stalling, see |
| android.scaler.availableStallDurations. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="sensitivity" type="int32" visibility="public" hwlevel="full"> |
| <description>The amount of gain applied to sensor data |
| before processing.</description> |
| <units>ISO arithmetic units</units> |
| <range>android.sensor.info.sensitivityRange</range> |
| <details> |
| The sensitivity is the standard ISO sensitivity value, |
| as defined in ISO 12232:2006. |
| |
| The sensitivity must be within android.sensor.info.sensitivityRange, and
| if it is less than android.sensor.maxAnalogSensitivity, the camera device
| is guaranteed to use only analog amplification for applying the gain.
| |
| If the camera device cannot apply the exact sensitivity |
| requested, it will reduce the gain to the nearest supported |
| value. The final sensitivity used will be available in the |
| output capture result. |
| |
| This control is only effective if android.control.aeMode or android.control.mode is set to |
| OFF; otherwise the auto-exposure algorithm will override this value. However, in the |
| case that android.control.aePriorityMode is set to SENSOR_SENSITIVITY_PRIORITY, this |
| control will be effective and not controlled by the auto-exposure algorithm. |
| |
| Note that for devices supporting postRawSensitivityBoost, the total sensitivity applied |
| to the final processed image is the combination of android.sensor.sensitivity and |
| android.control.postRawSensitivityBoost. In case the application uses the sensor |
| sensitivity from last capture result of an auto request for a manual request, in order |
| to achieve the same brightness in the output image, the application should also |
| set postRawSensitivityBoost. |
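| 
| A sketch of carrying both values from an auto-exposed result into a manual request
| (assumes the device supports postRawSensitivityBoost, so both keys are present):
| 
|     import android.hardware.camera2.CaptureRequest;
|     import android.hardware.camera2.CaptureResult;
| 
|     // Reuse an auto result's sensitivity and boost for a manual request.
|     static void copySensitivity(CaptureResult result, CaptureRequest.Builder builder) {
|         builder.set(CaptureRequest.SENSOR_SENSITIVITY,
|                 result.get(CaptureResult.SENSOR_SENSITIVITY));
|         builder.set(CaptureRequest.CONTROL_POST_RAW_SENSITIVITY_BOOST,
|                 result.get(CaptureResult.CONTROL_POST_RAW_SENSITIVITY_BOOST));
|     }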
| </details> |
| <hal_details>ISO 12232:2006 REI method is acceptable.</hal_details> |
| <tag id="V1" /> |
| </entry> |
| </controls> |
| <static> |
| <namespace name="info"> |
| <entry name="activeArraySize" type="int32" visibility="public" |
| type_notes="Four ints defining the active pixel rectangle" |
| container="array" typedef="rectangle" hwlevel="legacy"> |
| <array> |
| <size>4</size> |
| </array> |
| <description> |
| The area of the image sensor which corresponds to active pixels after any geometric |
| distortion correction has been applied. |
| </description> |
| <units>Pixel coordinates on the image sensor</units> |
| <details> |
| This is the rectangle representing the size of the active region of the sensor (i.e. |
| the region that actually receives light from the scene) after any geometric correction |
| has been applied, and should be treated as the maximum size in pixels of any of the |
| image output formats aside from the raw formats. |
| |
| This rectangle is defined relative to the full pixel array; (0,0) is the top-left of |
| the full pixel array, and the size of the full pixel array is given by |
| android.sensor.info.pixelArraySize. |
| |
| The coordinate system for most other keys that list pixel coordinates, including |
| android.scaler.cropRegion, is defined relative to the active array rectangle given in |
| this field, with `(0, 0)` being the top-left of this rectangle. |
| |
| The active array may be smaller than the full pixel array, since the full array may |
| include black calibration pixels or other inactive regions. |
| |
| For devices that do not support android.distortionCorrection.mode control, the active |
| array must be the same as android.sensor.info.preCorrectionActiveArraySize. |
| |
| For devices that support android.distortionCorrection.mode control, the active array must |
| be enclosed by android.sensor.info.preCorrectionActiveArraySize. The difference between |
| pre-correction active array and active array accounts for scaling or cropping caused |
| by lens geometric distortion correction. |
| |
| In general, application should always refer to active array size for controls like |
| metering regions or crop region. Two exceptions are when the application is dealing with |
| RAW image buffers (RAW_SENSOR, RAW10, RAW12 etc), or when application explicitly set |
| android.distortionCorrection.mode to OFF. In these cases, application should refer |
| to android.sensor.info.preCorrectionActiveArraySize. |
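| 
| A minimal sketch of that rule (the `rawOrCorrectionOff` flag is an assumed input that is
| true when working with RAW buffers or when android.distortionCorrection.mode is OFF):
| 
|     import android.graphics.Rect;
|     import android.hardware.camera2.CameraCharacteristics;
| 
|     // Pick the rectangle that region coordinates should be expressed against.
|     static Rect coordinateReference(CameraCharacteristics chars,
|                                     boolean rawOrCorrectionOff) {
|         return chars.get(rawOrCorrectionOff
|                 ? CameraCharacteristics.SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
|                 : CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
|     }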
| </details> |
| <ndk_details> |
| The data representation is `int[4]`, which maps to `(left, top, width, height)`. |
| </ndk_details> |
| <hal_details> |
| This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be |
| &gt;= `(0,0)`. |
| The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`. |
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="sensitivityRange" type="int32" visibility="public" |
| type_notes="Range of supported sensitivities" |
| container="array" typedef="rangeInt" |
| hwlevel="full"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>Range of sensitivities for android.sensor.sensitivity supported by this |
| camera device.</description> |
| <range>Min &lt;= 100, Max &gt;= 800</range>
| <details> |
| The values are the standard ISO sensitivity values, |
| as defined in ISO 12232:2006. |
| </details> |
| |
| <tag id="BC" /> |
| <tag id="V1" /> |
| </entry> |
| <entry name="colorFilterArrangement" type="byte" visibility="public" enum="true" |
| hwlevel="full"> |
| <enum> |
| <value>RGGB</value> |
| <value>GRBG</value> |
| <value>GBRG</value> |
| <value>BGGR</value> |
| <value>RGB |
| <notes>Sensor is not Bayer; output has 3 16-bit |
| values for each pixel, instead of just 1 16-bit value |
| per pixel.</notes></value> |
| <value hal_version="3.4">MONO |
| <notes>Sensor doesn't have any Bayer color filter.
| Such a sensor captures visible light in monochrome. The exact weighting and
| wavelengths captured are not specified, but generally only include the visible
| frequencies. This value implies a MONOCHROME camera.</notes></value>
| <value hal_version="3.4">NIR |
| <notes>Sensor has a near infrared filter capturing light with wavelength between |
| roughly 750nm and 1400nm, and the same filter covers the whole sensor array. This |
| value implies a MONOCHROME camera.</notes></value> |
| </enum> |
| <description>The arrangement of color filters on sensor; |
| represents the colors in the top-left 2x2 section of |
| the sensor, in reading order, for a Bayer camera, or the |
| light spectrum it captures for MONOCHROME camera. |
| </description> |
| <hal_details> |
| Starting from Android Q, the colorFilterArrangement for a MONOCHROME camera must be |
| single color patterns, such as MONO or NIR. |
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="exposureTimeRange" type="int64" visibility="public" |
| type_notes="nanoseconds" container="array" typedef="rangeLong" |
| hwlevel="full"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>The range of image exposure times for android.sensor.exposureTime supported |
| by this camera device. |
| </description> |
| <units>Nanoseconds</units> |
| <range>The minimum exposure time will be less than 100 us. For FULL
| capability devices (android.info.supportedHardwareLevel == FULL),
| the maximum exposure time will be greater than 100 ms.</range>
| <hal_details>For FULL capability devices (android.info.supportedHardwareLevel == FULL), |
| The maximum of the range SHOULD be at least 1 second (1e9), MUST be at least |
| 100ms. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="maxFrameDuration" type="int64" visibility="public" |
| hwlevel="full"> |
| <description>The maximum possible frame duration (minimum frame rate) for |
| android.sensor.frameDuration that is supported by this camera device.</description>
| <units>Nanoseconds</units> |
| <range>For FULL capability devices |
| (android.info.supportedHardwareLevel == FULL), at least 100ms. |
| </range> |
| <details>Attempting to use frame durations beyond the maximum will result in the frame |
| duration being clipped to the maximum. See that control for a full definition of frame |
| durations. |
| |
| Refer to {@link |
| android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration|ACAMERA_SCALER_AVAILABLE_MIN_FRAME_DURATIONS} |
| for the minimum frame duration values. |
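| 
| For example, the slowest supported frame rate follows directly from this key; a sketch:
| 
|     import android.hardware.camera2.CameraCharacteristics;
| 
|     // Minimum supported frame rate, derived from the maximum frame duration.
|     static double minSupportedFps(CameraCharacteristics chars) {
|         long maxDurationNs = chars.get(
|                 CameraCharacteristics.SENSOR_INFO_MAX_FRAME_DURATION);
|         return 1e9 / maxDurationNs;
|     }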
| </details> |
| <hal_details> |
| For FULL capability devices (android.info.supportedHardwareLevel == FULL), |
| The maximum of the range SHOULD be at least |
| 1 second (1e9), MUST be at least 100ms (100e6). |
| |
| android.sensor.info.maxFrameDuration must be greater or |
| equal to the android.sensor.info.exposureTimeRange max |
| value (since exposure time overrides frame duration). |
| |
| Available minimum frame durations for JPEG must be no greater |
| than that of the YUV_420_888/IMPLEMENTATION_DEFINED |
| minimum frame durations (for that respective size). |
| |
| Since JPEG processing is considered offline and can take longer than |
| a single uncompressed capture, refer to |
| android.scaler.availableStallDurations |
| for details about encoding this scenario. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="physicalSize" type="float" visibility="public" |
| type_notes="width x height" |
| container="array" typedef="sizeF" hwlevel="legacy"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>The physical dimensions of the full pixel |
| array.</description> |
| <units>Millimeters</units> |
| <details>This is the physical size of the sensor pixel |
| array defined by android.sensor.info.pixelArraySize. |
| </details> |
| <hal_details>Needed for FOV calculation for old API</hal_details> |
| <tag id="V1" /> |
| <tag id="BC" /> |
| </entry> |
| <entry name="pixelArraySize" type="int32" visibility="public" |
| container="array" typedef="size" hwlevel="legacy"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>Dimensions of the full pixel array, possibly |
| including black calibration pixels.</description> |
| <units>Pixels</units> |
| <details>The pixel count of the full pixel array of the image sensor, which covers |
| android.sensor.info.physicalSize area. This represents the full pixel dimensions of |
| the raw buffers produced by this sensor. |
| |
| If a camera device supports raw sensor formats, either this or |
| android.sensor.info.preCorrectionActiveArraySize is the maximum dimensions for the raw |
| output formats listed in {@link |
| android.hardware.camera2.params.StreamConfigurationMap|ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS} |
| (this depends on whether or not the image sensor returns buffers containing pixels that |
| are not part of the active array region for blacklevel calibration or other purposes). |
| |
| Some parts of the full pixel array may not receive light from the scene, |
| or be otherwise inactive. The android.sensor.info.preCorrectionActiveArraySize key |
| defines the rectangle of active pixels that will be included in processed image |
| formats. |
| </details> |
| <tag id="RAW" /> |
| <tag id="BC" /> |
| </entry> |
| <entry name="whiteLevel" type="int32" visibility="public"> |
| <description> |
| Maximum raw value output by sensor. |
| </description> |
| <range>&gt; 255 (8-bit output)</range> |
| <details> |
| This specifies the fully-saturated encoding level for the raw |
| sample values from the sensor. This is typically caused by the |
| sensor becoming highly non-linear or clipping. The minimum for |
| each channel is specified by the offset in the |
| android.sensor.blackLevelPattern key. |
| |
| The white level is typically determined either by sensor bit depth |
| (8-14 bits is expected), or by the point where the sensor response |
| becomes too non-linear to be useful. The default value for this is the
| maximum representable value for a 16-bit raw sample (2^16 - 1).
| 
| The white level values of captured images may vary for different
| capture settings (e.g., android.sensor.sensitivity). This key
| represents a coarse approximation for such cases. It is recommended
| to use android.sensor.dynamicWhiteLevel for captures when supported |
| by the camera device, which provides more accurate white level values. |
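| 
| For example, a common normalization of a raw sample to [0, 1] combines this key with the
| per-channel black level (a sketch; `raw`, `blackLevel`, and `whiteLevel` are assumed
| inputs taken from the keys above):
| 
|     // Map a raw sample to [0, 1] given its channel's black level and the white level.
|     static float normalizeRawSample(int raw, int blackLevel, int whiteLevel) {
|         float value = (float) (raw - blackLevel) / (whiteLevel - blackLevel);
|         return Math.min(1.0f, Math.max(0.0f, value)); // Clamp to [0, 1].
|     }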
| </details> |
| <hal_details> |
| The full bit depth of the sensor must be available in the raw data, |
| so the value for linear sensors should not be significantly lower
| than the maximum raw value supported, i.e. 2^(sensor bits per pixel).
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="timestampSource" type="byte" visibility="public" |
| enum="true" hwlevel="legacy"> |
| <enum> |
| <value>UNKNOWN |
| <notes> |
| Timestamps from android.sensor.timestamp are in nanoseconds and monotonic, but cannot
| be compared to timestamps from other subsystems (e.g. accelerometer, gyro etc.),
| or other instances of the same or different camera devices in the same system with |
| accuracy. However, the timestamps are roughly in the same timebase as |
| {@link android.os.SystemClock#uptimeMillis}. The accuracy is sufficient for tasks |
| like A/V synchronization for video recording, at least, and the timestamps can be |
| directly used together with timestamps from the audio subsystem for that task. |
| |
| Timestamps between streams and results for a single camera instance are comparable, |
| and the timestamps for all buffers and the result metadata generated by a single |
| capture are identical. |
| </notes> |
| </value> |
| <value>REALTIME |
| <notes> |
| Timestamps from android.sensor.timestamp are in the same timebase as |
| {@link android.os.SystemClock#elapsedRealtimeNanos}, |
| and they can be compared to other timestamps using that base. |
| |
| When buffers from a REALTIME device are passed directly to a video encoder from the |
| camera, automatic compensation is done to account for differing timebases of the |
| audio and camera subsystems. If the application is receiving buffers and then later |
| sending them to a video encoder or other application where they are compared with |
| audio subsystem timestamps or similar, this compensation is not present. In those |
| cases, applications need to adjust the timestamps themselves. Since {@link |
| android.os.SystemClock#elapsedRealtimeNanos} and {@link |
| android.os.SystemClock#uptimeMillis} only diverge while the device is asleep, an |
| offset between the two sources can be measured once per active session and applied |
| to timestamps for sufficient accuracy for A/V sync. |
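| 
| For example (a sketch; measure once per active session):
| 
|     import android.os.SystemClock;
| 
|     // One-time offset between the two clock bases, in nanoseconds.
|     long offsetNs = SystemClock.elapsedRealtimeNanos()
|             - SystemClock.uptimeMillis() * 1_000_000L;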
| </notes> |
| </value> |
| </enum> |
| <description>The time base source for sensor capture start timestamps.</description> |
| <details> |
| The timestamps provided for captures are always in nanoseconds and monotonic, but |
| may not be based on a time source that can be compared to other system time sources.
| |
| This characteristic defines the source for the timestamps, and therefore whether they |
| can be compared against other system time sources/timestamps. |
| </details> |
| <hal_details> |
| For camera devices that implement UNKNOWN, the camera framework expects the
| timestamp source to be SYSTEM_TIME_MONOTONIC. For camera devices that implement
| REALTIME, the camera framework expects the timestamp source to be
| SYSTEM_TIME_BOOTTIME. See system/core/include/utils/Timers.h for the definitions of
| SYSTEM_TIME_MONOTONIC and SYSTEM_TIME_BOOTTIME. Note that the HAL must follow the above
| expectation; otherwise video recording might suffer unexpected behavior.
| |
| Also, camera devices which implement REALTIME must pass the ITS sensor fusion test which |
| tests the alignment between camera timestamps and gyro sensor timestamps. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| <entry name="lensShadingApplied" type="byte" visibility="public" enum="true" |
| typedef="boolean"> |
| <enum> |
| <value>FALSE</value> |
| <value>TRUE</value> |
| </enum> |
| <description>Whether the RAW images output from this camera device are subject to |
| lens shading correction.</description> |
| <details> |
| If `true`, all images produced by the camera device in the `RAW` image formats will have |
| at least some lens shading correction already applied to it. If `false`, the images will |
| not be adjusted for lens shading correction. See android.request.maxNumOutputRaw for a |
| list of RAW image formats. |
| |
| When `true`, the `lensShadingCorrectionMap` key may still have values greater than 1.0, |
| and those will need to be applied to any captured RAW frames for them to match the shading |
| correction of processed buffers such as `YUV` or `JPEG` images. This may occur, for |
| example, when some basic fixed lens shading correction is applied by hardware to RAW data, |
| and additional correction is done dynamically in the camera processing pipeline after |
| demosaicing. |
| |
| This key will be `null` for all devices that do not report this information.
| Devices with RAW capability will always report this information in this key. |
| </details> |
| </entry> |
| <entry name="preCorrectionActiveArraySize" type="int32" visibility="public" |
| type_notes="Four ints defining the active pixel rectangle" container="array" |
| typedef="rectangle" hwlevel="legacy"> |
| <array> |
| <size>4</size> |
| </array> |
| <description> |
| The area of the image sensor which corresponds to active pixels prior to the |
| application of any geometric distortion correction. |
| </description> |
| <units>Pixel coordinates on the image sensor</units> |
| <details> |
| This is the rectangle representing the size of the active region of the sensor (i.e. |
| the region that actually receives light from the scene) before any geometric correction |
| has been applied, and should be treated as the active region rectangle for any of the |
| raw formats. All metadata associated with raw processing (e.g. the lens shading |
| correction map, and radial distortion fields) treats the top, left of this rectangle as |
| the origin, (0,0). |
| |
| The size of this region determines the maximum field of view and the maximum number of |
| pixels that an image from this sensor can contain, prior to the application of |
| geometric distortion correction. The effective maximum pixel dimensions of a |
| post-distortion-corrected image are given by the android.sensor.info.activeArraySize
| field, and the effective maximum field of view for a post-distortion-corrected image |
| can be calculated by applying the geometric distortion correction fields to this |
| rectangle, and cropping to the rectangle given in android.sensor.info.activeArraySize. |
| |
| E.g. to calculate position of a pixel, (x,y), in a processed YUV output image with the |
| dimensions in android.sensor.info.activeArraySize given the position of a pixel, |
| (x', y'), in the raw pixel array with dimensions given in |
| android.sensor.info.pixelArraySize: |
| |
| 1. Choose a pixel (x', y') within the active array region of the raw buffer given in |
| android.sensor.info.preCorrectionActiveArraySize, otherwise this pixel is considered |
| to be outside of the FOV, and will not be shown in the processed output image. |
| 1. Apply geometric distortion correction to get the post-distortion pixel coordinate, |
| (x_i, y_i). When applying geometric correction metadata, note that metadata for raw |
| buffers is defined relative to the top, left of the |
| android.sensor.info.preCorrectionActiveArraySize rectangle. |
| 1. If the resulting corrected pixel coordinate is within the region given in |
| android.sensor.info.activeArraySize, then the position of this pixel in the |
| processed output image buffer is `(x_i - activeArray.left, y_i - activeArray.top)`, |
| when the top, left coordinate of that buffer is treated as (0, 0). |
| |
| Thus, for pixel x',y' = (25, 25) on a sensor where android.sensor.info.pixelArraySize |
| is (100,100), android.sensor.info.preCorrectionActiveArraySize is (10, 10, 100, 100), |
| android.sensor.info.activeArraySize is (20, 20, 80, 80), and the geometric distortion |
| correction doesn't change the pixel coordinate, the resulting pixel selected in |
| pixel coordinates would be x,y = (25, 25) relative to the top,left of the raw buffer |
| with dimensions given in android.sensor.info.pixelArraySize, and would be (5, 5) |
| relative to the top,left of post-processed YUV output buffer with dimensions given in |
| android.sensor.info.activeArraySize. |
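| 
| A minimal sketch of the identity-distortion case worked through above (assumes the
| geometric correction step is a no-op; `raw` is in full pixel array coordinates):
| 
|     import android.graphics.Point;
|     import android.graphics.Rect;
| 
|     // Map a raw-buffer pixel to processed-output coordinates, assuming no distortion.
|     // Returns null for pixels that fall outside the corrected output FOV.
|     static Point rawToActive(Point raw, Rect activeArray) {
|         if (!activeArray.contains(raw.x, raw.y)) return null;
|         return new Point(raw.x - activeArray.left, raw.y - activeArray.top);
|     }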
| |
| The currently supported fields that correct for geometric distortion are: |
| |
| 1. android.lens.distortion. |
| |
| If the camera device doesn't support geometric distortion correction, or all of the |
| geometric distortion fields are no-ops, this rectangle will be the same as the |
| post-distortion-corrected rectangle given in android.sensor.info.activeArraySize. |
| |
| This rectangle is defined relative to the full pixel array; (0,0) is the top-left of |
| the full pixel array, and the size of the full pixel array is given by |
| android.sensor.info.pixelArraySize. |
| |
| The pre-correction active array may be smaller than the full pixel array, since the |
| full array may include black calibration pixels or other inactive regions. |
| </details> |
| <ndk_details> |
| The data representation is `int[4]`, which maps to `(left, top, width, height)`. |
| </ndk_details> |
| <hal_details> |
| This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be |
| &gt;= `(0,0)`. |
| The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySize`. |
| |
| If omitted by the HAL implementation, the camera framework will assume that this is |
| the same as the post-correction active array region given in |
| android.sensor.info.activeArraySize. |
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="activeArraySizeMaximumResolution" type="int32" visibility="public" |
| type_notes="Four ints defining the active pixel rectangle" |
| container="array" typedef="rectangle" hal_version="3.6"> |
| <array> |
| <size>4</size> |
| </array> |
| <description> |
| The area of the image sensor which corresponds to active pixels after any geometric |
| distortion correction has been applied, when the sensor runs in maximum resolution mode. |
| </description> |
| <units>Pixel coordinates on the image sensor</units> |
| <details> |
| Analogous to android.sensor.info.activeArraySize, when android.sensor.pixelMode |
| is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| Refer to android.sensor.info.activeArraySize for details, with sensor array related keys |
| replaced with their |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} |
| counterparts. |
| This key will only be present for devices which advertise the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability or devices where {@link CameraCharacteristics#getAvailableCaptureRequestKeys} |
| lists android.sensor.pixelMode. |
| </details> |
| <ndk_details> |
| The data representation is `int[4]`, which maps to `(left, top, width, height)`. |
| </ndk_details> |
| <hal_details> |
| This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be |
| &gt;= `(0,0)`. |
| The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySizeMaximumResolution`. |
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="pixelArraySizeMaximumResolution" type="int32" visibility="public" |
| container="array" typedef="size" hal_version="3.6"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>Dimensions of the full pixel array, possibly |
| including black calibration pixels, when the sensor runs in maximum resolution mode. |
| Analogous to android.sensor.info.pixelArraySize, when android.sensor.pixelMode is |
| set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>Pixels</units> |
| <details> |
| The pixel count of the full pixel array of the image sensor, which covers |
| android.sensor.info.physicalSize area. This represents the full pixel dimensions of |
| the raw buffers produced by this sensor, when it runs in maximum resolution mode. That |
| is, when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| This key will only be present for devices which advertise the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability or devices where {@link CameraCharacteristics#getAvailableCaptureRequestKeys} |
| lists android.sensor.pixelMode. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="preCorrectionActiveArraySizeMaximumResolution" type="int32" |
| visibility="public" type_notes="Four ints defining the active pixel rectangle" |
| container="array" typedef="rectangle" hal_version="3.6"> |
| <array> |
| <size>4</size> |
| </array> |
| <description> |
| The area of the image sensor which corresponds to active pixels prior to the |
| application of any geometric distortion correction, when the sensor runs in maximum |
| resolution mode. This key must be used for crop / metering regions only when
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>Pixel coordinates on the image sensor</units> |
| <details> |
| Analogous to android.sensor.info.preCorrectionActiveArraySize, |
| when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| This key will only be present for devices which advertise the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability or devices where {@link CameraCharacteristics#getAvailableCaptureRequestKeys} |
| lists android.sensor.pixelMode. |
| </details> |
| <ndk_details> |
| The data representation is `int[4]`, which maps to `(left, top, width, height)`. |
| </ndk_details> |
| <hal_details> |
| This array contains `(xmin, ymin, width, height)`. The `(xmin, ymin)` must be |
| &gt;= `(0,0)`. |
| The `(width, height)` must be &lt;= `android.sensor.info.pixelArraySizeMaximumResolution`. |
| |
| If omitted by the HAL implementation, the camera framework will assume that this is |
| the same as the post-correction active array region given in |
| android.sensor.info.activeArraySizeMaximumResolution. |
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="binningFactor" type="int32" visibility="public" |
| container="array" typedef="size" hal_version="3.6"> |
| <array> |
| <size>2</size> |
| </array> |
| <description> Dimensions of the group of pixels which are under the same color filter. |
| This specifies the width and height (pair of integers) of the group of pixels which fall |
| under the same color filter for ULTRA_HIGH_RESOLUTION sensors. |
| </description> |
| <units>Pixels</units> |
| <details> Sensors can have pixels grouped together under the same color filter in order |
| to improve various aspects of imaging such as noise reduction, low light |
| performance etc. These groups can be of various sizes such as 2X2 (quad bayer), |
| 3X3 (nona-bayer). This key specifies the width and height of the pixels grouped under
| the same color filter. |
| In case the device has the |
| {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability : |
| |
| * This key will not be present if REMOSAIC_REPROCESSING is not supported, since RAW |
| images will have a regular bayer pattern. |
| |
| In case the device does not have the |
| {@link CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability : |
| |
| * This key will be present if |
| {@link CameraCharacteristics#getAvailableCaptureRequestKeys} |
| lists android.sensor.pixelMode, since RAW |
| images may not necessarily have a regular bayer pattern when |
| {@link CaptureRequest#SENSOR_PIXEL_MODE android.sensor.pixelMode} is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| </entry> |
| </namespace> |
| <entry name="referenceIlluminant1" type="byte" visibility="public" |
| enum="true" permission_needed="true" > |
| <enum> |
| <value id="1">DAYLIGHT</value> |
| <value id="2">FLUORESCENT</value> |
| <value id="3">TUNGSTEN |
| <notes>Incandescent light</notes> |
| </value> |
| <value id="4">FLASH</value> |
| <value id="9">FINE_WEATHER</value> |
| <value id="10">CLOUDY_WEATHER</value> |
| <value id="11">SHADE</value> |
| <value id="12">DAYLIGHT_FLUORESCENT |
| <notes>D 5700 - 7100K</notes> |
| </value> |
| <value id="13">DAY_WHITE_FLUORESCENT |
| <notes>N 4600 - 5400K</notes> |
| </value> |
| <value id="14">COOL_WHITE_FLUORESCENT |
| <notes>W 3900 - 4500K</notes> |
| </value> |
| <value id="15">WHITE_FLUORESCENT |
| <notes>WW 3200 - 3700K</notes> |
| </value> |
| <value id="17">STANDARD_A</value> |
| <value id="18">STANDARD_B</value> |
| <value id="19">STANDARD_C</value> |
| <value id="20">D55</value> |
| <value id="21">D65</value> |
| <value id="22">D75</value> |
| <value id="23">D50</value> |
| <value id="24">ISO_STUDIO_TUNGSTEN</value> |
| </enum> |
| <description> |
| The standard reference illuminant used as the scene light source when |
| calculating the android.sensor.colorTransform1, |
| android.sensor.calibrationTransform1, and |
| android.sensor.forwardMatrix1 matrices. |
| </description> |
| <details> |
| The values in this key correspond to the values defined for the |
| EXIF LightSource tag. These illuminants are standard light sources |
| that are often used in calibrating camera devices.
| |
| If this key is present, then android.sensor.colorTransform1, |
| android.sensor.calibrationTransform1, and |
| android.sensor.forwardMatrix1 will also be present. |
| |
| Some devices may choose to provide a second set of calibration |
| information for improved quality, including |
| android.sensor.referenceIlluminant2 and its corresponding matrices. |
| |
| Starting from Android Q, this key will not be present for a MONOCHROME camera, even if |
| the camera device has RAW capability. |
| </details> |
| <hal_details> |
| The first reference illuminant (android.sensor.referenceIlluminant1) |
| and corresponding matrices must be present to support the RAW capability |
| and DNG output. |
| |
| When producing raw images with a color profile that has only been |
| calibrated against a single light source, it is valid to omit |
| android.sensor.referenceIlluminant2 along with the |
| android.sensor.colorTransform2, android.sensor.calibrationTransform2, |
| and android.sensor.forwardMatrix2 matrices. |
| |
| If only android.sensor.referenceIlluminant1 is included, it should be |
| chosen so that it is representative of typical scene lighting. In |
| general, D50 or DAYLIGHT will be chosen for this case. |
| |
| If both android.sensor.referenceIlluminant1 and |
| android.sensor.referenceIlluminant2 are included, they should be |
| chosen to represent the typical range of scene lighting conditions. |
| In general, a low color temperature illuminant such as Standard-A will
| be chosen for the first reference illuminant and a higher color
| temperature illuminant such as D65 will be chosen for the second
| reference illuminant.
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="referenceIlluminant2" type="byte" visibility="public" |
| permission_needed="true" > |
| <description> |
| The standard reference illuminant used as the scene light source when |
| calculating the android.sensor.colorTransform2, |
| android.sensor.calibrationTransform2, and |
| android.sensor.forwardMatrix2 matrices. |
| </description> |
| <range>Any value listed in android.sensor.referenceIlluminant1</range> |
| <details> |
| See android.sensor.referenceIlluminant1 for more details. |
| |
| If this key is present, then android.sensor.colorTransform2, |
| android.sensor.calibrationTransform2, and |
| android.sensor.forwardMatrix2 will also be present. |
| |
| Starting from Android Q, this key will not be present for a MONOCHROME camera, even if |
| the camera device has RAW capability. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="calibrationTransform1" type="rational" |
| visibility="public" optional="true" |
| type_notes="3x3 matrix in row-major-order" container="array" |
| typedef="colorSpaceTransform" permission_needed="true" > |
| <array> |
| <size>3</size> |
| <size>3</size> |
| </array> |
| <description> |
| A per-device calibration transform matrix that maps from the |
| reference sensor colorspace to the actual device sensor colorspace. |
| </description> |
| <details> |
| This matrix is used to correct for per-device variations in the |
| sensor colorspace, and is used for processing raw buffer data. |
| |
| The matrix is expressed as a 3x3 matrix in row-major-order, and |
| contains a per-device calibration transform that maps colors |
| from reference sensor color space (i.e. the "golden module" |
| colorspace) into this camera device's native sensor color |
| space under the first reference illuminant |
| (android.sensor.referenceIlluminant1). |
| |
| Starting from Android Q, this key will not be present for a MONOCHROME camera, even if |
| the camera device has RAW capability. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="calibrationTransform2" type="rational" |
| visibility="public" optional="true" |
| type_notes="3x3 matrix in row-major-order" container="array" |
| typedef="colorSpaceTransform" permission_needed="true" > |
| <array> |
| <size>3</size> |
| <size>3</size> |
| </array> |
| <description> |
| A per-device calibration transform matrix that maps from the |
| reference sensor colorspace to the actual device sensor colorspace |
| (this is the colorspace of the raw buffer data). |
| </description> |
| <details> |
| This matrix is used to correct for per-device variations in the |
| sensor colorspace, and is used for processing raw buffer data. |
| |
| The matrix is expressed as a 3x3 matrix in row-major-order, and |
| contains a per-device calibration transform that maps colors |
| from reference sensor color space (i.e. the "golden module" |
| colorspace) into this camera device's native sensor color |
| space under the second reference illuminant |
| (android.sensor.referenceIlluminant2). |
| |
| This matrix will only be present if the second reference |
| illuminant is present. |
| |
| Starting from Android Q, this key will not be present for a MONOCHROME camera, even if |
| the camera device has RAW capability. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="colorTransform1" type="rational" |
| visibility="public" optional="true" |
| type_notes="3x3 matrix in row-major-order" container="array" |
| typedef="colorSpaceTransform" permission_needed="true" > |
| <array> |
| <size>3</size> |
| <size>3</size> |
| </array> |
| <description> |
| A matrix that transforms color values from CIE XYZ color space to |
| reference sensor color space. |
| </description> |
| <details> |
| This matrix is used to convert from the standard CIE XYZ color |
| space to the reference sensor colorspace, and is used when processing |
| raw buffer data. |
| |
| The matrix is expressed as a 3x3 matrix in row-major-order, and |
| contains a color transform matrix that maps colors from the CIE |
| XYZ color space to the reference sensor color space (i.e. the |
| "golden module" colorspace) under the first reference illuminant |
| (android.sensor.referenceIlluminant1). |
| |
| The white points chosen in both the reference sensor color space |
| and the CIE XYZ colorspace when calculating this transform will |
| match the standard white point for the first reference illuminant |
| (i.e. no chromatic adaptation will be applied by this transform). |
| |
| Starting from Android Q, this key will not be present for a MONOCHROME camera, even if |
| the camera device has RAW capability. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="colorTransform2" type="rational" |
| visibility="public" optional="true" |
| type_notes="3x3 matrix in row-major-order" container="array" |
| typedef="colorSpaceTransform" permission_needed="true" > |
| <array> |
| <size>3</size> |
| <size>3</size> |
| </array> |
| <description> |
| A matrix that transforms color values from CIE XYZ color space to |
| reference sensor color space. |
| </description> |
| <details> |
| This matrix is used to convert from the standard CIE XYZ color |
| space to the reference sensor colorspace, and is used when processing |
| raw buffer data. |
| |
| The matrix is expressed as a 3x3 matrix in row-major-order, and |
| contains a color transform matrix that maps colors from the CIE |
| XYZ color space to the reference sensor color space (i.e. the |
| "golden module" colorspace) under the second reference illuminant |
| (android.sensor.referenceIlluminant2). |
| |
| The white points chosen in both the reference sensor color space |
| and the CIE XYZ colorspace when calculating this transform will |
| match the standard white point for the second reference illuminant |
| (i.e. no chromatic adaptation will be applied by this transform). |
| |
| This matrix will only be present if the second reference |
| illuminant is present. |
| |
| Starting from Android Q, this key will not be present for a MONOCHROME camera, even if |
| the camera device has RAW capability. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="forwardMatrix1" type="rational" |
| visibility="public" optional="true" |
| type_notes="3x3 matrix in row-major-order" container="array" |
| typedef="colorSpaceTransform" permission_needed="true" > |
| <array> |
| <size>3</size> |
| <size>3</size> |
| </array> |
| <description> |
| A matrix that transforms white balanced camera colors from the reference |
| sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint. |
| </description> |
| <details> |
| This matrix is used to convert to the standard CIE XYZ colorspace, and |
| is used when processing raw buffer data. |
| |
| This matrix is expressed as a 3x3 matrix in row-major-order, and contains |
| a color transform matrix that maps white balanced colors from the |
| reference sensor color space to the CIE XYZ color space with a D50 white |
| point. |
| |
| Under the first reference illuminant (android.sensor.referenceIlluminant1) |
| this matrix is chosen so that the standard white point for this reference |
| illuminant in the reference sensor colorspace is mapped to D50 in the |
| CIE XYZ colorspace. |
| |
| Starting from Android Q, this key will not be present for a MONOCHROME camera, even if |
| the camera device has RAW capability. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="forwardMatrix2" type="rational" |
| visibility="public" optional="true" |
| type_notes="3x3 matrix in row-major-order" container="array" |
| typedef="colorSpaceTransform" permission_needed="true" > |
| <array> |
| <size>3</size> |
| <size>3</size> |
| </array> |
| <description> |
| A matrix that transforms white balanced camera colors from the reference |
| sensor colorspace to the CIE XYZ colorspace with a D50 whitepoint. |
| </description> |
| <details> |
| This matrix is used to convert to the standard CIE XYZ colorspace, and |
| is used when processing raw buffer data. |
| |
| This matrix is expressed as a 3x3 matrix in row-major-order, and contains |
| a color transform matrix that maps white balanced colors from the |
| reference sensor color space to the CIE XYZ color space with a D50 white |
| point. |
| |
| Under the second reference illuminant (android.sensor.referenceIlluminant2) |
| this matrix is chosen so that the standard white point for this reference |
| illuminant in the reference sensor colorspace is mapped to D50 in the |
| CIE XYZ colorspace. |
| |
| This matrix will only be present if the second reference |
| illuminant is present. |
| |
| Starting from Android Q, this key will not be present for a MONOCHROME camera, even if |
| the camera device has RAW capability. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="baseGainFactor" type="rational" |
| optional="true"> |
| <description>Gain factor from electrons to raw units when |
| ISO=100</description> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="blackLevelPattern" type="int32" visibility="public" |
| optional="true" type_notes="2x2 raw count block" container="array" |
| typedef="blackLevelPattern"> |
| <array> |
| <size>4</size> |
| </array> |
| <description> |
| A fixed black level offset for each of the color filter arrangement |
| (CFA) mosaic channels. |
| </description> |
| <range>&gt;= 0 for each.</range> |
| <details> |
| This key specifies the zero light value for each of the CFA mosaic |
| channels in the camera sensor. The maximal value output by the |
| sensor is represented by the value in android.sensor.info.whiteLevel. |
| |
| The values are given in the same order as channels listed for the CFA |
| layout key (see android.sensor.info.colorFilterArrangement), i.e. the |
| nth value given corresponds to the black level offset for the nth |
| color channel listed in the CFA. |
| |
| The black level values of captured images may vary for different |
| capture settings (e.g., android.sensor.sensitivity). This key |
| represents a coarse approximation for such case. It is recommended to |
| use android.sensor.dynamicBlackLevel or use pixels from |
| android.sensor.opticalBlackRegions directly for captures when |
| supported by the camera device, which provides more accurate black |
| level values. For raw capture in particular, it is recommended to use |
| pixels from android.sensor.opticalBlackRegions to calculate black |
| level values for each frame. |
| |
| For a MONOCHROME camera device, all of the 2x2 channels must have the same values. |
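| 
| A sketch of reading the per-channel offsets through the public API (column/row index
| within the 2x2 CFA block):
| 
|     import android.hardware.camera2.CameraCharacteristics;
|     import android.hardware.camera2.params.BlackLevelPattern;
| 
|     // Black level offset for the CFA channel at (column, row) in the 2x2 block.
|     static int blackLevelAt(CameraCharacteristics chars, int column, int row) {
|         BlackLevelPattern pattern = chars.get(
|                 CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN);
|         return pattern.getOffsetForIndex(column, row);
|     }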
| </details> |
| <hal_details> |
| The values are given in row-column scan order, with the first value |
| corresponding to the element of the CFA in row=0, column=0. |
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="maxAnalogSensitivity" type="int32" visibility="public" |
| optional="true" hwlevel="full"> |
| <description>Maximum sensitivity that is implemented |
| purely through analog gain.</description> |
| <details>For android.sensor.sensitivity values less than or |
| equal to this, all applied gain must be analog. For |
| values above this, the gain applied can be a mix of analog and |
| digital.</details> |
| <tag id="V1" /> |
| <tag id="FULL" /> |
| </entry> |
| <entry name="orientation" type="int32" visibility="public" |
| hwlevel="legacy"> |
| <description>Clockwise angle through which the output image needs to be rotated to be |
| upright on the device screen in its native orientation. |
| </description> |
| <units>Degrees of clockwise rotation; always a multiple of |
| 90</units> |
| <range>0, 90, 180, 270</range> |
| <details> |
| Also defines the direction of rolling shutter readout, which is from top to bottom in |
| the sensor's coordinate system. |
| |
| Starting with Android API level 32, camera clients that query the orientation via |
| {@link android.hardware.camera2.CameraCharacteristics#get} on foldable devices which |
| include logical cameras can receive a value that dynamically changes depending on the |
| device/fold state. |
| Clients are advised to not cache or store the orientation value of such logical sensors. |
| In case repeated queries to CameraCharacteristics are not preferred, then clients can |
| also access the entire mapping from device state to sensor orientation in |
| {@link android.hardware.camera2.params.DeviceStateSensorOrientationMap}. |
| Do note that a dynamically changing sensor orientation value in camera characteristics |
| will not be the best way to establish the orientation per frame. Clients that want to |
| know the sensor orientation of a particular captured frame should query the |
| android.logicalMultiCamera.activePhysicalId from the corresponding capture result and |
| check the respective physical camera orientation. |
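| |
| As a sketch of how this key is typically consumed, the following computes a |
| JPEG rotation from the sensor orientation and the current device rotation |
| (the helper name and arguments are illustrative): |
| |
|     // Returns the clockwise JPEG rotation, given the device's UI rotation. |
|     private int getJpegOrientation(CameraCharacteristics c, int deviceOrientation) { |
|         if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN) { |
|             return 0; |
|         } |
|         int sensorOrientation = c.get(CameraCharacteristics.SENSOR_ORIENTATION); |
|         // Round device orientation to a multiple of 90. |
|         deviceOrientation = (deviceOrientation + 45) / 90 * 90; |
|         // Reverse device orientation for front-facing cameras. |
|         if (c.get(CameraCharacteristics.LENS_FACING) == |
|                 CameraCharacteristics.LENS_FACING_FRONT) { |
|             deviceOrientation = -deviceOrientation; |
|         } |
|         // Calculate desired JPEG orientation relative to camera orientation. |
|         return (sensorOrientation + deviceOrientation + 360) % 360; |
|     } |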
| </details> |
| <ndk_details> |
| Native camera clients must query android.info.deviceStateOrientations for the mapping |
| between device state and camera sensor orientation. Dynamic updates to the sensor |
| orientation are not supported in this code path. |
| </ndk_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="profileHueSatMapDimensions" type="int32" |
| visibility="system" optional="true" |
| type_notes="Number of samples for hue, saturation, and value" |
| container="array"> |
| <array> |
| <size>3</size> |
| </array> |
| <description> |
| The number of input samples for each dimension of |
| android.sensor.profileHueSatMap. |
| </description> |
| <range> |
| Hue &gt;= 1, |
| Saturation &gt;= 2, |
| Value &gt;= 1 |
| </range> |
| <details> |
| The number of input samples for the hue, saturation, and value |
| dimension of android.sensor.profileHueSatMap. The order of the |
| dimensions given is hue, saturation, value; where hue is the 0th |
| element. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.sensor.exposureTime" kind="controls"> |
| </clone> |
| <clone entry="android.sensor.frameDuration" |
| kind="controls"></clone> |
| <clone entry="android.sensor.sensitivity" kind="controls"> |
| </clone> |
| <entry name="timestamp" type="int64" visibility="public" |
| hwlevel="legacy"> |
| <description>Time at start of exposure of first |
| row of the image sensor active array, in nanoseconds.</description> |
| <units>Nanoseconds</units> |
| <range>&gt; 0</range> |
| <details>The timestamps are also included in all image |
| buffers produced for the same capture, and will be identical |
| on all the outputs. |
| |
| When android.sensor.info.timestampSource `==` UNKNOWN, |
| the timestamps measure time since an unspecified starting point, |
| and are monotonically increasing. They can be compared with the |
| timestamps for other captures from the same camera device, but are |
| not guaranteed to be comparable to any other time source. |
| |
| When android.sensor.info.timestampSource `==` REALTIME, the |
| timestamps measure time in the same timebase as {@link |
| android.os.SystemClock#elapsedRealtimeNanos}, and they can |
| be compared to other timestamps from other subsystems that |
| are using that base. |
| |
| For reprocessing, the timestamp will match the start of exposure of |
| the input image, i.e. {@link CaptureResult#SENSOR_TIMESTAMP the |
| timestamp} in the TotalCaptureResult that was used to create the |
| reprocess capture request. |
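| |
| For example, a minimal Java sketch of comparing a capture timestamp with |
| the realtime clock, assuming valid `characteristics` and `result` objects |
| obtained from the usual capture callbacks: |
| |
|     import android.hardware.camera2.CameraCharacteristics; |
|     import android.hardware.camera2.CameraMetadata; |
|     import android.hardware.camera2.CaptureResult; |
|     import android.os.SystemClock; |
| |
|     Integer source = |
|         characteristics.get(CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE); |
|     Long timestamp = result.get(CaptureResult.SENSOR_TIMESTAMP); |
|     if (source != null &amp;&amp; timestamp != null |
|             &amp;&amp; source == CameraMetadata.SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME) { |
|         // Only valid for REALTIME; both values share the same timebase here. |
|         long latencyNs = SystemClock.elapsedRealtimeNanos() - timestamp; |
|     } |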
| </details> |
| <hal_details> |
| All timestamps must be in reference to the kernel's |
| CLOCK_BOOTTIME monotonic clock, which properly accounts for |
| time spent asleep. This allows for synchronization with |
| sensors that continue to operate while the system is |
| otherwise asleep. |
| |
| If android.sensor.info.timestampSource `==` REALTIME, |
| The timestamp must be synchronized with the timestamps from other |
| sensor subsystems that are using the same timebase. |
| |
| For reprocessing, the input image's start of exposure can be looked up |
| with android.sensor.timestamp from the metadata included in the |
| capture request. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="temperature" type="float" |
| optional="true"> |
| <description>The temperature of the sensor, sampled at the time |
| exposure began for this frame. |
| |
| The thermal diode being queried should be inside the sensor PCB, or |
| somewhere close to it. |
| </description> |
| |
| <units>Celsius</units> |
| <range>Optional. This value is missing if no temperature is available.</range> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="neutralColorPoint" type="rational" visibility="public" |
| optional="true" container="array"> |
| <array> |
| <size>3</size> |
| </array> |
| <description> |
| The estimated camera neutral color in the native sensor colorspace at |
| the time of capture. |
| </description> |
| <details> |
| This value gives the neutral color point encoded as an RGB value in the |
| native sensor color space. The neutral color point indicates the |
| currently estimated white point of the scene illumination. It can be |
| used to interpolate between the provided color transforms when |
| processing raw sensor data. |
| |
| The order of the values is R, G, B; where R is in the lowest index. |
| |
| Starting from Android Q, this key will not be present for a MONOCHROME camera, even if |
| the camera device has RAW capability. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="noiseProfile" type="double" visibility="public" |
| optional="true" type_notes="Pairs of noise model coefficients" |
| container="array" typedef="pairDoubleDouble"> |
| <array> |
| <size>2</size> |
| <size>CFA Channels</size> |
| </array> |
| <description> |
| Noise model coefficients for each CFA mosaic channel. |
| </description> |
| <details> |
| This key contains two noise model coefficients for each CFA channel |
| corresponding to the sensor amplification (S) and sensor readout |
| noise (O). These are given as pairs of coefficients for each channel |
| in the same order as channels listed for the CFA layout key |
| (see android.sensor.info.colorFilterArrangement). This is |
| represented as an array of Pair&lt;Double, Double&gt;, where |
| the first member of the Pair at index n is the S coefficient and the |
| second member is the O coefficient for the nth color channel in the CFA. |
| |
| These coefficients are used in a two parameter noise model to describe |
| the amount of noise present in the image for each CFA channel. The |
| noise model used here is: |
| |
| N(x) = sqrt(Sx + O) |
| |
| Where x represents the recorded signal of a CFA channel normalized to |
| the range [0, 1], and S and O are the noise model coefficients for |
| that channel. |
| |
| A more detailed description of the noise model can be found in the |
| Adobe DNG specification for the NoiseProfile tag. |
| |
| For a MONOCHROME camera, there is only one color channel. So the noise model coefficients |
| will only contain one S and one O. |
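| |
| For example, a minimal Java sketch of evaluating this model for one |
| channel, assuming a valid TotalCaptureResult `result`, a channel index |
| `ch`, and a normalized sample value `x`: |
| |
|     import android.hardware.camera2.CaptureResult; |
|     import android.util.Pair; |
| |
|     Pair&lt;Double, Double&gt;[] profile = |
|         result.get(CaptureResult.SENSOR_NOISE_PROFILE); |
|     if (profile != null &amp;&amp; ch &lt; profile.length) { |
|         double s = profile[ch].first;  // amplification coefficient S |
|         double o = profile[ch].second; // readout noise coefficient O |
|         double sigma = Math.sqrt(s * x + o); // N(x) = sqrt(Sx + O) |
|     } |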
| |
| </details> |
| <hal_details> |
| For a CFA layout of RGGB, the list of coefficients would be given as |
| an array of doubles S0,O0,S1,O1,..., where S0 and O0 are the coefficients |
| for the red channel, S1 and O1 are the coefficients for the first green |
| channel, etc. |
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="profileHueSatMap" type="float" |
| visibility="system" optional="true" |
| type_notes="Mapping for hue, saturation, and value" |
| container="array"> |
| <array> |
| <size>hue_samples</size> |
| <size>saturation_samples</size> |
| <size>value_samples</size> |
| <size>3</size> |
| </array> |
| <description> |
| A mapping containing a hue shift, saturation scale, and value scale |
| for each pixel. |
| </description> |
| <units> |
| The hue shift is given in degrees; saturation and value scale factors are |
| unitless and are between 0 and 1 inclusive |
| </units> |
| <details> |
| hue_samples, saturation_samples, and value_samples are given in |
| android.sensor.profileHueSatMapDimensions. |
| |
| Each entry of this map contains three floats corresponding to the |
| hue shift, saturation scale, and value scale, respectively; where the |
| hue shift has the lowest index. The map entries are stored in the key |
| in nested loop order, with the value divisions in the outer loop, the |
| hue divisions in the middle loop, and the saturation divisions in the |
| inner loop. All zero input saturation entries are required to have a |
| value scale factor of 1.0. |
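| |
| As an illustration only, the linear index of the entry for sample indices |
| `(h, s, v)` under this storage order (value outermost, saturation |
| innermost) can be sketched as: |
| |
|     // 3 floats per entry: [hue shift, saturation scale, value scale]. |
|     // hueSamples and saturationSamples come from |
|     // android.sensor.profileHueSatMapDimensions. |
|     int index = 3 * ((v * hueSamples + h) * saturationSamples + s); |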
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="profileToneCurve" type="float" |
| visibility="system" optional="true" |
| type_notes="Samples defining a spline for a tone-mapping curve" |
| container="array"> |
| <array> |
| <size>samples</size> |
| <size>2</size> |
| </array> |
| <description> |
| A list of x,y samples defining a tone-mapping curve for gamma adjustment. |
| </description> |
| <range> |
| Each sample has an input range of `[0, 1]` and an output range of |
| `[0, 1]`. The first sample is required to be `(0, 0)`, and the last |
| sample is required to be `(1, 1)`. |
| </range> |
| <details> |
| This key contains a default tone curve that can be applied while |
| processing the image as a starting point for user adjustments. |
| The curve is specified as a list of value pairs in linear gamma. |
| The curve is interpolated using a cubic spline. |
| </details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="greenSplit" type="float" visibility="public" optional="true"> |
| <description> |
| The worst-case divergence between Bayer green channels. |
| </description> |
| <range> |
| &gt;= 0 |
| </range> |
| <details> |
| This value is an estimate of the worst case split between the |
| Bayer green channels in the red and blue rows in the sensor color |
| filter array. |
| |
| The green split is calculated as follows: |
| |
| 1. A 5x5 pixel (or larger) window W within the active sensor array is |
| chosen. The term 'pixel' here is taken to mean a group of 4 Bayer |
| mosaic channels (R, Gr, Gb, B). The location and size of the window |
| chosen are implementation defined, and should be selected to provide a |
| green split estimate that is both representative of the entire image |
| for this camera sensor, and can be calculated quickly. |
| 1. The arithmetic mean of the green channels from the red |
| rows (mean_Gr) within W is computed. |
| 1. The arithmetic mean of the green channels from the blue |
| rows (mean_Gb) within W is computed. |
| 1. The maximum ratio R of the two means is computed as follows: |
| `R = max((mean_Gr + 1)/(mean_Gb + 1), (mean_Gb + 1)/(mean_Gr + 1))` |
| |
| The ratio R is the green split divergence reported for this property, |
| which represents how much the green channels differ in the mosaic |
| pattern. This value is typically used to determine the treatment of |
| the green mosaic channels when demosaicing. |
| |
| The green split value can be roughly interpreted as follows: |
| |
| * R &lt; 1.03 is a negligible split (&lt;3% divergence). |
| * 1.03 &lt;= R &lt;= 1.20 will require some software |
| correction to avoid demosaic errors (3-20% divergence). |
| * R &gt; 1.20 will require strong software correction to produce |
| a usable image (&gt;20% divergence). |
| |
| Starting from Android Q, this key will not be present for a MONOCHROME camera, even if |
| the camera device has RAW capability. |
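| |
| For example, a minimal Java sketch of the ratio computation above, |
| assuming `meanGr` and `meanGb` have already been measured over a window W |
| of a raw capture: |
| |
|     double r = Math.max((meanGr + 1.0) / (meanGb + 1.0), |
|                         (meanGb + 1.0) / (meanGr + 1.0)); |
|     boolean needsCorrection = r &gt;= 1.03; // see interpretation above |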
| </details> |
| <hal_details> |
| The green split given may be a static value based on prior |
| characterization of the camera sensor using the green split |
| calculation method given here over a large, representative, sample |
| set of images. Other methods of calculation that produce equivalent |
| results, and can be interpreted in the same manner, may be used. |
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| </dynamic> |
| <controls> |
| <entry name="testPatternData" type="int32" visibility="public" optional="true" container="array"> |
| <array> |
| <size>4</size> |
| </array> |
| <description> |
| A pixel `[R, G_even, G_odd, B]` that supplies the test pattern |
| when android.sensor.testPatternMode is SOLID_COLOR. |
| </description> |
| <details> |
| Each color channel is treated as an unsigned 32-bit integer. |
| The camera device then uses the most significant X bits |
| that correspond to how many bits are in its Bayer raw sensor |
| output. |
| |
| For example, a sensor with RAW10 Bayer output would use the |
| 10 most significant bits from each color channel. |
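| |
| As an illustration only, a Java sketch of the bit selection above for a |
| RAW10 sensor, assuming `testPatternData` holds the four 32-bit channel |
| values: |
| |
|     int rawBits = 10; // bits per pixel of the Bayer raw output |
|     int[] rawValues = new int[4]; |
|     for (int i = 0; i &lt; 4; i++) { |
|         // Unsigned shift keeps only the most significant rawBits bits. |
|         rawValues[i] = testPatternData[i] &gt;&gt;&gt; (32 - rawBits); |
|     } |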
| </details> |
| <hal_details> |
| </hal_details> |
| </entry> |
| <entry name="testPatternMode" type="int32" visibility="public" optional="true" |
| enum="true"> |
| <enum> |
| <value>OFF |
| <notes>No test pattern mode is used, and the camera |
| device returns captures from the image sensor. |
| |
| This is the default if the key is not set.</notes> |
| </value> |
| <value>SOLID_COLOR |
| <notes> |
| Each pixel in `[R, G_even, G_odd, B]` is replaced by its |
| respective color channel provided in |
| android.sensor.testPatternData. |
| |
| For example: |
| |
| android.sensor.testPatternData = [0, 0xFFFFFFFF, 0xFFFFFFFF, 0] |
| |
| All green pixels are 100% green. All red/blue pixels are black. |
| |
| android.sensor.testPatternData = [0xFFFFFFFF, 0, 0xFFFFFFFF, 0] |
| |
| All red pixels are 100% red. Only the odd green pixels |
| are 100% green. All blue pixels are 100% black. |
| </notes> |
| </value> |
| <value>COLOR_BARS |
| <notes> |
| All pixel data is replaced with an 8-bar color pattern. |
| |
| The vertical bars (left-to-right) are as follows: |
| |
| * 100% white |
| * yellow |
| * cyan |
| * green |
| * magenta |
| * red |
| * blue |
| * black |
| |
| In general the image would look like the following: |
| |
| W Y C G M R B K |
| W Y C G M R B K |
| W Y C G M R B K |
| W Y C G M R B K |
| W Y C G M R B K |
| . . . . . . . . |
| . . . . . . . . |
| . . . . . . . . |
| |
| (B = Blue, K = Black) |
| |
| Each bar should take up 1/8 of the sensor pixel array width. |
| When this is not possible, the bar size should be rounded |
| down to the nearest integer and the pattern can repeat |
| on the right side. |
| |
| Each bar's height must always take up the full sensor |
| pixel array height. |
| |
| Each pixel in this test pattern must be set to either |
| 0% intensity or 100% intensity. |
| </notes> |
| </value> |
| <value>COLOR_BARS_FADE_TO_GRAY |
| <notes> |
| The test pattern is similar to COLOR_BARS, except that |
| each bar should start at its specified color at the top, |
| and fade to gray at the bottom. |
| |
| In addition, each bar is subdivided into a left and |
| right half. The left half should have a smooth gradient, |
| and the right half should have a quantized gradient. |
| |
| In particular, the right half should consist of blocks of the |
| same color, each 1/16th of the active sensor pixel array width wide. |
| |
| The least significant bits in the quantized gradient should |
| be copied from the most significant bits of the smooth gradient. |
| |
| The height of each bar should always be a multiple of 128. |
| When this is not the case, the pattern should repeat at the bottom |
| of the image. |
| </notes> |
| </value> |
| <value>PN9 |
| <notes> |
| All pixel data is replaced by a pseudo-random sequence |
| generated from a PN9 512-bit sequence (typically implemented |
| in hardware with a linear feedback shift register). |
| |
| The generator should be reset at the beginning of each frame, |
| and thus each subsequent raw frame with this test pattern should |
| be exactly the same as the last. |
| </notes> |
| </value> |
| <value visibility="test" hal_version="3.6">BLACK |
| <notes> |
| All pixel data is replaced by 0% intensity (black) values. |
| |
| This test pattern is identical to SOLID_COLOR with a value of `[0, 0, 0, 0]` for |
| android.sensor.testPatternData. It is recommended that devices implement full |
| SOLID_COLOR support instead, but BLACK can be used to provide minimal support for a |
| test pattern suitable for privacy use cases. |
| </notes> |
| </value> |
| <value id="256">CUSTOM1 |
| <notes>The first custom test pattern. All custom patterns that are |
| available only on this camera device are at least this numeric |
| value. |
| |
| All of the custom test patterns will be static |
| (that is, the raw image must not vary from frame to frame). |
| </notes> |
| </value> |
| </enum> |
| <description>When enabled, the sensor sends a test pattern instead of |
| doing a real exposure from the camera. |
| </description> |
| <range>android.sensor.availableTestPatternModes</range> |
| <details> |
| When a test pattern is enabled, all manual sensor controls specified |
| by android.sensor.* will be ignored. All other controls should |
| work as normal. |
| |
| For example, if manual flash is enabled, flash firing should still |
| occur (and the test pattern should remain unmodified, since the flash |
| would not actually affect it). |
| |
| Defaults to OFF. |
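| |
| For example, a minimal Java sketch of requesting the all-green SOLID_COLOR |
| pattern shown above, assuming a valid CameraDevice `camera` and an output |
| `surface` configured elsewhere: |
| |
|     import android.hardware.camera2.CameraDevice; |
|     import android.hardware.camera2.CameraMetadata; |
|     import android.hardware.camera2.CaptureRequest; |
| |
|     CaptureRequest.Builder builder = |
|         camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); |
|     builder.set(CaptureRequest.SENSOR_TEST_PATTERN_MODE, |
|         CameraMetadata.SENSOR_TEST_PATTERN_MODE_SOLID_COLOR); |
|     builder.set(CaptureRequest.SENSOR_TEST_PATTERN_DATA, |
|         new int[] {0, 0xFFFFFFFF, 0xFFFFFFFF, 0}); |
|     builder.addTarget(surface); |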
| </details> |
| <hal_details> |
| All test patterns are specified in the Bayer domain. |
| |
| The HAL may choose to substitute test patterns from the sensor |
| with test patterns from on-device memory. In that case, it should be |
| indistinguishable to the ISP whether the data came from the |
| sensor interconnect bus (such as CSI2) or memory. |
| |
| For privacy use cases, if the camera device: |
| |
| * supports SOLID_COLOR or BLACK test patterns, |
| * is a logical multi-camera, and |
| * lists testPatternMode as a physical request key, |
| |
| Each physical camera must support the same SOLID_COLOR and/or BLACK test patterns |
| as the logical camera. |
| </hal_details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.sensor.testPatternData" kind="controls"> |
| </clone> |
| <clone entry="android.sensor.testPatternMode" kind="controls"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="availableTestPatternModes" type="int32" visibility="public" optional="true" |
| type_notes="list of enums" container="array"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>List of sensor test pattern modes for android.sensor.testPatternMode |
| supported by this camera device. |
| </description> |
| <range>Any value listed in android.sensor.testPatternMode</range> |
| <details> |
| Defaults to OFF, and always includes OFF if defined. |
| </details> |
| <hal_details> |
| All custom modes must be &gt;= CUSTOM1. |
| </hal_details> |
| </entry> |
| </static> |
| <dynamic> |
| <entry name="rollingShutterSkew" type="int64" visibility="public" hwlevel="limited"> |
| <description>Duration between the start of exposure for the first row of the image sensor, |
| and the start of exposure for one past the last row of the image sensor.</description> |
| <units>Nanoseconds</units> |
| <range> &gt;= 0 and &lt; |
| {@link android.hardware.camera2.params.StreamConfigurationMap#getOutputMinFrameDuration}.</range> |
| <details> |
| This is the exposure time skew between the first and `(last+1)` row exposure start times. The |
| first row and the last row are the first and last rows inside of the |
| android.sensor.info.activeArraySize. |
| |
| For typical camera sensors that use rolling shutters, this is also equivalent to the frame |
| readout time. |
| |
| If the image sensor is operating in a binned or cropped mode due to the current output |
| target resolutions, it's possible this skew is reported to be larger than the exposure |
| time, for example, since it is based on the full array even if a partial array is read |
| out. Be sure to scale the number to cover the section of the sensor actually being used |
| for the outputs you care about. So if your output covers N rows of the active array of |
| height H, scale this value by N/H to get the total skew for that viewport. |
| |
| *Note:* Prior to Android 11, this field was described as measuring duration from |
| first to last row of the image sensor, which is not equal to the frame readout time for a |
| rolling shutter sensor. Implementations generally reported the latter value, so to resolve |
| the inconsistency, the description has been updated to range from (first, last+1) row |
| exposure start, instead. |
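| |
| For example, a minimal Java sketch of the N/H scaling above, assuming a |
| valid TotalCaptureResult `result` and output/active-array row counts |
| `outputRows` and `activeArrayRows`: |
| |
|     import android.hardware.camera2.CaptureResult; |
| |
|     Long skewNs = result.get(CaptureResult.SENSOR_ROLLING_SHUTTER_SKEW); |
|     if (skewNs != null) { |
|         // Skew across just the rows covered by this output. |
|         long viewportSkewNs = skewNs * outputRows / activeArrayRows; |
|     } |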
| </details> |
| <hal_details> |
| The HAL must report `0` if the sensor is using global shutter, where all pixels begin |
| exposure at the same time. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| </dynamic> |
| <static> |
| <entry name="opticalBlackRegions" type="int32" visibility="public" optional="true" |
| container="array" typedef="rectangle"> |
| <array> |
| <size>4</size> |
| <size>num_regions</size> |
| </array> |
| <description>List of disjoint rectangles indicating the sensor |
| optically shielded black pixel regions. |
| </description> |
| <details> |
| In most camera sensors, the active array is surrounded by some |
| optically shielded pixel areas. By blocking light, these pixels |
| provide a reliable black reference for black level compensation |
| in the active array region. |
| |
| This key provides a list of disjoint rectangles specifying the |
| regions of optically shielded (with metal shield) black pixel |
| regions if the camera device is capable of reading out these black |
| pixels in the output raw images. In comparison to the fixed black |
| level values reported by android.sensor.blackLevelPattern, this key |
| may provide a more accurate way for the application to calculate the |
| black level of each captured raw image. |
| |
| When this key is reported, the android.sensor.dynamicBlackLevel and |
| android.sensor.dynamicWhiteLevel will also be reported. |
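| |
| As an illustration only, a Java sketch of enumerating these regions, |
| assuming a valid CameraCharacteristics `characteristics`; averaging the |
| raw pixel values inside each rectangle of a RAW capture yields a per-frame |
| black level estimate: |
| |
|     import android.graphics.Rect; |
|     import android.hardware.camera2.CameraCharacteristics; |
| |
|     Rect[] regions = |
|         characteristics.get(CameraCharacteristics.SENSOR_OPTICAL_BLACK_REGIONS); |
|     if (regions != null) { |
|         for (Rect region : regions) { |
|             // region.left, region.top, region.width(), region.height() |
|             // locate the shielded pixels in the raw image. |
|         } |
|     } |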
| </details> |
| <ndk_details> |
| The data representation is `int[4]`, which maps to `(left, top, width, height)`. |
| </ndk_details> |
| <hal_details> |
| This array contains (xmin, ymin, width, height). The (xmin, ymin) |
| must be &gt;= (0,0) and &lt;= |
| android.sensor.info.pixelArraySize. The (width, height) must be |
| &lt;= android.sensor.info.pixelArraySize. Each region must be |
| outside the region reported by |
| android.sensor.info.preCorrectionActiveArraySize. |
| |
| The HAL must report the minimal number of disjoint regions for the |
| optically shielded black pixel regions. For example, if a region can |
| be covered by one rectangle, the HAL must not split this region into |
| multiple rectangles. |
| </hal_details> |
| </entry> |
| </static> |
| <dynamic> |
| <entry name="dynamicBlackLevel" type="float" visibility="public" |
| optional="true" type_notes="2x2 raw count block" container="array"> |
| <array> |
| <size>4</size> |
| </array> |
| <description> |
| A per-frame dynamic black level offset for each of the color filter |
| arrangement (CFA) mosaic channels. |
| </description> |
| <range>&gt;= 0 for each.</range> |
| <details> |
| Camera sensor black levels may vary dramatically for different |
| capture settings (e.g. android.sensor.sensitivity). The fixed black |
| level reported by android.sensor.blackLevelPattern may be too |
| inaccurate to represent the actual value on a per-frame basis. The |
| camera device internal pipeline relies on reliable black level values |
| to process the raw images appropriately. To get the best image |
| quality, the camera device may choose to estimate the per frame black |
| level values either based on optically shielded black regions |
| (android.sensor.opticalBlackRegions) or its internal model. |
| |
| This key reports the camera device estimated per-frame zero light |
| value for each of the CFA mosaic channels in the camera sensor. The |
| android.sensor.blackLevelPattern may only represent a coarse |
| approximation of the actual black level values. This value is the |
| black level used in the camera device's internal image processing pipeline |
| and is generally more accurate than the fixed black level values. |
| However, since they are estimated values by the camera device, they |
| may not be as accurate as the black level values calculated from the |
| optical black pixels reported by android.sensor.opticalBlackRegions. |
| |
| The values are given in the same order as channels listed for the CFA |
| layout key (see android.sensor.info.colorFilterArrangement), i.e. the |
| nth value given corresponds to the black level offset for the nth |
| color channel listed in the CFA. |
| |
| For a MONOCHROME camera, all of the 2x2 channels must have the same values. |
| |
| This key will be available if android.sensor.opticalBlackRegions is available or the |
| camera device advertises this key via {@link |
| android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}. |
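| |
| For example, a minimal Java sketch of reading the per-frame levels, |
| assuming a valid TotalCaptureResult `result`: |
| |
|     import android.hardware.camera2.CaptureResult; |
| |
|     float[] blackLevels = result.get(CaptureResult.SENSOR_DYNAMIC_BLACK_LEVEL); |
|     Integer whiteLevel = result.get(CaptureResult.SENSOR_DYNAMIC_WHITE_LEVEL); |
|     if (blackLevels != null &amp;&amp; whiteLevel != null) { |
|         // blackLevels[i] is the offset for the i-th CFA channel, in the |
|         // order given by android.sensor.info.colorFilterArrangement. |
|     } |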
| </details> |
| <hal_details> |
| The values are given in row-column scan order, with the first value |
| corresponding to the element of the CFA in row=0, column=0. |
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="dynamicWhiteLevel" type="int32" visibility="public" |
| optional="true" > |
| <description> |
| Maximum raw value output by sensor for this frame. |
| </description> |
| <range> &gt;= 0</range> |
| <details> |
| Since the android.sensor.blackLevelPattern may change for different |
| capture settings (e.g., android.sensor.sensitivity), the white |
| level will change accordingly. This key is similar to |
| android.sensor.info.whiteLevel, but specifies the camera device |
| estimated white level for each frame. |
| |
| This key will be available if android.sensor.opticalBlackRegions is |
| available or the camera device advertises this key via |
| {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}. |
| </details> |
| <hal_details> |
| The full bit depth of the sensor must be available in the raw data, |
| so the value for linear sensors should not be significantly lower |
| than the maximum raw value supported, i.e. 2^(sensor bits per pixel). |
| </hal_details> |
| <tag id="RAW" /> |
| </entry> |
| </dynamic> |
| <static> |
| <entry name="opaqueRawSize" type="int32" visibility="system" container="array"> |
| <array> |
| <size>n</size> |
| <size>3</size> |
| </array> |
| <description>Size in bytes for all the listed opaque RAW buffer sizes</description> |
| <range>Must be large enough to fit the opaque RAW buffer of the corresponding size produced by |
| the camera</range> |
| <details> |
| These configurations are listed as `(width, height, size_in_bytes)` tuples. |
| This is used for sizing the gralloc buffers for opaque RAW buffers. |
| All RAW_OPAQUE output stream configurations listed in |
| android.scaler.availableStreamConfigurations will have a corresponding tuple in |
| this key. |
| </details> |
| <hal_details> |
| This key is added in legacy HAL3.4. |
| |
| For legacy HAL3.4 or above: devices advertising RAW_OPAQUE format output must list this |
| key. For legacy HAL3.3 or earlier devices: if RAW_OPAQUE output is advertised, the camera |
| framework will derive this key by assuming each pixel takes two bytes and no padding bytes |
| between rows. |
| </hal_details> |
| </entry> |
| <entry name="opaqueRawSizeMaximumResolution" type="int32" visibility="system" |
| container="array" hal_version="3.6"> |
| <array> |
| <size>n</size> |
| <size>3</size> |
| </array> |
| <description>Size in bytes for all the listed opaque RAW buffer sizes when |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <range>Must be large enough to fit the opaque RAW buffer of the corresponding size produced by |
| the camera</range> |
| <details> |
| Refer to android.sensor.opaqueRawSize for details. |
| </details> |
| <hal_details> |
| Refer to android.sensor.opaqueRawSize for details. |
| </hal_details> |
| </entry> |
| </static> |
| <controls> |
| <entry name="pixelMode" type="byte" visibility="public" enum="true" |
| hal_version="3.6"> |
| <enum> |
| <value>DEFAULT |
| <notes> This is the default sensor pixel mode. |
| </notes> |
| </value> |
| <value>MAXIMUM_RESOLUTION |
| <notes> |
| In this mode, sensors typically do not bin pixels and, as a result, can offer larger |
| image sizes. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Switches sensor pixel mode between maximum resolution mode and default mode. |
| </description> |
| <details> |
| This key controls whether the camera sensor operates in |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} |
| mode or not. By default, all camera devices operate in |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} mode. |
| When operating in |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} mode, sensors |
| would typically perform pixel binning in order to improve low light |
| performance, noise reduction etc. However, in |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} |
| mode, sensors typically operate in unbinned mode allowing for a larger image size. |
| The stream configurations supported in |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} |
| mode are also different from those of |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_DEFAULT} mode. |
| They can be queried through |
| {@link android.hardware.camera2.CameraCharacteristics#get} with |
| {@link CameraCharacteristics#SCALER_STREAM_CONFIGURATION_MAP_MAXIMUM_RESOLUTION}. |
| Unless reported by both |
| {@link android.hardware.camera2.params.StreamConfigurationMap}s, the outputs from |
| `android.scaler.streamConfigurationMapMaximumResolution` and |
| `android.scaler.streamConfigurationMap` |
| must not be mixed in the same CaptureRequest. In other words, these outputs are |
| exclusive to each other. |
| This key does not need to be set for reprocess requests. |
| This key will be present on devices supporting the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability. It may also be present on devices which do not support the aforementioned |
| capability. In that case: |
| |
| * The mandatory stream combinations listed in |
| android.scaler.mandatoryMaximumResolutionStreamCombinations |
| would not apply. |
| |
| * The Bayer pattern of {@code RAW} streams when |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION} |
| is selected will be the one listed in android.sensor.info.binningFactor. |
| |
| * The following keys will always be present: |
| |
| * android.scaler.streamConfigurationMapMaximumResolution |
| * android.sensor.info.activeArraySizeMaximumResolution |
| * android.sensor.info.pixelArraySizeMaximumResolution |
| * android.sensor.info.preCorrectionActiveArraySizeMaximumResolution |
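| |
| For example, a minimal Java sketch of requesting an unbinned capture, |
| assuming a valid CameraDevice `camera` and a `surface` sized from the |
| maximum-resolution stream configuration map: |
| |
|     import android.hardware.camera2.CameraDevice; |
|     import android.hardware.camera2.CameraMetadata; |
|     import android.hardware.camera2.CaptureRequest; |
| |
|     CaptureRequest.Builder builder = |
|         camera.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE); |
|     builder.set(CaptureRequest.SENSOR_PIXEL_MODE, |
|         CameraMetadata.SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION); |
|     builder.addTarget(surface); |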
| </details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.sensor.pixelMode" kind="controls"> |
| </clone> |
| <entry name="rawBinningFactorUsed" type="byte" visibility="public" enum="true" |
| typedef="boolean" hal_version="3.6"> |
| <enum> |
| <value>TRUE |
| <notes> The `RAW` targets in this capture have android.sensor.info.binningFactor as the |
| Bayer pattern. |
| </notes> |
| </value> |
| <value>FALSE |
| <notes> The `RAW` targets have a regular Bayer pattern in this capture. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Whether `RAW` images requested have their Bayer pattern as described by |
| android.sensor.info.binningFactor. |
| </description> |
| <details> |
| This key will only be present in devices advertising the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability which also advertise `REMOSAIC_REPROCESSING` capability. On all other devices |
| RAW targets will have a regular Bayer pattern. |
| </details> |
| </entry> |
| </dynamic> |
| <static> |
| <entry name="readoutTimestamp" type="byte" visibility="java_public" |
| enum="true" hwlevel="legacy" hal_version="3.8"> |
| <enum> |
| <value>NOT_SUPPORTED |
| <notes>This camera device doesn't support the readout timestamp or the onReadoutStarted |
| callback. |
| </notes> |
| </value> |
| <value>HARDWARE |
| <notes>This camera device supports the onReadoutStarted callback as well as outputting |
| readout timestamps. The readout timestamp is generated by the camera hardware and it |
| has the same accuracy and timing characteristics as the start-of-exposure time. |
| </notes> |
| </value> |
| </enum> |
| <description>Whether or not the camera device supports the readout timestamp and the |
| {@code onReadoutStarted} callback.</description> |
| <details> |
| If this tag is {@code HARDWARE}, the camera device calls |
| {@link CameraCaptureSession.CaptureCallback#onReadoutStarted} in addition to the |
| {@link CameraCaptureSession.CaptureCallback#onCaptureStarted} callback for each capture. |
| The timestamp passed into the callback is the start of camera image readout rather than |
| the start of the exposure. The timestamp source of |
| {@link CameraCaptureSession.CaptureCallback#onReadoutStarted} is the same as that of |
| {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}. |
| |
| In addition, the application can switch an output surface's timestamp from start of |
| exposure to start of readout by calling |
| {@link android.hardware.camera2.params.OutputConfiguration#setReadoutTimestampEnabled}. |
| |
| The readout timestamp is beneficial for video recording, because the encoder favors |
| uniform timestamps, and the readout timestamps better reflect the cadence at which |
| camera sensors output data. |
| |
| Note that the camera device produces the start-of-exposure and start-of-readout callbacks |
| together. As a result, the {@link CameraCaptureSession.CaptureCallback#onReadoutStarted} |
| is called right after {@link CameraCaptureSession.CaptureCallback#onCaptureStarted}. The |
| difference between start-of-readout and start-of-exposure is the sensor exposure time, plus |
| a certain constant offset. The offset is usually due to camera sensor level crop, and it is |
| generally constant over time for the same set of output resolutions and capture settings. |
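| |
| For example, a minimal Java sketch of opting a surface into readout |
| timestamps, assuming this key reports HARDWARE and `surface` is a valid |
| output Surface: |
| |
|     import android.hardware.camera2.params.OutputConfiguration; |
| |
|     OutputConfiguration config = new OutputConfiguration(surface); |
|     config.setReadoutTimestampEnabled(true); |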
| </details> |
| <hal_details> |
| This property may be set by HAL layers that implement the AIDL interface. If not set, the |
| camera framework will implicitly set it to HARDWARE for all AIDL HALs. The camera framework |
| will force this to NOT_SUPPORTED for all HIDL HALs, regardless of whether the HAL |
| provided a value or not. |
| </hal_details> |
| </entry> |
| </static> |
| </section> |
| <section name="shading"> |
| <controls> |
| <entry name="mode" type="byte" visibility="public" enum="true" hwlevel="full"> |
| <enum> |
| <value>OFF |
| <notes>No lens shading correction is applied.</notes></value> |
| <value>FAST |
| <notes>Apply lens shading corrections, without slowing |
| frame rate relative to sensor raw output</notes></value> |
| <value>HIGH_QUALITY |
| <notes>Apply high-quality lens shading correction, at the |
| cost of possibly reduced frame rate.</notes></value> |
| </enum> |
| <description>Quality of lens shading correction applied |
| to the image data.</description> |
| <range>android.shading.availableModes</range> |
| <details> |
| When set to OFF mode, no lens shading correction will be applied by the |
| camera device, and identity lens shading map data will be provided |
| if `android.statistics.lensShadingMapMode == ON`. For example, for a lens |
| shading map of size `[ 4, 3 ]`, |
| the output android.statistics.lensShadingCorrectionMap for this case will be an identity |
| map shown below: |
| |
| [ 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, |
| 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, |
| 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, |
| 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, |
| 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, |
| 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 ] |
| |
| When set to other modes, lens shading correction will be applied by the camera |
| device. Applications can request lens shading map data by setting |
| android.statistics.lensShadingMapMode to ON, and then the camera device will provide lens |
| shading map data in android.statistics.lensShadingCorrectionMap; the returned shading map |
| data will be the one applied by the camera device for this capture request. |
| |
| The shading map data may depend on the auto-exposure (AE) and AWB statistics, so the |
| reliability of the map data may be affected by the AE and AWB algorithms. When AE and |
| AWB are in AUTO modes (android.control.aeMode `!=` OFF and android.control.awbMode `!=` |
| OFF), it is recommended that applications wait for AE and AWB |
| to converge before using the returned shading map data, to get the best results. |
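| |
| For example, a minimal Java sketch of disabling correction while still |
| requesting the map, assuming a valid CaptureRequest.Builder `builder` and, |
| later, a TotalCaptureResult `result` from the capture callback: |
| |
|     import android.hardware.camera2.CameraMetadata; |
|     import android.hardware.camera2.CaptureRequest; |
|     import android.hardware.camera2.CaptureResult; |
|     import android.hardware.camera2.params.LensShadingMap; |
| |
|     builder.set(CaptureRequest.SHADING_MODE, CameraMetadata.SHADING_MODE_OFF); |
|     builder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE, |
|         CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON); |
|     // ... submit the request, then in the capture callback: |
|     LensShadingMap map = |
|         result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP); |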
| </details> |
| </entry> |
| <entry name="strength" type="byte"> |
| <description>Control the amount of shading correction |
| applied to the images</description> |
| <units>unitless: 1-10; 10 is full shading |
| compensation</units> |
| <tag id="FUTURE" /> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.shading.mode" kind="controls"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="availableModes" type="byte" visibility="public" |
| type_notes="List of enums (android.shading.mode)." container="array" |
| typedef="enumList" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of lens shading modes for android.shading.mode that are supported by this camera device. |
| </description> |
| <range>Any value listed in android.shading.mode</range> |
| <details> |
| This list contains lens shading modes that can be set for the camera device. |
| Camera devices that support the MANUAL_POST_PROCESSING capability will always |
| list OFF and FAST mode. This includes all FULL level devices. |
| LEGACY devices will always only support FAST mode. |
| </details> |
| <hal_details> |
| HAL must support both FAST and HIGH_QUALITY if lens shading correction control is |
| available on the camera device, but the underlying implementation can be the same for |
| both modes. That is, if the highest quality implementation on the camera device does not |
| slow down capture rate, then FAST and HIGH_QUALITY will generate the same output. |
| </hal_details> |
| </entry> |
| </static> |
| </section> |
| <section name="statistics"> |
| <controls> |
| <entry name="faceDetectMode" type="byte" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value>OFF |
| <notes>Do not include face detection statistics in capture |
| results.</notes></value> |
| <value optional="true">SIMPLE |
| <notes>Return face rectangle and confidence values only. |
| </notes></value> |
| <value optional="true">FULL |
| <notes>Return all face |
| metadata. |
| |
| In this mode, face rectangles, scores, landmarks, and face IDs are all valid. |
| </notes></value> |
| </enum> |
| <description>Operating mode for the face detector |
| unit.</description> |
| <range>android.statistics.info.availableFaceDetectModes</range> |
| <details>Whether face detection is enabled, and whether it |
| should output just the basic fields or the full set of |
| fields.</details> |
| <hal_details> |
| SIMPLE mode must fill in android.statistics.faceRectangles and |
| android.statistics.faceScores. |
| FULL mode must also fill in android.statistics.faceIds, and |
| android.statistics.faceLandmarks. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="histogramMode" type="byte" enum="true" typedef="boolean"> |
| <enum> |
| <value>OFF</value> |
| <value>ON</value> |
| </enum> |
| <description>Operating mode for histogram |
| generation</description> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="sharpnessMapMode" type="byte" enum="true" typedef="boolean"> |
| <enum> |
| <value>OFF</value> |
| <value>ON</value> |
| </enum> |
| <description>Operating mode for sharpness map |
| generation</description> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="hotPixelMapMode" type="byte" visibility="public" enum="true" |
| typedef="boolean"> |
| <enum> |
| <value>OFF |
| <notes>Hot pixel map production is disabled. |
| </notes></value> |
| <value>ON |
| <notes>Hot pixel map production is enabled. |
| </notes></value> |
| </enum> |
| <description> |
| Operating mode for hot pixel map generation. |
| </description> |
| <range>android.statistics.info.availableHotPixelMapModes</range> |
| <details> |
| If set to `true`, a hot pixel map is returned in android.statistics.hotPixelMap. |
| If set to `false`, no hot pixel map will be returned. |
| </details> |
| <tag id="V1" /> |
| <tag id="RAW" /> |
| </entry> |
| </controls> |
| <static> |
| <namespace name="info"> |
| <entry name="availableFaceDetectModes" type="byte" |
| visibility="public" |
| type_notes="List of enums from android.statistics.faceDetectMode" |
| container="array" |
| typedef="enumList" |
| hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>List of face detection modes for android.statistics.faceDetectMode that are |
| supported by this camera device. |
| </description> |
| <range>Any value listed in android.statistics.faceDetectMode</range> |
| <details>OFF is always supported. |
| </details> |
| </entry> |
| <entry name="histogramBucketCount" type="int32"> |
| <description>Number of histogram buckets |
| supported</description> |
| <range>&gt;= 64</range> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="maxFaceCount" type="int32" visibility="public" hwlevel="legacy"> |
| <description>The maximum number of simultaneously detectable |
| faces.</description> |
| <range>0 for cameras without available face detection; otherwise: |
| `>=4` for LIMITED or FULL hwlevel devices or |
| `>0` for LEGACY devices.</range> |
| <tag id="BC" /> |
| </entry> |
| <entry name="maxHistogramCount" type="int32"> |
| <description>Maximum value possible for a histogram |
| bucket</description> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="maxSharpnessMapValue" type="int32"> |
| <description>Maximum value possible for a sharpness map |
| region.</description> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="sharpnessMapSize" type="int32" |
| type_notes="width x height" container="array" typedef="size"> |
| <array> |
| <size>2</size> |
| </array> |
| <description>Dimensions of the sharpness |
| map</description> |
| <range>Must be at least 32 x 32</range> |
| <tag id="FUTURE" /> |
| </entry> |
| <entry name="availableHotPixelMapModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="boolean"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of hot pixel map output modes for android.statistics.hotPixelMapMode that are |
| supported by this camera device. |
| </description> |
| <range>Any value listed in android.statistics.hotPixelMapMode</range> |
| <details> |
| If no hotpixel map output is available for this camera device, this will contain only |
| `false`. |
| |
| ON is always supported on devices with the RAW capability. |
| </details> |
| <tag id="V1" /> |
| <tag id="RAW" /> |
| </entry> |
| <entry name="availableLensShadingMapModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of lens shading map output modes for android.statistics.lensShadingMapMode that |
| are supported by this camera device. |
| </description> |
| <range>Any value listed in android.statistics.lensShadingMapMode</range> |
| <details> |
| If no lens shading map output is available for this camera device, this key will |
| contain only OFF. |
| |
| ON is always supported on devices with the RAW capability. |
| LEGACY mode devices will always only support OFF. |
| </details> |
| </entry> |
| <entry name="availableOisDataModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList" hal_version="3.3"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of OIS data output modes for android.statistics.oisDataMode that |
| are supported by this camera device. |
| </description> |
| <range>Any value listed in android.statistics.oisDataMode</range> |
| <details> |
| If no OIS data output is available for this camera device, this key will |
| contain only OFF. |
| </details> |
| </entry> |
| </namespace> |
| </static> |
| <dynamic> |
| <clone entry="android.statistics.faceDetectMode" |
| kind="controls"></clone> |
| <entry name="faceIds" type="int32" visibility="ndk_public" |
| container="array" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>List of unique IDs for detected faces.</description> |
| <details> |
| Each detected face is given a unique ID that is valid for as long as the face is visible |
| to the camera device. A face that leaves the field of view and later returns may be |
| assigned a new ID. |
| |
| Only available if android.statistics.faceDetectMode == FULL.</details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="faceLandmarks" type="int32" visibility="ndk_public" |
| type_notes="(leftEyeX, leftEyeY, rightEyeX, rightEyeY, mouthX, mouthY)" |
| container="array" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| <size>6</size> |
| </array> |
| <description>List of landmarks for detected |
| faces.</description> |
| <details> |
| For devices not supporting android.distortionCorrection.mode control, the coordinate |
| system always follows that of android.sensor.info.activeArraySize, with `(0, 0)` being |
| the top-left pixel of the active array. |
| |
| For devices supporting android.distortionCorrection.mode control, the coordinate |
| system depends on the mode being set. |
| When the distortion correction mode is OFF, the coordinate system follows |
| android.sensor.info.preCorrectionActiveArraySize, with |
| `(0, 0)` being the top-left pixel of the pre-correction active array. |
| When the distortion correction mode is not OFF, the coordinate system follows |
| android.sensor.info.activeArraySize, with |
| `(0, 0)` being the top-left pixel of the active array. |
| |
| Only available if android.statistics.faceDetectMode == FULL. |
| |
| Starting from API level 30, the coordinate system of activeArraySize or |
| preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not |
| pre-zoomRatio field of view. This means that if the relative position of faces and |
| the camera device doesn't change, when zooming in by increasing |
| android.control.zoomRatio, the face landmarks move farther away from the center of the |
| activeArray or preCorrectionActiveArray. If android.control.zoomRatio is set to 1.0 |
| (default), the face landmarks coordinates won't change as android.scaler.cropRegion |
| changes. See android.control.zoomRatio for details. Whether to use activeArraySize or |
| preCorrectionActiveArraySize still depends on distortion correction mode. |
| </details> |
| <hal_details> |
| HAL must always report face landmarks in the coordinate system of pre-correction |
| active array. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="faceRectangles" type="int32" visibility="ndk_public" |
| type_notes="(xmin, ymin, xmax, ymax). (0,0) is top-left of active pixel area" |
| container="array" typedef="rectangle" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <description>List of the bounding rectangles for detected |
| faces.</description> |
| <details> |
| For devices not supporting android.distortionCorrection.mode control, the coordinate |
| system always follows that of android.sensor.info.activeArraySize, with `(0, 0)` being |
| the top-left pixel of the active array. |
| |
| For devices supporting android.distortionCorrection.mode control, the coordinate |
| system depends on the mode being set. |
| When the distortion correction mode is OFF, the coordinate system follows |
| android.sensor.info.preCorrectionActiveArraySize, with |
| `(0, 0)` being the top-left pixel of the pre-correction active array. |
| When the distortion correction mode is not OFF, the coordinate system follows |
| android.sensor.info.activeArraySize, with |
| `(0, 0)` being the top-left pixel of the active array. |
| |
| Only available if android.statistics.faceDetectMode != OFF. |
| |
| Starting from API level 30, the coordinate system of activeArraySize or |
| preCorrectionActiveArraySize is used to represent post-zoomRatio field of view, not |
| pre-zoomRatio field of view. This means that if the relative position of faces and |
| the camera device doesn't change, when zooming in by increasing |
| android.control.zoomRatio, the face rectangles grow larger and move farther away from |
| the center of the activeArray or preCorrectionActiveArray. If android.control.zoomRatio |
| is set to 1.0 (default), the face rectangles won't change as android.scaler.cropRegion |
| changes. See android.control.zoomRatio for details. Whether to use activeArraySize or |
| preCorrectionActiveArraySize still depends on distortion correction mode. |
| </details> |
| <ndk_details> |
| The data representation is `int[4]`, which maps to `(left, top, right, bottom)`. |
| </ndk_details> |
| <hal_details> |
| HAL must always report face rectangles in the coordinate system of pre-correction |
| active array. |
| </hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="faceScores" type="byte" visibility="ndk_public" |
| container="array" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>List of the face confidence scores for |
| detected faces</description> |
| <range>1-100</range> |
| <details>Only available if android.statistics.faceDetectMode != OFF. |
| </details> |
| <hal_details> |
| The value should be meaningful (for example, setting 100 at |
| all times is illegal).</hal_details> |
| <tag id="BC" /> |
| </entry> |
| <entry name="faces" type="int32" visibility="java_public" synthetic="true" |
| container="array" typedef="face" hwlevel="legacy"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>List of the faces detected through camera face detection |
| in this capture.</description> |
| <details> |
| Only available if android.statistics.faceDetectMode `!=` OFF. |
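| |
| For example, a minimal Java sketch of reading the detected faces, assuming |
| a valid TotalCaptureResult `result` and face detection enabled in the |
| request: |
| |
|     import android.hardware.camera2.CaptureResult; |
|     import android.hardware.camera2.params.Face; |
| |
|     Face[] faces = result.get(CaptureResult.STATISTICS_FACES); |
|     if (faces != null) { |
|         for (Face face : faces) { |
|             // getBounds() uses the coordinate system described by |
|             // android.statistics.faceRectangles. |
|             int score = face.getScore(); |
|         } |
|     } |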
| </details> |
| </entry> |
| <entry name="histogram" type="int32" |
| type_notes="count of pixels for each color channel that fall into each histogram bucket, scaled to be between 0 and maxHistogramCount" |
| container="array"> |
| <array> |
| <size>n</size> |
| <size>3</size> |
| </array> |
| <description>A 3-channel histogram based on the raw |
| sensor data</description> |
| <details>The k'th bucket (0-based) covers the input range |
| (with w = android.sensor.info.whiteLevel) of [ k * w/N, |
| (k + 1) * w / N ). If only a monochrome histogram is |
| supported, all channels should have the same data.</details> |
| <tag id="FUTURE" /> |
| </entry> |
| <clone entry="android.statistics.histogramMode" |
| kind="controls"></clone> |
| <entry name="sharpnessMap" type="int32" |
| type_notes="estimated sharpness for each region of the input image. Normalized to be between 0 and maxSharpnessMapValue. Higher values mean sharper (better focused)" |
| container="array"> |
| <array> |
| <size>n</size> |
| <size>m</size> |
| <size>3</size> |
| </array> |
| <description>A 3-channel sharpness map, based on the raw |
| sensor data</description> |
| <details>If only a monochrome sharpness map is supported, |
| all channels should have the same data</details> |
| <tag id="FUTURE" /> |
| </entry> |
| <clone entry="android.statistics.sharpnessMapMode" |
| kind="controls"></clone> |
| <entry name="lensShadingCorrectionMap" type="byte" visibility="java_public" |
| typedef="lensShadingMap" hwlevel="full"> |
| <description>The shading map is a low-resolution floating-point map |
| that lists the coefficients used to correct for vignetting, for each |
| Bayer color channel.</description> |
| <range>Each gain factor is &gt;= 1</range> |
| <details> |
| The map provided here is the same map that is used by the camera device to |
| correct both color shading and vignetting for output non-RAW images. |
| |
| When there is no lens shading correction applied to RAW |
| output images (android.sensor.info.lensShadingApplied `==` |
| false), this map is the complete lens shading correction |
| map; when there is some lens shading correction applied to |
| the RAW output image (android.sensor.info.lensShadingApplied |
| `==` true), this map reports the remaining lens shading |
| correction map that needs to be applied to get shading |
| corrected images that match the camera device's output for |
| non-RAW formats. |
| |
| Therefore, whatever the value of lensShadingApplied is, the lens |
| shading map should always be applied to RAW images if the goal is to |
| match the shading appearance of processed (non-RAW) images. |
| |
| For a complete shading correction map, the least shaded |
| section of the image will have a gain factor of 1; all |
| other sections will have gains above 1. |
| |
| When android.colorCorrection.mode = TRANSFORM_MATRIX, the map |
| will take into account the colorCorrection settings. |
| |
| The shading map is for the entire active pixel array, and is not |
| affected by the crop region specified in the request. Each shading map |
| entry is the value of the shading compensation map over a specific |
| pixel on the sensor. Specifically, with a (N x M) resolution shading |
| map, and an active pixel array size (W x H), shading map entry |
| (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at |
| pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels. |
| The map is assumed to be bilinearly interpolated between the sample points. |
| |
| The channel order is [R, Geven, Godd, B], where Geven is the green |
| channel for the even rows of a Bayer pattern, and Godd is the odd rows. |
| The shading map is stored in a fully interleaved format. |
| |
| The shading map will generally have on the order of 30-40 rows and columns, |
| and will be smaller than 64x64. |
| |
| As an example, given a very small map defined as: |
| |
| width,height = [ 4, 3 ] |
| values = |
| [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2, |
| 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3, |
| 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0, |
| 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2, |
| 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2, |
| 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ] |
| |
| The low-resolution scaling map images for each channel are |
| (displayed using nearest-neighbor interpolation): |
| |
|  |
|  |
|  |
|  |
| |
| As a visualization only, inverting the full-color map to recover an |
| image of a gray wall (using bicubic interpolation for visual quality) as captured by the sensor gives: |
| |
|  |
| |
| For a MONOCHROME camera, all of the 2x2 channels must have the same values. An example |
| shading map for such a camera is defined as: |
| |
| android.lens.info.shadingMapSize = [ 4, 3 ] |
| android.statistics.lensShadingMap = |
| [ 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2, |
| 1.1, 1.1, 1.1, 1.1, 1.3, 1.3, 1.3, 1.3, |
| 1.2, 1.2, 1.2, 1.2, 1.1, 1.1, 1.1, 1.1, |
| 1.0, 1.0, 1.0, 1.0, 1.2, 1.2, 1.2, 1.2, |
| 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2, |
| 1.2, 1.2, 1.2, 1.2, 1.3, 1.3, 1.3, 1.3 ] |
| |
| </details> |
| </entry> |
| <entry name="lensShadingMap" type="float" visibility="ndk_public" |
| type_notes="2D array of float gain factors per channel to correct lens shading" |
| container="array" hwlevel="full"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| <size>m</size> |
| </array> |
| <description>The shading map is a low-resolution floating-point map |
| that lists the coefficients used to correct for vignetting and color shading, |
| for each Bayer color channel of RAW image data.</description> |
| <range>Each gain factor is &gt;= 1</range> |
| <details> |
| The map provided here is the same map that is used by the camera device to |
| correct both color shading and vignetting for output non-RAW images. |
| |
| When there is no lens shading correction applied to RAW |
| output images (android.sensor.info.lensShadingApplied `==` |
| false), this map is the complete lens shading correction |
| map; when there is some lens shading correction applied to |
| the RAW output image (android.sensor.info.lensShadingApplied |
| `==` true), this map reports the remaining lens shading |
| correction map that needs to be applied to get shading |
| corrected images that match the camera device's output for |
| non-RAW formats. |
| |
| For a complete shading correction map, the least shaded |
| section of the image will have a gain factor of 1; all |
| other sections will have gains above 1. |
| |
| When android.colorCorrection.mode = TRANSFORM_MATRIX, the map |
| will take into account the colorCorrection settings. |
| |
| The shading map is for the entire active pixel array, and is not |
| affected by the crop region specified in the request. Each shading map |
| entry is the value of the shading compensation map over a specific |
| pixel on the sensor. Specifically, with a (N x M) resolution shading |
| map, and an active pixel array size (W x H), shading map entry |
| (x,y) ϵ (0 ... N-1, 0 ... M-1) is the value of the shading map at |
| pixel ( ((W-1)/(N-1)) * x, ((H-1)/(M-1)) * y) for the four color channels. |
| The map is assumed to be bilinearly interpolated between the sample points. |
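
As a non-normative sketch (plain Java, not part of the camera API), sampling one
channel of the interleaved map at a sensor pixel under this bilinear model could
look like:

    // Samples one channel of an interleaved (4 x N x M) shading map at sensor
    // pixel (px, py); `map` is the raw float array from this key.
    static float sampleShadingMap(float[] map, int n, int m, int w, int h,
            int channel, int px, int py) {
        // Map the sensor pixel into fractional shading-map coordinates.
        float x = px * (n - 1) / (float) (w - 1);
        float y = py * (m - 1) / (float) (h - 1);
        int x0 = Math.min((int) x, n - 2), y0 = Math.min((int) y, m - 2);
        float fx = x - x0, fy = y - y0;
        // Fully interleaved layout: entry (x, y, c) lives at 4 * (y * n + x) + c.
        float g00 = map[4 * (y0 * n + x0) + channel];
        float g10 = map[4 * (y0 * n + x0 + 1) + channel];
        float g01 = map[4 * ((y0 + 1) * n + x0) + channel];
        float g11 = map[4 * ((y0 + 1) * n + x0 + 1) + channel];
        return (1 - fy) * ((1 - fx) * g00 + fx * g10)
             + fy * ((1 - fx) * g01 + fx * g11);
    }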
| |
| For a Bayer camera, the channel order is [R, Geven, Godd, B], where Geven is |
| the green channel for the even rows of a Bayer pattern, and Godd is the odd rows. |
| The shading map is stored in a fully interleaved format, and its size |
| is provided in the camera static metadata by android.lens.info.shadingMapSize. |
| |
| The shading map will generally have on the order of 30-40 rows and columns, |
| and will be smaller than 64x64. |
| |
| As an example, given a very small map for a Bayer camera defined as: |
| |
| android.lens.info.shadingMapSize = [ 4, 3 ] |
| android.statistics.lensShadingMap = |
| [ 1.3, 1.2, 1.15, 1.2, 1.2, 1.2, 1.15, 1.2, |
| 1.1, 1.2, 1.2, 1.2, 1.3, 1.2, 1.3, 1.3, |
| 1.2, 1.2, 1.25, 1.1, 1.1, 1.1, 1.1, 1.0, |
| 1.0, 1.0, 1.0, 1.0, 1.2, 1.3, 1.25, 1.2, |
| 1.3, 1.2, 1.2, 1.3, 1.2, 1.15, 1.1, 1.2, |
| 1.2, 1.1, 1.0, 1.2, 1.3, 1.15, 1.2, 1.3 ] |
| |
| The low-resolution scaling map images for each channel are |
| (displayed using nearest-neighbor interpolation): |
| |
|  |
|  |
|  |
|  |
| |
| As a visualization only, inverting the full-color map to recover an |
| image of a gray wall (using bicubic interpolation for visual quality) |
| as captured by the sensor gives: |
| |
|  |
| |
| For a MONOCHROME camera, all of the 2x2 channels must have the same values. An example |
| shading map for such a camera is defined as: |
| |
| android.lens.info.shadingMapSize = [ 4, 3 ] |
| android.statistics.lensShadingMap = |
| [ 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2, |
| 1.1, 1.1, 1.1, 1.1, 1.3, 1.3, 1.3, 1.3, |
| 1.2, 1.2, 1.2, 1.2, 1.1, 1.1, 1.1, 1.1, |
| 1.0, 1.0, 1.0, 1.0, 1.2, 1.2, 1.2, 1.2, |
| 1.3, 1.3, 1.3, 1.3, 1.2, 1.2, 1.2, 1.2, |
| 1.2, 1.2, 1.2, 1.2, 1.3, 1.3, 1.3, 1.3 ] |
| |
Note that the RAW image data might be subject to lens shading
correction not reported on this map. Query
android.sensor.info.lensShadingApplied to see if RAW image data is subject
to lens shading correction. If android.sensor.info.lensShadingApplied
is TRUE, the RAW image data is subject to partial or full lens shading
correction. If full lens shading correction is applied to RAW
images, the gain factor map reported in this key will contain all 1.0 gains.
| In other words, the map reported in this key is the remaining lens shading |
| that needs to be applied on the RAW image to get images without lens shading |
| artifacts. See android.request.maxNumOutputRaw for a list of RAW image |
| formats. |
| </details> |
| <hal_details> |
The lens shading map calculation may depend on exposure and white balance statistics.
When AE and AWB are in AUTO modes
(android.control.aeMode `!=` OFF and android.control.awbMode `!=` OFF), the HAL
may have all the information it needs to generate the most accurate lens shading map.
When AE or AWB is in manual mode
(android.control.aeMode `==` OFF or android.control.awbMode `==` OFF), the shading map
may be adversely impacted by manual exposure or white balance parameters. To avoid
| generating unreliable shading map data, the HAL may choose to lock the shading map with |
| the latest known good map generated when the AE and AWB are in AUTO modes. |
| </hal_details> |
| </entry> |
| <entry name="predictedColorGains" type="float" |
| visibility="hidden" |
| deprecated="true" |
| optional="true" |
| type_notes="A 1D array of floats for 4 color channel gains" |
| container="array"> |
| <array> |
| <size>4</size> |
| </array> |
| <description>The best-fit color channel gains calculated |
| by the camera device's statistics units for the current output frame. |
| </description> |
| <deprecation_description> |
| Never fully implemented or specified; do not use |
| </deprecation_description> |
| <details> |
| This may be different than the gains used for this frame, |
| since statistics processing on data from a new frame |
| typically completes after the transform has already been |
| applied to that frame. |
| |
| The 4 channel gains are defined in Bayer domain, |
| see android.colorCorrection.gains for details. |
| |
| This value should always be calculated by the auto-white balance (AWB) block, |
| regardless of the android.control.* current values. |
| </details> |
| </entry> |
| <entry name="predictedColorTransform" type="rational" |
| visibility="hidden" |
| deprecated="true" |
| optional="true" |
| type_notes="3x3 rational matrix in row-major order" |
| container="array"> |
| <array> |
| <size>3</size> |
| <size>3</size> |
| </array> |
| <description>The best-fit color transform matrix estimate |
| calculated by the camera device's statistics units for the current |
| output frame.</description> |
| <deprecation_description> |
| Never fully implemented or specified; do not use |
| </deprecation_description> |
| <details>The camera device will provide the estimate from its |
| statistics unit on the white balance transforms to use |
| for the next frame. These are the values the camera device believes |
| are the best fit for the current output frame. This may |
| be different than the transform used for this frame, since |
| statistics processing on data from a new frame typically |
| completes after the transform has already been applied to |
| that frame. |
| |
| These estimates must be provided for all frames, even if |
| capture settings and color transforms are set by the application. |
| |
| This value should always be calculated by the auto-white balance (AWB) block, |
| regardless of the android.control.* current values. |
| </details> |
| </entry> |
| <entry name="sceneFlicker" type="byte" visibility="public" enum="true" |
| hwlevel="full"> |
| <enum> |
| <value>NONE |
| <notes>The camera device does not detect any flickering illumination |
| in the current scene.</notes></value> |
| <value>50HZ |
| <notes>The camera device detects illumination flickering at 50Hz |
| in the current scene.</notes></value> |
| <value>60HZ |
| <notes>The camera device detects illumination flickering at 60Hz |
| in the current scene.</notes></value> |
| </enum> |
<description>The camera device's estimate of the scene illumination lighting
frequency.</description>
| <details> |
| Many light sources, such as most fluorescent lights, flicker at a rate |
| that depends on the local utility power standards. This flicker must be |
| accounted for by auto-exposure routines to avoid artifacts in captured images. |
| The camera device uses this entry to tell the application what the scene |
| illuminant frequency is. |
| |
| When manual exposure control is enabled |
| (`android.control.aeMode == OFF` or `android.control.mode == |
| OFF`), the android.control.aeAntibandingMode doesn't perform |
| antibanding, and the application can ensure it selects |
| exposure times that do not cause banding issues by looking |
| into this metadata field. See |
| android.control.aeAntibandingMode for more details. |
| |
| Reports NONE if there doesn't appear to be flickering illumination. |
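
As an illustrative sketch (assuming `requestBuilder` and `result` are the
application's {@link android.hardware.camera2.CaptureRequest.Builder} and capture
result), an application doing manual exposure might do:

    Integer flicker = result.get(CaptureResult.STATISTICS_SCENE_FLICKER);
    if (flicker != null
            &amp;&amp; flicker == CameraMetadata.STATISTICS_SCENE_FLICKER_50HZ) {
        // 50 Hz mains means the light flickers at 100 Hz; exposure times that
        // are multiples of 10 ms average over whole flicker cycles.
        requestBuilder.set(CaptureRequest.SENSOR_EXPOSURE_TIME, 20_000_000L);
    }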
| </details> |
| </entry> |
| <clone entry="android.statistics.hotPixelMapMode" kind="controls"> |
| </clone> |
| <entry name="hotPixelMap" type="int32" visibility="public" |
| type_notes="list of coordinates based on android.sensor.pixelArraySize" |
| container="array" typedef="point"> |
| <array> |
| <size>2</size> |
| <size>n</size> |
| </array> |
| <description> |
| List of `(x, y)` coordinates of hot/defective pixels on the sensor. |
| </description> |
| <range> |
n &lt;= number of pixels on the sensor.
| The `(x, y)` coordinates must be bounded by |
| android.sensor.info.pixelArraySize. |
| </range> |
| <details> |
| A coordinate `(x, y)` must lie between `(0, 0)`, and |
| `(width - 1, height - 1)` (inclusive), which are the top-left and |
| bottom-right of the pixel array, respectively. The width and |
| height dimensions are given in android.sensor.info.pixelArraySize. |
| This may include hot pixels that lie outside of the active array |
| bounds given by android.sensor.info.activeArraySize. |
| |
| For camera devices with the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability or devices where |
| {@link CameraCharacteristics#getAvailableCaptureRequestKeys} |
| lists android.sensor.pixelMode, |
| android.sensor.info.pixelArraySizeMaximumResolution will be used as the |
| pixel array size if the corresponding request sets android.sensor.pixelMode to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
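
As an illustrative sketch (assuming `requestBuilder` and `result` are the
application's request builder and capture result), the map can be requested and
read as:

    requestBuilder.set(CaptureRequest.STATISTICS_HOT_PIXEL_MAP_MODE, true);
    // ... after the capture completes:
    android.graphics.Point[] hotPixels =
            result.get(CaptureResult.STATISTICS_HOT_PIXEL_MAP);
    if (hotPixels != null) {
        for (android.graphics.Point p : hotPixels) {
            // (p.x, p.y) indexes into android.sensor.info.pixelArraySize.
        }
    }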
| </details> |
| <hal_details> |
A hotpixel map contains the coordinates of pixels on the camera
sensor that do not report valid values (usually due to defects in
the camera sensor). This includes pixels that are stuck at certain
| values, or have a response that does not accurately encode the |
| incoming light from the scene. |
| |
| To avoid performance issues, there should be significantly fewer hot |
| pixels than actual pixels on the camera sensor. |
| </hal_details> |
| <tag id="V1" /> |
| <tag id="RAW" /> |
| </entry> |
| </dynamic> |
| <controls> |
| <entry name="lensShadingMapMode" type="byte" visibility="public" enum="true" hwlevel="full"> |
| <enum> |
| <value>OFF |
| <notes>Do not include a lens shading map in the capture result.</notes></value> |
| <value>ON |
| <notes>Include a lens shading map in the capture result.</notes></value> |
| </enum> |
| <description>Whether the camera device will output the lens |
| shading map in output result metadata.</description> |
| <range>android.statistics.info.availableLensShadingMapModes</range> |
| <details>When set to ON, |
| android.statistics.lensShadingMap will be provided in |
| the output result metadata. |
| |
| ON is always supported on devices with the RAW capability. |
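
As an illustrative sketch (assuming `requestBuilder` and `result` are the
application's request builder and capture result):

    requestBuilder.set(CaptureRequest.STATISTICS_LENS_SHADING_MAP_MODE,
            CameraMetadata.STATISTICS_LENS_SHADING_MAP_MODE_ON);
    // ... after the capture completes:
    LensShadingMap map =
            result.get(CaptureResult.STATISTICS_LENS_SHADING_CORRECTION_MAP);
    if (map != null) {
        // Gain for the red channel at the top-left sample point.
        float gain = map.getGainFactor(RggbChannelVector.RED, /*column*/ 0, /*row*/ 0);
    }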
| </details> |
| <tag id="RAW" /> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.statistics.lensShadingMapMode" kind="controls"> |
| </clone> |
| </dynamic> |
| <controls> |
| <entry name="oisDataMode" type="byte" visibility="public" enum="true" hal_version="3.3"> |
| <enum> |
| <value>OFF |
| <notes>Do not include OIS data in the capture result.</notes></value> |
| <value>ON |
| <notes>Include OIS data in the capture result.</notes> |
| <sdk_notes>android.statistics.oisSamples provides OIS sample data in the |
| output result metadata. |
| </sdk_notes> |
| <ndk_notes>android.statistics.oisTimestamps, android.statistics.oisXShifts, |
| and android.statistics.oisYShifts provide OIS data in the output result metadata. |
| </ndk_notes> |
| </value> |
| </enum> |
| <description>A control for selecting whether optical stabilization (OIS) position |
| information is included in output result metadata.</description> |
| <range>android.statistics.info.availableOisDataModes</range> |
| <details> |
| Since optical image stabilization generally involves motion much faster than the duration |
| of individual image exposure, multiple OIS samples can be included for a single capture |
| result. For example, if the OIS reporting operates at 200 Hz, a typical camera operating |
| at 30fps may have 6-7 OIS samples per capture result. This information can be combined |
| with the rolling shutter skew to account for lens motion during image exposure in |
| post-processing algorithms. |
| </details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.statistics.oisDataMode" kind="controls"> |
| </clone> |
| <entry name="oisTimestamps" type="int64" visibility="ndk_public" container="array" hal_version="3.3"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| An array of timestamps of OIS samples, in nanoseconds. |
| </description> |
| <units>nanoseconds</units> |
| <details> |
| The array contains the timestamps of OIS samples. The timestamps are in the same |
| timebase as and comparable to android.sensor.timestamp. |
| </details> |
| </entry> |
| <entry name="oisXShifts" type="float" visibility="ndk_public" container="array" hal_version="3.3"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| An array of shifts of OIS samples, in x direction. |
| </description> |
| <units>Pixels in active array.</units> |
| <details> |
| The array contains the amount of shifts in x direction, in pixels, based on OIS samples. |
| A positive value is a shift from left to right in the pre-correction active array |
| coordinate system. For example, if the optical center is (1000, 500) in pre-correction |
| active array coordinates, a shift of (3, 0) puts the new optical center at (1003, 500). |
| |
| The number of shifts must match the number of timestamps in |
| android.statistics.oisTimestamps. |
| |
| The OIS samples are not affected by whether lens distortion correction is enabled (on |
| supporting devices). They are always reported in pre-correction active array coordinates, |
since the scaling of OIS shifts would depend on the specific spot on the sensor where
the shift is needed.
| </details> |
| </entry> |
| <entry name="oisYShifts" type="float" visibility="ndk_public" container="array" hal_version="3.3"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| An array of shifts of OIS samples, in y direction. |
| </description> |
| <units>Pixels in active array.</units> |
| <details> |
| The array contains the amount of shifts in y direction, in pixels, based on OIS samples. |
| A positive value is a shift from top to bottom in pre-correction active array coordinate |
system. For example, if the optical center is (1000, 500) in pre-correction active array
coordinates, a shift of (0, 5) puts the new optical center at (1000, 505).
| |
| The number of shifts must match the number of timestamps in |
| android.statistics.oisTimestamps. |
| |
| The OIS samples are not affected by whether lens distortion correction is enabled (on |
| supporting devices). They are always reported in pre-correction active array coordinates, |
since the scaling of OIS shifts would depend on the specific spot on the sensor where
the shift is needed.
| </details> |
| </entry> |
| <entry name="oisSamples" type="float" visibility="java_public" synthetic="true" |
| container="array" typedef="oisSample" hal_version="3.3"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| An array of optical stabilization (OIS) position samples. |
| </description> |
| <details> |
| Each OIS sample contains the timestamp and the amount of shifts in x and y direction, |
| in pixels, of the OIS sample. |
| |
| A positive value for a shift in x direction is a shift from left to right in the |
| pre-correction active array coordinate system. For example, if the optical center is |
| (1000, 500) in pre-correction active array coordinates, a shift of (3, 0) puts the new |
| optical center at (1003, 500). |
| |
| A positive value for a shift in y direction is a shift from top to bottom in |
pre-correction active array coordinate system. For example, if the optical center is
(1000, 500) in pre-correction active array coordinates, a shift of (0, 5) puts the new
optical center at (1000, 505).
| |
| The OIS samples are not affected by whether lens distortion correction is enabled (on |
| supporting devices). They are always reported in pre-correction active array coordinates, |
since the scaling of OIS shifts would depend on the specific spot on the sensor where
the shift is needed.
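
As an illustrative sketch (assuming `requestBuilder` and `result` are the
application's request builder and capture result):

    requestBuilder.set(CaptureRequest.STATISTICS_OIS_DATA_MODE,
            CameraMetadata.STATISTICS_OIS_DATA_MODE_ON);
    // ... after the capture completes:
    OisSample[] samples = result.get(CaptureResult.STATISTICS_OIS_SAMPLES);
    if (samples != null) {
        for (OisSample s : samples) {
            long tNs = s.getTimestamp(); // same timebase as android.sensor.timestamp
            float dx = s.getXshift();    // pixels, pre-correction active array
            float dy = s.getYshift();
        }
    }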
| </details> |
| </entry> |
| <entry name="lensIntrinsicsSamples" type="float" visibility="java_public" synthetic="true" |
| container="array" typedef="lensIntrinsicsSample" |
| hal_version="3.10"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| An array of intra-frame lens intrinsic samples. |
| </description> |
| <details> |
Contains an array of intra-frame android.lens.intrinsicCalibration updates. This must
not be confused with or compared to android.statistics.oisSamples. Although OIS could be the
| main driver, all relevant factors such as focus distance and optical zoom must also |
| be included. Do note that OIS samples must not be applied on top of the lens intrinsic |
| samples. |
| Support for this capture result can be queried via |
| {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}. |
| If available, clients can expect multiple samples per capture result. The specific |
| amount will depend on current frame duration and sampling rate. Generally a sampling rate |
| greater than or equal to 200Hz is considered sufficient for high quality results. |
| </details> |
| </entry> |
| <entry name="lensIntrinsicTimestamps" type="int64" visibility="ndk_public" container="array" |
| hal_version="3.10"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| An array of timestamps of lens intrinsics samples, in nanoseconds. |
| </description> |
| <units>nanoseconds</units> |
| <details> |
| The array contains the timestamps of lens intrinsics samples. The timestamps are in the |
| same timebase as and comparable to android.sensor.timestamp. |
| </details> |
| </entry> |
| <entry name="lensIntrinsicSamples" type="float" visibility="ndk_public" |
| container="array" hal_version="3.10"> |
| <array> |
| <size>5</size> |
| <size>n</size> |
| </array> |
| <description> |
| An array of intra-frame lens intrinsics. |
| </description> |
| <units> |
| Pixels in the android.sensor.info.preCorrectionActiveArraySize coordinate system. |
| </units> |
| <details> |
| The data layout and contents of individual array entries matches with |
| android.lens.intrinsicCalibration. |
| </details> |
| </entry> |
| </dynamic> |
| </section> |
| <section name="tonemap"> |
| <controls> |
| <entry name="curveBlue" type="float" visibility="ndk_public" |
| type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints." |
| container="array" hwlevel="full"> |
| <array> |
| <size>n</size> |
| <size>2</size> |
| </array> |
| <description>Tonemapping / contrast / gamma curve for the blue |
| channel, to use when android.tonemap.mode is |
| CONTRAST_CURVE.</description> |
| <details>See android.tonemap.curveRed for more details.</details> |
| </entry> |
| <entry name="curveGreen" type="float" visibility="ndk_public" |
| type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints." |
| container="array" hwlevel="full"> |
| <array> |
| <size>n</size> |
| <size>2</size> |
| </array> |
| <description>Tonemapping / contrast / gamma curve for the green |
| channel, to use when android.tonemap.mode is |
| CONTRAST_CURVE.</description> |
| <details>See android.tonemap.curveRed for more details.</details> |
| </entry> |
| <entry name="curveRed" type="float" visibility="ndk_public" |
| type_notes="1D array of float pairs (P_IN, P_OUT). The maximum number of pairs is specified by android.tonemap.maxCurvePoints." |
| container="array" hwlevel="full"> |
| <array> |
| <size>n</size> |
| <size>2</size> |
| </array> |
| <description>Tonemapping / contrast / gamma curve for the red |
| channel, to use when android.tonemap.mode is |
| CONTRAST_CURVE.</description> |
| <range>0-1 on both input and output coordinates, normalized |
| as a floating-point value such that 0 == black and 1 == white. |
| </range> |
| <details> |
| Each channel's curve is defined by an array of control points: |
| |
| android.tonemap.curveRed = |
| [ P0in, P0out, P1in, P1out, P2in, P2out, P3in, P3out, ..., PNin, PNout ] |
2 &lt;= N &lt;= android.tonemap.maxCurvePoints
| |
| These are sorted in order of increasing `Pin`; it is |
| required that input values 0.0 and 1.0 are included in the list to |
| define a complete mapping. For input values between control points, |
| the camera device must linearly interpolate between the control |
| points. |
| |
| Each curve can have an independent number of points, and the number |
| of points can be less than max (that is, the request doesn't have to |
| always provide a curve with number of points equivalent to |
| android.tonemap.maxCurvePoints). |
| |
| For devices with MONOCHROME capability, all three channels must have the same set of |
| control points. |
| |
| A few examples, and their corresponding graphical mappings; these |
| only specify the red channel and the precision is limited to 4 |
| digits, for conciseness. |
| |
| Linear mapping: |
| |
| android.tonemap.curveRed = [ 0, 0, 1.0, 1.0 ] |
| |
|  |
| |
| Invert mapping: |
| |
| android.tonemap.curveRed = [ 0, 1.0, 1.0, 0 ] |
| |
|  |
| |
| Gamma 1/2.2 mapping, with 16 control points: |
| |
| android.tonemap.curveRed = [ |
| 0.0000, 0.0000, 0.0667, 0.2920, 0.1333, 0.4002, 0.2000, 0.4812, |
| 0.2667, 0.5484, 0.3333, 0.6069, 0.4000, 0.6594, 0.4667, 0.7072, |
| 0.5333, 0.7515, 0.6000, 0.7928, 0.6667, 0.8317, 0.7333, 0.8685, |
| 0.8000, 0.9035, 0.8667, 0.9370, 0.9333, 0.9691, 1.0000, 1.0000 ] |
| |
|  |
| |
| Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points: |
| |
| android.tonemap.curveRed = [ |
| 0.0000, 0.0000, 0.0667, 0.2864, 0.1333, 0.4007, 0.2000, 0.4845, |
| 0.2667, 0.5532, 0.3333, 0.6125, 0.4000, 0.6652, 0.4667, 0.7130, |
| 0.5333, 0.7569, 0.6000, 0.7977, 0.6667, 0.8360, 0.7333, 0.8721, |
| 0.8000, 0.9063, 0.8667, 0.9389, 0.9333, 0.9701, 1.0000, 1.0000 ] |
| |
|  |
| </details> |
| <hal_details> |
| For good quality of mapping, at least 128 control points are |
| preferred. |
| |
A typical use case of this would be a gamma-1/2.2 curve, using as many
control points as are available.
| </hal_details> |
| </entry> |
| <entry name="curve" type="float" visibility="java_public" synthetic="true" |
| typedef="tonemapCurve" |
| hwlevel="full"> |
| <description>Tonemapping / contrast / gamma curve to use when android.tonemap.mode |
| is CONTRAST_CURVE.</description> |
| <details> |
The tonemapCurve consists of three curves, one each for the red, green, and blue
channels. The following example uses the red channel; the same logic
applies to the green and blue channels.
| Each channel's curve is defined by an array of control points: |
| |
| curveRed = |
| [ P0(in, out), P1(in, out), P2(in, out), P3(in, out), ..., PN(in, out) ] |
2 &lt;= N &lt;= android.tonemap.maxCurvePoints
| |
| These are sorted in order of increasing `Pin`; it is always |
| guaranteed that input values 0.0 and 1.0 are included in the list to |
| define a complete mapping. For input values between control points, |
| the camera device must linearly interpolate between the control |
| points. |
| |
| Each curve can have an independent number of points, and the number |
| of points can be less than max (that is, the request doesn't have to |
| always provide a curve with number of points equivalent to |
| android.tonemap.maxCurvePoints). |
| |
| For devices with MONOCHROME capability, all three channels must have the same set of |
| control points. |
| |
| A few examples, and their corresponding graphical mappings; these |
| only specify the red channel and the precision is limited to 4 |
| digits, for conciseness. |
| |
| Linear mapping: |
| |
| curveRed = [ (0, 0), (1.0, 1.0) ] |
| |
|  |
| |
| Invert mapping: |
| |
| curveRed = [ (0, 1.0), (1.0, 0) ] |
| |
|  |
| |
| Gamma 1/2.2 mapping, with 16 control points: |
| |
| curveRed = [ |
| (0.0000, 0.0000), (0.0667, 0.2920), (0.1333, 0.4002), (0.2000, 0.4812), |
| (0.2667, 0.5484), (0.3333, 0.6069), (0.4000, 0.6594), (0.4667, 0.7072), |
| (0.5333, 0.7515), (0.6000, 0.7928), (0.6667, 0.8317), (0.7333, 0.8685), |
| (0.8000, 0.9035), (0.8667, 0.9370), (0.9333, 0.9691), (1.0000, 1.0000) ] |
| |
|  |
| |
| Standard sRGB gamma mapping, per IEC 61966-2-1:1999, with 16 control points: |
| |
| curveRed = [ |
| (0.0000, 0.0000), (0.0667, 0.2864), (0.1333, 0.4007), (0.2000, 0.4845), |
| (0.2667, 0.5532), (0.3333, 0.6125), (0.4000, 0.6652), (0.4667, 0.7130), |
| (0.5333, 0.7569), (0.6000, 0.7977), (0.6667, 0.8360), (0.7333, 0.8721), |
| (0.8000, 0.9063), (0.8667, 0.9389), (0.9333, 0.9701), (1.0000, 1.0000) ] |
| |
|  |
| </details> |
| <hal_details> |
| This entry is created by the framework from the curveRed, curveGreen and |
| curveBlue entries. |
| </hal_details> |
| </entry> |
| <entry name="mode" type="byte" visibility="public" enum="true" |
| hwlevel="full"> |
| <enum> |
| <value>CONTRAST_CURVE |
| <notes>Use the tone mapping curve specified in |
| the android.tonemap.curve* entries. |
| |
| All color enhancement and tonemapping must be disabled, except |
| for applying the tonemapping curve specified by |
| android.tonemap.curve. |
| |
| Must not slow down frame rate relative to raw |
| sensor output. |
| </notes> |
| </value> |
| <value>FAST |
| <notes> |
| Advanced gamma mapping and color enhancement may be applied, without |
| reducing frame rate compared to raw sensor output. |
| </notes> |
| </value> |
| <value>HIGH_QUALITY |
| <notes> |
| High-quality gamma mapping and color enhancement will be applied, at |
| the cost of possibly reduced frame rate compared to raw sensor output. |
| </notes> |
| </value> |
| <value>GAMMA_VALUE |
| <notes> |
| Use the gamma value specified in android.tonemap.gamma to perform |
| tonemapping. |
| |
| All color enhancement and tonemapping must be disabled, except |
| for applying the tonemapping curve specified by android.tonemap.gamma. |
| |
| Must not slow down frame rate relative to raw sensor output. |
| </notes> |
| </value> |
| <value>PRESET_CURVE |
| <notes> |
| Use the preset tonemapping curve specified in |
| android.tonemap.presetCurve to perform tonemapping. |
| |
| All color enhancement and tonemapping must be disabled, except |
| for applying the tonemapping curve specified by |
| android.tonemap.presetCurve. |
| |
| Must not slow down frame rate relative to raw sensor output. |
| </notes> |
| </value> |
| </enum> |
| <description>High-level global contrast/gamma/tonemapping control. |
| </description> |
| <range>android.tonemap.availableToneMapModes</range> |
| <details> |
| When switching to an application-defined contrast curve by setting |
| android.tonemap.mode to CONTRAST_CURVE, the curve is defined |
| per-channel with a set of `(in, out)` points that specify the |
| mapping from input high-bit-depth pixel value to the output |
| low-bit-depth value. Since the actual pixel ranges of both input |
| and output may change depending on the camera pipeline, the values |
| are specified by normalized floating-point numbers. |
| |
| More-complex color mapping operations such as 3D color look-up |
| tables, selective chroma enhancement, or other non-linear color |
| transforms will be disabled when android.tonemap.mode is |
| CONTRAST_CURVE. |
| |
| When using either FAST or HIGH_QUALITY, the camera device will |
| emit its own tonemap curve in android.tonemap.curve. |
| These values are always available, and as close as possible to the |
| actually used nonlinear/nonglobal transforms. |
| |
If a request is sent with CONTRAST_CURVE using the curve that the camera device
provided for FAST or HIGH_QUALITY, the resulting image's tonemap will be
roughly the same.</details>
| </entry> |
| </controls> |
| <static> |
| <entry name="maxCurvePoints" type="int32" visibility="public" |
| hwlevel="full"> |
| <description>Maximum number of supported points in the |
| tonemap curve that can be used for android.tonemap.curve. |
| </description> |
| <details> |
| If the actual number of points provided by the application (in android.tonemap.curve*) is |
| less than this maximum, the camera device will resample the curve to its internal |
| representation, using linear interpolation. |
| |
| The output curves in the result metadata may have a different number |
| of points than the input curves, and will represent the actual |
| hardware curves used as closely as possible when linearly interpolated. |
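
As a non-normative sketch, an application can avoid resampling losses by generating
its curve with exactly this many points (`characteristics` is assumed to be the
device's {@link android.hardware.camera2.CameraCharacteristics}):

    Integer maxPoints =
            characteristics.get(CameraCharacteristics.TONEMAP_MAX_CURVE_POINTS);
    int n = (maxPoints != null) ? maxPoints : 64; // 64 is the guaranteed minimum
    float[] curve = new float[2 * n];
    for (int i = 0; i &lt; n; i++) {
        float in = i / (float) (n - 1);
        curve[2 * i] = in;                                  // P_IN
        curve[2 * i + 1] = (float) Math.pow(in, 1.0 / 2.2); // P_OUT, gamma 1/2.2
    }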
| </details> |
| <hal_details> |
This value must be at least 64, and should be at least 128.
| </hal_details> |
| </entry> |
| <entry name="availableToneMapModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList" hwlevel="full"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of tonemapping modes for android.tonemap.mode that are supported by this camera |
| device. |
| </description> |
| <range>Any value listed in android.tonemap.mode</range> |
| <details> |
Camera devices that support the MANUAL_POST_PROCESSING capability will always contain
at least one of the mode combinations below:
| |
| * CONTRAST_CURVE, FAST and HIGH_QUALITY |
| * GAMMA_VALUE, PRESET_CURVE, FAST and HIGH_QUALITY |
| |
| This includes all FULL level devices. |
| </details> |
| <hal_details> |
| HAL must support both FAST and HIGH_QUALITY if automatic tonemap control is available |
| on the camera device, but the underlying implementation can be the same for both modes. |
| That is, if the highest quality implementation on the camera device does not slow down |
| capture rate, then FAST and HIGH_QUALITY will generate the same output. |
| </hal_details> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.tonemap.curveBlue" kind="controls"> |
| </clone> |
| <clone entry="android.tonemap.curveGreen" kind="controls"> |
| </clone> |
| <clone entry="android.tonemap.curveRed" kind="controls"> |
| </clone> |
| <clone entry="android.tonemap.curve" kind="controls"> |
| </clone> |
| <clone entry="android.tonemap.mode" kind="controls"> |
| </clone> |
| </dynamic> |
| <controls> |
| <entry name="gamma" type="float" visibility="public"> |
| <description> Tonemapping curve to use when android.tonemap.mode is |
| GAMMA_VALUE |
| </description> |
| <details> |
The tonemap curve will be defined by the following formula:

* OUT = pow(IN, 1.0 / gamma)

where IN and OUT are the input and output pixel values scaled to the range
[0.0, 1.0], pow is the power function, and gamma is the gamma value
specified by this key.

The same curve will be applied to all color channels. The camera device
may clip the input gamma value to its supported range. The actual applied
value will be returned in the capture result.

The valid range of the gamma value varies across devices, but values
within [1.0, 5.0] are guaranteed not to be clipped.
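
As an illustrative sketch (assuming `requestBuilder` and `result` are the
application's request builder and capture result):

    requestBuilder.set(CaptureRequest.TONEMAP_MODE,
            CameraMetadata.TONEMAP_MODE_GAMMA_VALUE);
    requestBuilder.set(CaptureRequest.TONEMAP_GAMMA, 2.2f);
    // ... after the capture completes, read back the (possibly clipped) value:
    Float appliedGamma = result.get(CaptureResult.TONEMAP_GAMMA);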
| </details> |
| </entry> |
| <entry name="presetCurve" type="byte" visibility="public" enum="true"> |
| <enum> |
| <value>SRGB |
| <notes>Tonemapping curve is defined by sRGB</notes> |
| </value> |
| <value>REC709 |
| <notes>Tonemapping curve is defined by ITU-R BT.709</notes> |
| </value> |
| </enum> |
| <description> Tonemapping curve to use when android.tonemap.mode is |
| PRESET_CURVE |
| </description> |
| <details> |
The tonemap curve will be defined by the specified standard.
| |
| sRGB (approximated by 16 control points): |
| |
|  |
| |
| Rec. 709 (approximated by 16 control points): |
| |
|  |
| |
Note that the above figures show 16-control-point approximations of the preset
curves. Camera devices may apply a different approximation to the curve.
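
As an illustrative sketch (assuming `requestBuilder` is the application's request
builder):

    requestBuilder.set(CaptureRequest.TONEMAP_MODE,
            CameraMetadata.TONEMAP_MODE_PRESET_CURVE);
    requestBuilder.set(CaptureRequest.TONEMAP_PRESET_CURVE,
            CameraMetadata.TONEMAP_PRESET_CURVE_SRGB);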
| </details> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.tonemap.gamma" kind="controls"> |
| </clone> |
| <clone entry="android.tonemap.presetCurve" kind="controls"> |
| </clone> |
| </dynamic> |
| </section> |
| <section name="led"> |
| <controls> |
| <entry name="transmit" type="byte" visibility="hidden" optional="true" |
| enum="true" typedef="boolean"> |
| <enum> |
| <value>OFF</value> |
| <value>ON</value> |
| </enum> |
| <description>This LED is nominally used to indicate to the user |
| that the camera is powered on and may be streaming images back to the |
| Application Processor. In certain rare circumstances, the OS may |
| disable this when video is processed locally and not transmitted to |
| any untrusted applications. |
| |
| In particular, the LED *must* always be on when the data could be |
| transmitted off the device. The LED *should* always be on whenever |
| data is stored locally on the device. |
| |
| The LED *may* be off if a trusted application is using the data that |
| doesn't violate the above rules. |
| </description> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.led.transmit" kind="controls"></clone> |
| </dynamic> |
| <static> |
| <entry name="availableLeds" type="byte" visibility="hidden" optional="true" |
| enum="true" |
| container="array"> |
| <array> |
| <size>n</size> |
| </array> |
| <enum> |
| <value>TRANSMIT |
| <notes>android.led.transmit control is used.</notes> |
| </value> |
| </enum> |
| <description>A list of camera LEDs that are available on this system. |
| </description> |
| </entry> |
| </static> |
| </section> |
| <section name="info"> |
| <static> |
| <entry name="supportedHardwareLevel" type="byte" visibility="public" |
| enum="true" hwlevel="legacy"> |
| <enum> |
| <value> |
| LIMITED |
| <notes> |
| This camera device does not have enough capabilities to qualify as a `FULL` device or |
| better. |
| |
| Only the stream configurations listed in the `LEGACY` and `LIMITED` |
| [tables](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#limited-level-additional-guaranteed-configurations) |
| in the documentation are guaranteed to be supported. |
| |
| All `LIMITED` devices support the `BACKWARDS_COMPATIBLE` capability, indicating basic |
| support for color image capture. The only exception is that the device may |
| alternatively support only the `DEPTH_OUTPUT` capability, if it can only output depth |
| measurements and not color images. |
| |
| `LIMITED` devices and above require the use of android.control.aePrecaptureTrigger |
| to lock exposure metering (and calculate flash power, for cameras with flash) before |
| capturing a high-quality still image. |
| |
| A `LIMITED` device that only lists the `BACKWARDS_COMPATIBLE` capability is only |
| required to support full-automatic operation and post-processing (`OFF` is not |
| supported for android.control.aeMode, android.control.afMode, or |
android.control.awbMode).
| |
| Additional capabilities may optionally be supported by a `LIMITED`-level device, and |
| can be checked for in android.request.availableCapabilities. |
| </notes> |
| </value> |
| <value> |
| FULL |
| <notes> |
| This camera device is capable of supporting advanced imaging applications. |
| |
| The stream configurations listed in the `FULL`, `LEGACY` and `LIMITED` |
| [tables](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#full-level-additional-guaranteed-configurations) |
| in the documentation are guaranteed to be supported. |
| |
A `FULL` device will support the capabilities below:
| |
| * `BURST_CAPTURE` capability (android.request.availableCapabilities contains |
| `BURST_CAPTURE`) |
| * Per frame control (android.sync.maxLatency `==` PER_FRAME_CONTROL) |
| * Manual sensor control (android.request.availableCapabilities contains `MANUAL_SENSOR`) |
| * Manual post-processing control (android.request.availableCapabilities contains |
| `MANUAL_POST_PROCESSING`) |
| * The required exposure time range defined in android.sensor.info.exposureTimeRange |
| * The required maxFrameDuration defined in android.sensor.info.maxFrameDuration |
| |
| Note: |
| Pre-API level 23, FULL devices also supported arbitrary cropping region |
| (android.scaler.croppingType `== FREEFORM`); this requirement was relaxed in API level |
| 23, and `FULL` devices may only support `CENTERED` cropping. |
| </notes> |
| </value> |
| <value> |
| LEGACY |
| <notes> |
| This camera device is running in backward compatibility mode. |
| |
| Only the stream configurations listed in the `LEGACY` |
| [table](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#legacy-level-guaranteed-configurations) |
| in the documentation are supported. |
| |
A `LEGACY` device does not support per-frame control, manual sensor control, manual
post-processing, or arbitrary cropping regions, and has relaxed performance constraints.
| No additional capabilities beyond `BACKWARD_COMPATIBLE` will ever be listed by a |
| `LEGACY` device in android.request.availableCapabilities. |
| |
| In addition, the android.control.aePrecaptureTrigger is not functional on `LEGACY` |
| devices. Instead, every request that includes a JPEG-format output target is treated |
| as triggering a still capture, internally executing a precapture trigger. This may |
| fire the flash for flash power metering during precapture, and then fire the flash |
| for the final capture, if a flash is available on the device and the AE mode is set to |
| enable the flash. |
| |
| Devices that initially shipped with Android version {@link |
| android.os.Build.VERSION_CODES#Q Q} or newer will not include any LEGACY-level devices. |
| </notes> |
| </value> |
| <value> |
| 3 |
| <notes> |
| This camera device is capable of YUV reprocessing and RAW data capture, in addition to |
| FULL-level capabilities. |
| |
| The stream configurations listed in the `LEVEL_3`, `RAW`, `FULL`, `LEGACY` and |
| `LIMITED` |
| [tables](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#level-3-additional-guaranteed-configurations) |
| in the documentation are guaranteed to be supported. |
| |
| The following additional capabilities are guaranteed to be supported: |
| |
| * `YUV_REPROCESSING` capability (android.request.availableCapabilities contains |
| `YUV_REPROCESSING`) |
| * `RAW` capability (android.request.availableCapabilities contains |
| `RAW`) |
| </notes> |
| </value> |
| <value hal_version="3.3"> |
| EXTERNAL |
| <notes> |
| This camera device is backed by an external camera connected to this Android device. |
| |
| The device has capability identical to a LIMITED level device, with the following |
| exceptions: |
| |
| * The device may not report lens/sensor related information such as |
| - android.lens.focalLength |
| - android.lens.info.hyperfocalDistance |
| - android.sensor.info.physicalSize |
| - android.sensor.info.whiteLevel |
| - android.sensor.blackLevelPattern |
| - android.sensor.info.colorFilterArrangement |
| - android.sensor.rollingShutterSkew |
| * The device will report 0 for android.sensor.orientation |
* The device provides weaker guarantees of a stable framerate, as the framerate partly
  depends on the external camera being used.
| </notes> |
| </value> |
| </enum> |
| <description> |
| Generally classifies the overall set of the camera device functionality. |
| </description> |
| <details> |
| The supported hardware level is a high-level description of the camera device's |
| capabilities, summarizing several capabilities into one field. Each level adds additional |
| features to the previous one, and is always a strict superset of the previous level. |
The ordering is `LEGACY &lt; LIMITED &lt; FULL &lt; LEVEL_3`.
| |
| Starting from `LEVEL_3`, the level enumerations are guaranteed to be in increasing |
| numerical value as well. To check if a given device is at least at a given hardware level, |
| the following code snippet can be used: |
| |
| // Returns true if the device supports the required hardware level, or better. |
| boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) { |
| final int[] sortedHwLevels = { |
| CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY, |
| CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL, |
| CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED, |
| CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL, |
| CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3 |
| }; |
| int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); |
| if (requiredLevel == deviceLevel) { |
| return true; |
| } |
| |
| for (int sortedlevel : sortedHwLevels) { |
| if (sortedlevel == requiredLevel) { |
| return true; |
| } else if (sortedlevel == deviceLevel) { |
| return false; |
| } |
| } |
| return false; // Should never reach here |
| } |
| |
| At a high level, the levels are: |
| |
| * `LEGACY` devices operate in a backwards-compatibility mode for older |
| Android devices, and have very limited capabilities. |
| * `LIMITED` devices represent the |
| baseline feature set, and may also include additional capabilities that are |
| subsets of `FULL`. |
| * `FULL` devices additionally support per-frame manual control of sensor, flash, lens and |
| post-processing settings, and image capture at a high rate. |
| * `LEVEL_3` devices additionally support YUV reprocessing and RAW image capture, along |
| with additional output stream configurations. |
| * `EXTERNAL` devices are similar to `LIMITED` devices with exceptions like some sensor or |
| lens information not reported or less stable framerates. |
| |
| See the individual level enums for full descriptions of the supported capabilities. The |
| android.request.availableCapabilities entry describes the device's capabilities at a |
| finer-grain level, if needed. In addition, many controls have their available settings or |
| ranges defined in individual entries from {@link |
| android.hardware.camera2.CameraCharacteristics|ACameraManager_getCameraCharacteristics}. |
| |
| Some features are not part of any particular hardware level or capability and must be |
| queried separately. These include: |
| |
| * Calibrated timestamps (android.sensor.info.timestampSource `==` REALTIME) |
| * Precision lens control (android.lens.info.focusDistanceCalibration `==` CALIBRATED) |
| * Face detection (android.statistics.info.availableFaceDetectModes) |
| * Optical or electrical image stabilization |
| (android.lens.info.availableOpticalStabilization, |
| android.control.availableVideoStabilizationModes) |
| |
| </details> |
| <hal_details> |
| A camera HALv3 device can implement one of three possible operational modes; LIMITED, |
| FULL, and LEVEL_3. |
| |
| FULL support or better is expected from new higher-end devices. Limited |
| mode has hardware requirements roughly in line with those for a camera HAL device v1 |
| implementation, and is expected from older or inexpensive devices. Each level is a strict |
| superset of the previous level, and they share the same essential operational flow. |
| |
| For full details refer to "S3. Operational Modes" in camera3.h |
| |
| Camera HAL3+ must not implement LEGACY mode. It is there for backwards compatibility in |
| the `android.hardware.camera2` user-facing API only on legacy HALv1 devices, and is |
| implemented by the camera framework code. |
| |
EXTERNAL level devices have a lower performance bar in CTS, since performance might depend
on the external camera being used and is not fully controlled by the device manufacturer.
| The ITS test suite is exempted for the same reason. |
| </hal_details> |
| </entry> |
| <entry name="version" type="byte" visibility="public" typedef="string" hal_version="3.3"> |
| <description> |
| A short string for manufacturer version information about the camera device, such as |
| ISP hardware, sensors, etc. |
| </description> |
| <details> |
| This can be used in {@link android.media.ExifInterface#TAG_IMAGE_DESCRIPTION TAG_IMAGE_DESCRIPTION} |
| in jpeg EXIF. This key may be absent if no version information is available on the |
| device. |
| </details> |
| <hal_details> |
| The string must consist of only alphanumeric characters, punctuation, and |
| whitespace, i.e. it must match regular expression "[\p{Alnum}\p{Punct}\p{Space}]*". |
| It must not exceed 256 characters. |
| </hal_details> |
| </entry> |
| <entry name="supportedBufferManagementVersion" type="byte" visibility="system" |
| enum="true" hal_version="3.4"> |
| <enum> |
| <value> |
| HIDL_DEVICE_3_5 |
| <notes> |
| This camera device supports and opts in to the buffer management APIs provided by |
| HIDL ICameraDevice version 3.5. |
| </notes> |
| </value> |
| <value hal_version="3.10"> |
| SESSION_CONFIGURABLE |
| <notes> |
| This camera device supports the buffer management APIs provided by AIDL ICameraDevice |
| version 1. It also supports the ICameraDeviceSession.configureStreamsV2 call to |
| inform the camera framework whether HAL buffer manager must be used for the |
| particular session configured. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| The version of buffer management API this camera device supports and opts into. |
| </description> |
| <details> |
When this key is not present, the camera framework will interact with this camera device
without any buffer management HAL API. When this key is present and the camera framework
supports the buffer management API version, the camera framework will interact with the
camera HAL using that version of the buffer management API.
| </details> |
| </entry> |
| <entry name="deviceStateSensorOrientationMap" type="int64" visibility="java_public" |
| synthetic="true" optional="true" typedef="deviceStateSensorOrientationMap" |
| hwlevel="limited"> |
| <description>This lists the mapping between a device folding state and |
| specific camera sensor orientation for logical cameras on a foldable device. |
| </description> |
| <details> |
| Logical cameras on foldable devices can support sensors with different orientation |
| values. The orientation value may need to change depending on the specific folding |
| state. Information about the mapping between the device folding state and the |
| sensor orientation can be obtained in |
| {@link android.hardware.camera2.params.DeviceStateSensorOrientationMap}. |
Device state orientation maps are optional and may be present on devices that support
android.scaler.rotateAndCrop.
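
As an illustrative sketch (`characteristics` is assumed to be the logical camera's
CameraCharacteristics):

    DeviceStateSensorOrientationMap map = characteristics.get(
            CameraCharacteristics.INFO_DEVICE_STATE_SENSOR_ORIENTATION_MAP);
    if (map != null) {
        int foldedOrientation =
                map.getSensorOrientation(DeviceStateSensorOrientationMap.FOLDED);
    }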
| </details> |
| </entry> |
| <entry name="deviceStateOrientations" type="int64" visibility="ndk_public" |
| container="array" hwlevel="limited" hal_version="3.7"> |
| <array> |
| <size>2</size> |
| <size>n</size> |
| </array> |
| <units>(device fold state, sensor orientation) x n</units> |
| <details> |
| HAL must populate the array with |
| (hardware::camera::provider::V2_5::DeviceState, sensorOrientation) pairs for each |
| supported device state bitwise combination. |
| </details> |
| </entry> |
| <entry name="sessionConfigurationQueryVersion" type="int32" |
| visibility="fwk_java_public" enum="true" typedef="versionCode" |
| hwlevel="legacy" hal_version="3.10"> |
| <enum> |
| <value id="34">UPSIDE_DOWN_CAKE</value> |
| <value id="35">VANILLA_ICE_CREAM</value> |
| <value id="36" hal_version="3.11">BAKLAVA</value> |
| </enum> |
| <description>The version of the session configuration query |
| {@link android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported} |
| and {@link android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} |
| APIs. |
| </description> |
<details>The possible values in this key correspond to the values defined in
android.os.Build.VERSION_CODES. Each version defines a set of feature combinations for
which the camera device must reliably report whether they are supported via
{@link android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported}.
It also defines the set of session specific keys in CameraCharacteristics when returned from
{@link android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics}.
The version is always less than or equal to android.os.Build.VERSION.SDK_INT.
| |
| If set to UPSIDE_DOWN_CAKE, this camera device doesn't support the |
| {@link android.hardware.camera2.CameraDevice.CameraDeviceSetup} API. |
| Trying to create a CameraDeviceSetup instance throws an UnsupportedOperationException. |
| |
| From VANILLA_ICE_CREAM onwards, the camera compliance tests verify a set of |
| commonly used SessionConfigurations to ensure that the outputs of |
| {@link android.hardware.camera2.CameraDevice.CameraDeviceSetup#isSessionConfigurationSupported} |
| and {@link android.hardware.camera2.CameraDevice.CameraDeviceSetup#getSessionCharacteristics} |
| are accurate. The application is encouraged to use these SessionConfigurations when turning on |
| multiple features at the same time. |
| |
| When set to VANILLA_ICE_CREAM, the combinations of the following configurations are verified |
| by the compliance tests: |
| |
| * A set of commonly used stream combinations: |
| |
| Target 1 | Size | Target 2 | Size | |
| :----------:|:-------------:|:---------------:|:------------:| |
| PRIV | S1080P | | | |
| PRIV | S720P | | | |
| PRIV | S1080P | JPEG/JPEG_R | MAXIMUM_16_9 | |
| PRIV | S1080P | JPEG/JPEG_R | UHD | |
| PRIV | S1080P | JPEG/JPEG_R | S1440P | |
| PRIV | S1080P | JPEG/JPEG_R | S1080P | |
| PRIV | S1080P | PRIV | UHD | |
| PRIV | S720P | JPEG/JPEG_R | MAXIMUM_16_9 | |
| PRIV | S720P | JPEG/JPEG_R | UHD | |
| PRIV | S720P | JPEG/JPEG_R | S1080P | |
| PRIV | XVGA | JPEG/JPEG_R | MAXIMUM_4_3 | |
| PRIV | S1080P_4_3 | JPEG/JPEG_R | MAXIMUM_4_3 | |
| |
| * {@code MAXIMUM_4_3} refers to the camera device's maximum output resolution with |
| 4:3 aspect ratio for that format from {@code StreamConfigurationMap#getOutputSizes}. |
| * {@code MAXIMUM_16_9} is the maximum output resolution with 16:9 aspect ratio. |
| * {@code S1440P} refers to {@code 2560x1440 (16:9)}. |
| * {@code S1080P} refers to {@code 1920x1080 (16:9)}. |
| * {@code S720P} refers to {@code 1280x720 (16:9)}. |
| * {@code UHD} refers to {@code 3840x2160 (16:9)}. |
| * {@code XVGA} refers to {@code 1024x768 (4:3)}. |
* {@code S1080P_4_3} refers to {@code 1440x1080 (4:3)}.
| |
| * VIDEO_STABILIZATION_MODE: {OFF, PREVIEW} |
| |
| * AE_TARGET_FPS_RANGE: { {\*, 30}, {\*, 60} } |
| |
| * DYNAMIC_RANGE_PROFILE: {STANDARD, HLG10} |
| |
| All of the above configurations can be set up with a SessionConfiguration. The list of |
| OutputConfiguration contains the stream configurations and DYNAMIC_RANGE_PROFILE, and |
| the AE_TARGET_FPS_RANGE and VIDEO_STABILIZATION_MODE are set as session parameters. |
| |
| When set to BAKLAVA, the additional stream combinations below are verified |
| by the compliance tests: |
| |
| Target 1 | Size | Target 2 | Size | |
| :----------:|:-------------:|:---------------:|:------------:| |
| PRIV | S1080P | PRIV | S1080P | |
| PRIV | S1080P | PRIV | S1440P | |
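
As an illustrative sketch (`characteristics` is assumed to be the device's
CameraCharacteristics), an application can gate its use of CameraDeviceSetup on this
key:

    Integer queryVersion = characteristics.get(
            CameraCharacteristics.INFO_SESSION_CONFIGURATION_QUERY_VERSION);
    if (queryVersion != null) {
        if (queryVersion &gt;= Build.VERSION_CODES.VANILLA_ICE_CREAM) {
            // isSessionConfigurationSupported / getSessionCharacteristics
            // results are compliance-verified for the combinations above.
        }
    }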
| |
| </details> |
| <hal_details> |
Preview stabilization must be orthogonal to other features. In other words, if preview
stabilization is supported by the camera device,
isStreamCombinationWithSettingsSupported must return the same value for a particular
combination whether preview stabilization is off or on. This reduces the
search space for feature combination queries.
| </hal_details> |
| </entry> |
| <entry name="deviceId" type="int32" visibility="fwk_only"> |
| <description> |
| Id of the device that owns this camera. |
| </description> |
| <details> |
For a virtual camera, this is the id of the virtual device
that owns the camera. For any other camera, this key is not present.
Callers should assume {@link android.content.Context#DEVICE_ID_DEFAULT}
when this key is not present.
| </details> |
| </entry> |
| </static> |
| </section> |
| <section name="blackLevel"> |
| <controls> |
| <entry name="lock" type="byte" visibility="public" enum="true" |
| typedef="boolean" hwlevel="full"> |
| <enum> |
| <value>OFF</value> |
| <value>ON</value> |
| </enum> |
| <description> Whether black-level compensation is locked |
| to its current values, or is free to vary.</description> |
| <details>When set to `true` (ON), the values used for black-level |
| compensation will not change until the lock is set to |
| `false` (OFF). |
| |
| Since changes to certain capture parameters (such as |
| exposure time) may require resetting of black level |
| compensation, the camera device must report whether setting |
| the black level lock was successful in the output result |
| metadata. |
| |
| For example, if a sequence of requests is as follows: |
| |
| * Request 1: Exposure = 10ms, Black level lock = OFF |
| * Request 2: Exposure = 10ms, Black level lock = ON |
| * Request 3: Exposure = 10ms, Black level lock = ON |
| * Request 4: Exposure = 20ms, Black level lock = ON |
| * Request 5: Exposure = 20ms, Black level lock = ON |
| * Request 6: Exposure = 20ms, Black level lock = ON |
| |
| And the exposure change in Request 4 requires the camera |
| device to reset the black level offsets, then the output |
| result metadata is expected to be: |
| |
| * Result 1: Exposure = 10ms, Black level lock = OFF |
| * Result 2: Exposure = 10ms, Black level lock = ON |
| * Result 3: Exposure = 10ms, Black level lock = ON |
| * Result 4: Exposure = 20ms, Black level lock = OFF |
| * Result 5: Exposure = 20ms, Black level lock = ON |
| * Result 6: Exposure = 20ms, Black level lock = ON |
| |
| This indicates to the application that on frame 4, black |
| levels were reset due to exposure value changes, and pixel |
| values may not be consistent across captures. |
| |
| The camera device will maintain the lock to the extent |
| possible, only overriding the lock to OFF when changes to |
| other request parameters require a black level recalculation |
| or reset. |
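
As an illustrative sketch (assuming `requestBuilder` and `result` are the
application's request builder and capture result):

    requestBuilder.set(CaptureRequest.BLACK_LEVEL_LOCK, true);
    // ... after each capture completes:
    Boolean locked = result.get(CaptureResult.BLACK_LEVEL_LOCK);
    if (locked != null) {
        if (!locked) {
            // The device reset black level compensation for this frame; pixel
            // values may not be comparable to earlier locked frames.
        }
    }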
| </details> |
| <hal_details> |
| If for some reason black level locking is no longer possible |
| (for example, the analog gain has changed, which forces |
| black level offsets to be recalculated), then the HAL must |
| override this request (and it must report 'OFF' when this |
| does happen) until the next capture for which locking is |
| possible again.</hal_details> |
| <tag id="HAL2" /> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.blackLevel.lock" |
| kind="controls"> |
| <details> |
| Whether the black level offset was locked for this frame. Should be |
| ON if android.blackLevel.lock was ON in the capture request, unless |
| a change in other capture settings forced the camera device to |
| perform a black level reset. |
| </details> |
| </clone> |
| </dynamic> |
| </section> |
| <section name="sync"> |
| <dynamic> |
| <entry name="frameNumber" type="int64" visibility="ndk_public" |
| enum="true" hwlevel="legacy"> |
| <enum> |
| <value id="-1">CONVERGING |
| <notes> |
| The current result is not yet fully synchronized to any request. |
| |
| Synchronization is in progress, and reading metadata from this |
| result may include a mix of data that have taken effect since the |
| last synchronization time. |
| |
| In some future result, within android.sync.maxLatency frames, |
this value will update to the actual frame number
| the result is guaranteed to be synchronized to (as long as the |
| request settings remain constant). |
| </notes> |
| </value> |
| <value id="-2">UNKNOWN |
| <notes> |
| The current result's synchronization status is unknown. |
| |
| The result may have already converged, or it may be in |
| progress. Reading from this result may include some mix |
| of settings from past requests. |
| |
| After a settings change, the new settings will eventually all |
| take effect for the output buffers and results. However, this |
| value will not change when that happens. Altering settings |
| rapidly may provide outcomes using mixes of settings from recent |
| requests. |
| |
| This value is intended primarily for backwards compatibility with |
| the older camera implementations (for android.hardware.Camera). |
| </notes> |
| </value> |
| </enum> |
| <description>The frame number corresponding to the last request |
| with which the output result (metadata + buffers) has been fully |
| synchronized.</description> |
| <range>Either a non-negative value corresponding to a |
| `frame_number`, or one of the two enums (CONVERGING / UNKNOWN). |
| </range> |
| <details> |
| When a request is submitted to the camera device, there is usually a |
| delay of several frames before the controls get applied. A camera |
| device may either choose to account for this delay by implementing a |
| pipeline and carefully submit well-timed atomic control updates, or |
| it may start streaming control changes that span over several frame |
| boundaries. |
| |
| In the latter case, whenever a request's settings change relative to |
| the previous submitted request, the full set of changes may take |
multiple frame durations to fully take effect. Some settings may
take effect sooner (in fewer frame durations) than others.
| |
While a set of control changes is being propagated, this value
will be CONVERGING.
| |
Once a set of control changes is known to have finished
propagating, and the resulting updated control settings
have been read back by the camera device, this value will be set
to a non-negative frame number (corresponding to the request to
which the results have synchronized).
| |
| Older camera device implementations may not have a way to detect |
| when all camera controls have been applied, and will always set this |
| value to UNKNOWN. |
| |
| FULL capability devices will always have this value set to the |
| frame number of the request corresponding to this result. |
| |
| _Further details_: |
| |
| * Whenever a request differs from the last request, any future |
| results not yet returned may have this value set to CONVERGING (this |
| could include any in-progress captures not yet returned by the camera |
| device, for more details see pipeline considerations below). |
| * Submitting a series of multiple requests that differ from the |
| previous request (e.g. r1, r2, r3 s.t. r1 != r2 != r3) |
| moves the new synchronization frame to the last non-repeating |
| request (using the smallest frame number from the contiguous list of |
| repeating requests). |
| * Submitting the same request repeatedly will not change this value |
| to CONVERGING, if it was already a non-negative value. |
| * When this value changes to non-negative, that means that all of the |
| metadata controls from the request have been applied, all of the |
| metadata controls from the camera device have been read to the |
| updated values (into the result), and all of the graphics buffers |
| corresponding to this result are also synchronized to the request. |
| |
| _Pipeline considerations_: |
| |
| Submitting a request with updated controls relative to the previously |
| submitted requests may also invalidate the synchronization state |
| of all the results corresponding to currently in-flight requests. |
| |
| In other words, results for this current request and up to |
| android.request.pipelineMaxDepth prior requests may have their |
| android.sync.frameNumber change to CONVERGING. |
| </details> |
| <hal_details> |
| Using UNKNOWN here is illegal unless android.sync.maxLatency |
| is also UNKNOWN. |
| |
| FULL capability devices should simply set this value to the |
| `frame_number` of the request this result corresponds to. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| </dynamic> |
| <static> |
| <entry name="maxLatency" type="int32" visibility="public" enum="true" |
| hwlevel="legacy"> |
| <enum> |
| <value id="0">PER_FRAME_CONTROL |
| <notes> |
| Every frame has the requests immediately applied. |
| |
| Changing controls over multiple requests one after another will |
| produce results that have those controls applied atomically |
| each frame. |
| |
| All FULL capability devices will have this as their maxLatency. |
| </notes> |
| </value> |
| <value id="-1">UNKNOWN |
| <notes> |
| Each new frame has some subset (potentially the entire set) |
| of the past requests applied to the camera settings. |
| |
| By submitting a series of identical requests, the camera device |
| will eventually have the camera settings applied, but it is |
| unknown when that exact point will be. |
| |
| All LEGACY capability devices will have this as their maxLatency. |
| </notes> |
| </value> |
| </enum> |
| <description> |
The maximum number of frames that can occur after a request
(different from the previous one) has been submitted, and before the
result's state becomes synchronized.
| </description> |
| <units>Frame counts</units> |
| <range>A positive value, PER_FRAME_CONTROL, or UNKNOWN.</range> |
| <details> |
| This defines the maximum distance (in number of metadata results), |
| between the frame number of the request that has new controls to apply |
| and the frame number of the result that has all the controls applied. |
| |
In other words, this acts as an upper bound on how many frames
must pass before the camera device knows for certain that the newly
submitted camera settings have been applied to outgoing frames.
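
As a hedged Java sketch (assuming a `CameraCharacteristics` instance named
`chars`), an application can use this key to decide how long to wait after a
settings change:

    Integer maxLatency = chars.get(CameraCharacteristics.SYNC_MAX_LATENCY);
    if (maxLatency == null
            || maxLatency == CameraMetadata.SYNC_MAX_LATENCY_UNKNOWN) {
        // No guaranteed bound; wait heuristically after a settings change.
    } else if (maxLatency == CameraMetadata.SYNC_MAX_LATENCY_PER_FRAME_CONTROL) {
        // Settings apply to the very frame they are submitted for.
    } else {
        // New settings are guaranteed to apply within maxLatency frames.
    }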
| </details> |
| <hal_details> |
For example, if maxLatency were 2,
| |
| initial request = X (repeating) |
| request1 = X |
| request2 = Y |
| request3 = Y |
| request4 = Y |
| |
where requestN has frameNumber N, and the first of the repeating
initial requests has frameNumber F (and F < 1).
| |
| initial result = X' + { android.sync.frameNumber == F } |
| result1 = X' + { android.sync.frameNumber == F } |
| result2 = X' + { android.sync.frameNumber == CONVERGING } |
| result3 = X' + { android.sync.frameNumber == CONVERGING } |
result4 = Y' + { android.sync.frameNumber == 2 }
| |
| where resultN has frameNumber N. |
| |
| Since `result4` has a `frameNumber == 4` and |
| `android.sync.frameNumber == 2`, the distance is clearly |
| `4 - 2 = 2`. |
| |
| Use `frame_count` from camera3_request_t instead of |
| android.request.frameCount or |
| `{@link android.hardware.camera2.CaptureResult#getFrameNumber}`. |
| |
| LIMITED devices are strongly encouraged to use a non-negative |
| value. If UNKNOWN is used here then app developers do not have a way |
| to know when sensor settings have been applied. |
| </hal_details> |
| <tag id="V1" /> |
| </entry> |
| </static> |
| </section> |
| <section name="reprocess"> |
| <controls> |
| <entry name="effectiveExposureFactor" type="float" visibility="java_public" hwlevel="limited"> |
| <description> |
The exposure time increase factor that application processing has effectively applied
to the original output frame before sending it for reprocessing.
| </description> |
| <units>Relative exposure time increase factor.</units> |
| <range> &gt;= 1.0</range> |
| <details> |
| This is optional, and will be supported if the camera device supports YUV_REPROCESSING |
| capability (android.request.availableCapabilities contains YUV_REPROCESSING). |
| |
| For some YUV reprocessing use cases, the application may choose to filter the original |
| output frames to effectively reduce the noise to the same level as a frame that was |
| captured with longer exposure time. To be more specific, assuming the original captured |
| images were captured with a sensitivity of S and an exposure time of T, the model in |
| the camera device is that the amount of noise in the image would be approximately what |
| would be expected if the original capture parameters had been a sensitivity of |
| S/effectiveExposureFactor and an exposure time of T*effectiveExposureFactor, rather |
| than S and T respectively. If the captured images were processed by the application |
| before being sent for reprocessing, then the application may have used image processing |
| algorithms and/or multi-frame image fusion to reduce the noise in the |
| application-processed images (input images). By using the effectiveExposureFactor |
| control, the application can communicate to the camera device the actual noise level |
| improvement in the application-processed image. With this information, the camera |
| device can select appropriate noise reduction and edge enhancement parameters to avoid |
| excessive noise reduction (android.noiseReduction.mode) and insufficient edge |
| enhancement (android.edge.mode) being applied to the reprocessed frames. |
| |
For example, for the multi-frame image fusion use case, the application may fuse
multiple output frames together into a final frame for reprocessing. When N images are
fused into 1 image for reprocessing, the exposure time increase factor could be up to
| square root of N (based on a simple photon shot noise model). The camera device will |
| adjust the reprocessing noise reduction and edge enhancement parameters accordingly to |
| produce the best quality images. |
| |
This is a relative factor; 1.0 indicates the application hasn't processed the input
buffer in a way that affects its effective exposure time.
| |
This control is only effective for YUV reprocessing capture requests. For noise
reduction reprocessing, it is only effective when `android.noiseReduction.mode != OFF`.
| Similarly, for edge enhancement reprocessing, it is only effective when |
| `android.edge.mode != OFF`. |
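
For instance, under the square-root model above, fusing N = 4 frames yields a
factor of sqrt(4) = 2.0. A minimal Java sketch (assuming `reprocessRequestBuilder`
comes from {@link android.hardware.camera2.CameraDevice#createReprocessCaptureRequest}):

    int fusedFrameCount = 4; // N input frames merged by the application
    float factor = (float) Math.sqrt(fusedFrameCount); // = 2.0f
    reprocessRequestBuilder.set(
            CaptureRequest.REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, factor);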
| </details> |
| <tag id="REPROC" /> |
| </entry> |
| </controls> |
| <dynamic> |
| <clone entry="android.reprocess.effectiveExposureFactor" kind="controls"> |
| </clone> |
| </dynamic> |
| <static> |
| <entry name="maxCaptureStall" type="int32" visibility="java_public" hwlevel="limited"> |
| <description> |
The maximal camera capture pipeline stall (in units of frame count) introduced by a
| reprocess capture request. |
| </description> |
| <units>Number of frames.</units> |
| <range> &lt;= 4</range> |
| <details> |
This key describes the maximal interference that one reprocess (input) request
can introduce to the camera device's simultaneous streaming of regular (output) capture
requests, including repeating requests.
| |
| When a reprocessing capture request is submitted while a camera output repeating request |
| (e.g. preview) is being served by the camera device, it may preempt the camera capture |
| pipeline for at least one frame duration so that the camera device is unable to process |
| the following capture request in time for the next sensor start of exposure boundary. |
| When this happens, the application may observe a capture time gap (longer than one frame |
| duration) between adjacent capture output frames, which usually exhibits as preview |
glitch if the repeating request output targets include a preview surface. This key gives
the worst-case number of frames of stall introduced by one reprocess request with any
format/size combination.
| |
If this key reports 0, a reprocess request doesn't introduce any glitch to the
ongoing camera repeating request outputs, as if the reprocess request had never been issued.
| |
| This key is supported if the camera device supports PRIVATE or YUV reprocessing ( |
| i.e. android.request.availableCapabilities contains PRIVATE_REPROCESSING or |
| YUV_REPROCESSING). |
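
A short Java sketch (assuming a `CameraCharacteristics` instance `chars`) for
budgeting the worst-case glitch before submitting a reprocess request:

    Integer maxStall = chars.get(CameraCharacteristics.REPROCESS_MAX_CAPTURE_STALL);
    if (maxStall != null && maxStall == 0) {
        // Reprocess requests never stall the repeating request outputs.
    } else if (maxStall != null) {
        // Up to maxStall extra frame durations may separate adjacent
        // output frames around a reprocess capture.
    }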
| </details> |
| <tag id="REPROC" /> |
| </entry> |
| </static> |
| </section> |
| <section name="depth"> |
| <static> |
| <entry name="maxDepthSamples" type="int32" visibility="system" hwlevel="limited"> |
| <description>Maximum number of points that a depth point cloud may contain. |
| </description> |
| <details> |
| If a camera device supports outputting depth range data in the form of a depth point |
| cloud ({@link android.graphics.ImageFormat#DEPTH_POINT_CLOUD}), this is the maximum |
| number of points an output buffer may contain. |
| |
| Any given buffer may contain between 0 and maxDepthSamples points, inclusive. |
| If output in the depth point cloud format is not supported, this entry will |
| not be defined. |
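
As an illustrative Java sketch (assuming an `android.media.Image` named `image`
in DEPTH_POINT_CLOUD format, whose single plane stores each point as four
floats: X, Y, Z, then a confidence value):

    ByteBuffer raw = image.getPlanes()[0].getBuffer();
    // Interpret the raw bytes as floats, assuming native byte order.
    FloatBuffer points = raw.order(ByteOrder.nativeOrder()).asFloatBuffer();
    int numPoints = points.remaining() / 4; // 0 <= numPoints <= maxDepthSamples
    for (int i = 0; i < numPoints; i++) {
        float x = points.get(4 * i);
        float y = points.get(4 * i + 1);
        float z = points.get(4 * i + 2);
        float confidence = points.get(4 * i + 3);
        // ... consume the point ...
    }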
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDepthStreamConfigurations" type="int32" visibility="ndk_public" |
| enum="true" container="array" typedef="streamConfiguration" hwlevel="limited"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available depth dataspace stream |
| configurations that this camera device supports |
| (i.e. format, width, height, output/input stream). |
| </description> |
| <details> |
| These are output stream configurations for use with |
| dataSpace HAL_DATASPACE_DEPTH. The configurations are |
| listed as `(format, width, height, input?)` tuples. |
| |
| Only devices that support depth output for at least |
| the HAL_PIXEL_FORMAT_Y16 dense depth map may include |
| this entry. |
| |
| A device that also supports the HAL_PIXEL_FORMAT_BLOB |
| sparse depth point cloud must report a single entry for |
| the format in this list as `(HAL_PIXEL_FORMAT_BLOB, |
| android.depth.maxDepthSamples, 1, OUTPUT)` in addition to |
| the entries for HAL_PIXEL_FORMAT_Y16. |
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDepthMinFrameDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hwlevel="limited"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination for depth output formats. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| This should correspond to the frame duration when only that |
| stream is active, with all processing (typically in android.*.mode) |
| set to either OFF or FAST. |
| |
| When multiple streams are used in a request, the minimum frame |
| duration will be max(individual stream min durations). |
| |
| The minimum frame duration of a stream (of a particular format, size) |
| is the same regardless of whether the stream is input or output. |
| |
| See android.sensor.frameDuration and |
| android.scaler.availableStallDurations for more details about |
| calculating the max frame rate. |
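
For instance (a sketch assuming a `StreamConfigurationMap` named `map` and a
chosen `Size size`), the maximum depth frame rate for a single active stream
follows directly from the minimum frame duration:

    long minFrameDurationNs =
            map.getOutputMinFrameDuration(ImageFormat.DEPTH16, size);
    double maxFps = 1e9 / minFrameDurationNs; // e.g. 33333333 ns -> ~30 fps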
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDepthStallDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hwlevel="limited"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination for depth streams. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| A stall duration is how much extra time would get added |
| to the normal minimum frame duration for a repeating request |
| that has streams with non-zero stall. |
| |
| This functions similarly to |
| android.scaler.availableStallDurations for depth |
| streams. |
| |
| All depth output stream formats may have a nonzero stall |
| duration. |
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="depthIsExclusive" type="byte" visibility="public" |
| enum="true" typedef="boolean" hwlevel="limited"> |
| <enum> |
| <value>FALSE</value> |
| <value>TRUE</value> |
| </enum> |
| <description>Indicates whether a capture request may target both a |
| DEPTH16 / DEPTH_POINT_CLOUD output, and normal color outputs (such as |
| YUV_420_888, JPEG, or RAW) simultaneously. |
| </description> |
| <details> |
| If TRUE, including both depth and color outputs in a single |
| capture request is not supported. An application must interleave color |
| and depth requests. If FALSE, a single request can target both types |
| of output. |
| |
| Typically, this restriction exists on camera devices that |
| need to emit a specific pattern or wavelength of light to |
| measure depth values, which causes the color image to be |
| corrupted during depth measurement. |
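
A minimal Java sketch (assuming a `CameraCharacteristics` instance `chars`) for
branching on this key when building a capture session:

    Boolean exclusive = chars.get(CameraCharacteristics.DEPTH_DEPTH_IS_EXCLUSIVE);
    if (Boolean.TRUE.equals(exclusive)) {
        // Interleave: alternate requests targeting only depth outputs with
        // requests targeting only color outputs.
    } else {
        // A single request may target both depth and color outputs.
    }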
| </details> |
| </entry> |
| <entry name="availableRecommendedDepthStreamConfigurations" type="int32" |
| visibility="ndk_public" optional="true" container="array" |
| typedef="recommendedStreamConfiguration" hal_version="3.4"> |
| <array> |
| <size>n</size> |
| <size>5</size> |
| </array> |
| <description>Recommended depth stream configurations for common client use cases. |
| </description> |
<details>Optional subset of android.depth.availableDepthStreamConfigurations that
contains similar tuples, listed as
`(width, height, format, output/input stream, usecase bit field)`.
| Camera devices will be able to suggest particular depth stream configurations which are |
| power and performance efficient for specific use cases. For more information about |
| retrieving the suggestions see |
| {@link android.hardware.camera2.CameraCharacteristics#getRecommendedStreamConfigurationMap}. |
| </details> |
| <ndk_details> |
| For data representation please refer to |
| android.scaler.availableRecommendedStreamConfigurations |
| </ndk_details> |
| <hal_details> |
| Recommended depth configurations are expected to be declared with SNAPSHOT and/or |
| ZSL if supported by the device. |
| For additional details on how to declare recommended stream configurations, check |
| android.scaler.availableRecommendedStreamConfigurations. |
| For additional requirements on depth streams please consider |
| android.depth.availableDepthStreamConfigurations. |
| </hal_details> |
| </entry> |
| <entry name="availableDynamicDepthStreamConfigurations" type="int32" visibility="ndk_public" |
| enum="true" container="array" typedef="streamConfiguration" hal_version="3.4"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available dynamic depth dataspace stream |
| configurations that this camera device supports |
| (i.e. format, width, height, output/input stream). |
| </description> |
| <details> |
| These are output stream configurations for use with |
| dataSpace DYNAMIC_DEPTH. The configurations are |
| listed as `(format, width, height, input?)` tuples. |
| |
Only devices that support depth output for at least
the HAL_PIXEL_FORMAT_Y16 dense depth map, along with
HAL_PIXEL_FORMAT_BLOB of the same size or a size with
the same aspect ratio, can have a dynamic depth dataspace
stream configuration. android.depth.depthIsExclusive also
needs to be set to FALSE.
| </details> |
| <hal_details> |
| Do not set this property directly. |
| It is populated by camera framework and must not be set |
| at the HAL layer. |
| </hal_details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDynamicDepthMinFrameDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hal_version="3.4"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination for dynamic depth output streams. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| This should correspond to the frame duration when only that |
| stream is active, with all processing (typically in android.*.mode) |
| set to either OFF or FAST. |
| |
| When multiple streams are used in a request, the minimum frame |
| duration will be max(individual stream min durations). |
| |
| The minimum frame duration of a stream (of a particular format, size) |
| is the same regardless of whether the stream is input or output. |
| </details> |
| <hal_details> |
| Do not set this property directly. |
| It is populated by camera framework and must not be set |
| at the HAL layer. |
| </hal_details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDynamicDepthStallDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hal_version="3.4"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination for dynamic depth streams. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| A stall duration is how much extra time would get added |
| to the normal minimum frame duration for a repeating request |
| that has streams with non-zero stall. |
| |
| All dynamic depth output streams may have a nonzero stall |
| duration. |
| </details> |
| <hal_details> |
| Do not set this property directly. |
| It is populated by camera framework and must not be set |
| at the HAL layer. |
| </hal_details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDepthStreamConfigurationsMaximumResolution" type="int32" |
| visibility="ndk_public" enum="true" container="array" typedef="streamConfiguration" |
| hal_version="3.6"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available depth dataspace stream |
| configurations that this camera device supports |
| (i.e. format, width, height, output/input stream) when a CaptureRequest is submitted with |
| android.sensor.pixelMode set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <details> |
| Analogous to android.depth.availableDepthStreamConfigurations, for configurations which |
| are applicable when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDepthMinFrameDurationsMaximumResolution" type="int64" |
| visibility="ndk_public" container="array" typedef="streamConfigurationDuration" |
| hal_version="3.6"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination for depth output formats when a CaptureRequest is submitted with |
| android.sensor.pixelMode set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Analogous to android.depth.availableDepthMinFrameDurations, for configurations which |
| are applicable when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| |
| See android.sensor.frameDuration and |
| android.scaler.availableStallDurationsMaximumResolution for more details about |
| calculating the max frame rate. |
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDepthStallDurationsMaximumResolution" type="int64" |
| visibility="ndk_public" container="array" typedef="streamConfigurationDuration" |
| hal_version="3.6"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination for depth streams for CaptureRequests where |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Analogous to android.depth.availableDepthStallDurations, for configurations which |
| are applicable when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDynamicDepthStreamConfigurationsMaximumResolution" type="int32" |
| visibility="ndk_public" enum="true" container="array" typedef="streamConfiguration" |
| hal_version="3.6"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available dynamic depth dataspace stream |
| configurations that this camera device supports (i.e. format, width, height, |
| output/input stream) for CaptureRequests where android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <details> |
| Analogous to android.depth.availableDynamicDepthStreamConfigurations, for configurations |
| which are applicable when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <hal_details> |
| Do not set this property directly. |
| It is populated by camera framework and must not be set |
| at the HAL layer. |
| </hal_details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDynamicDepthMinFrameDurationsMaximumResolution" type="int64" |
| visibility="ndk_public" container="array" typedef="streamConfigurationDuration" |
| hal_version="3.6"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination for dynamic depth output streams for CaptureRequests where |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Analogous to android.depth.availableDynamicDepthMinFrameDurations, for configurations |
| which are applicable when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <hal_details> |
| Do not set this property directly. |
| It is populated by camera framework and must not be set |
| at the HAL layer. |
| </hal_details> |
| <tag id="DEPTH" /> |
| </entry> |
| <entry name="availableDynamicDepthStallDurationsMaximumResolution" type="int64" |
| visibility="ndk_public" container="array" typedef="streamConfigurationDuration" |
| hal_version="3.6"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination for dynamic depth streams for CaptureRequests where |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Analogous to android.depth.availableDynamicDepthStallDurations, for configurations |
| which are applicable when android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <hal_details> |
| Do not set this property directly. |
| It is populated by camera framework and must not be set |
| at the HAL layer. |
| </hal_details> |
| <tag id="DEPTH" /> |
| </entry> |
| </static> |
| </section> |
| <section name="logicalMultiCamera"> |
| <static> |
| <entry name="physicalIds" type="byte" visibility="ndk_public" |
| container="array" hwlevel="limited" hal_version="3.3"> |
| <array> |
| <size>n</size> |
| </array> |
<description>String containing the IDs of the underlying physical cameras.
| </description> |
| <units>UTF-8 null-terminated string</units> |
| <details> |
For a logical camera, this is the concatenation of all underlying physical camera IDs.
The null terminator for each physical camera ID must be preserved so that the whole string
can be tokenized using '\0' to generate the list of physical camera IDs.
| |
| For example, if the physical camera IDs of the logical camera are "2" and "3", the |
| value of this tag will be ['2', '\0', '3', '\0']. |
| |
| The number of physical camera IDs must be no less than 2. |
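
A minimal Java sketch of the tokenization this layout implies (for illustration
only; the public Java API already exposes the parsed result via
{@link android.hardware.camera2.CameraCharacteristics#getPhysicalCameraIds}):

    byte[] value = {'2', '\0', '3', '\0'}; // raw bytes of this entry
    List<String> physicalIds = new ArrayList<>();
    int start = 0;
    for (int i = 0; i < value.length; i++) {
        if (value[i] == '\0') { // each ID is '\0'-terminated
            physicalIds.add(new String(value, start, i - start,
                    StandardCharsets.UTF_8));
            start = i + 1;
        }
    }
    // physicalIds is now ["2", "3"]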
| </details> |
| <hal_details> |
| Each physical camera id should uniquely identify a camera lens in the system. |
| So if each camera lens only backs one logical camera, all camera IDs in the system, |
| physical IDs or non-physical IDs, should be unique. |
| |
In rare cases where one camera lens backs two different logical cameras, the
physicalIds of both logical cameras should contain a physical camera ID
identifying that same camera lens. For example, if the mobile device has 3 rear facing
| cameras and no front facing cameras, and the 3 rear facing lenses may be modelled as |
| 2 logical cameras: |
| |
| - "device@1.0/internal/10": physicalIds: "camera0", "camera42" |
| - "device@1.0/internal/11": physicalIds: "camera1", "camera42" |
| |
| In this case, the two logical cameras are conflicting devices because they are backed |
| by a common lens. |
| |
| Physical camera IDs can be an arbitrary string not containing '\0'. |
| </hal_details> |
| <tag id="LOGICALCAMERA" /> |
| </entry> |
| <entry name="sensorSyncType" type="byte" visibility="public" |
| enum="true" hwlevel="limited" hal_version="3.3"> |
| <enum> |
| <value>APPROXIMATE |
| <notes> |
| A software mechanism is used to synchronize between the physical cameras. As a result, |
| the timestamp of an image from a physical stream is only an approximation of the |
| image sensor start-of-exposure time. |
| </notes> |
| </value> |
| <value>CALIBRATED |
| <notes> |
| The camera device supports frame timestamp synchronization at the hardware level, |
| and the timestamp of a physical stream image accurately reflects its |
| start-of-exposure time. |
| </notes> |
| </value> |
| </enum> |
| <description>The accuracy of frame timestamp synchronization between physical cameras</description> |
| <details> |
| The accuracy of the frame timestamp synchronization determines the physical cameras' |
| ability to start exposure at the same time. If the sensorSyncType is CALIBRATED, the |
| physical camera sensors usually run in leader/follower mode where one sensor generates a |
| timing signal for the other, so that their shutter time is synchronized. For APPROXIMATE |
| sensorSyncType, the camera sensors usually run in leader/leader mode, where both sensors |
use their own timing generators, and there could be an offset between their start-of-exposure times.
| |
| In both cases, all images generated for a particular capture request still carry the same |
| timestamps, so that they can be used to look up the matching frame number and |
| onCaptureStarted callback. |
| |
| This tag is only applicable if the logical camera device supports concurrent physical |
| streams from different physical cameras. |
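
As a sketch (assuming `chars` is the logical camera's `CameraCharacteristics`),
an application fusing physical streams can gate its algorithm on the reported
accuracy:

    Integer syncType = chars.get(
            CameraCharacteristics.LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE);
    boolean calibrated = syncType != null && syncType
            == CameraMetadata.LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE_CALIBRATED;
    // With CALIBRATED sync, fusion can trust the shared start-of-exposure
    // time; with APPROXIMATE, allow for an unknown offset between sensors.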
| </details> |
| <tag id="LOGICALCAMERA" /> |
| </entry> |
| </static> |
| <dynamic> |
| <entry name="activePhysicalId" type="byte" visibility="public" |
| typedef="string" hal_version="3.4"> |
| <description>String containing the ID of the underlying active physical camera. |
| </description> |
| <units>UTF-8 null-terminated string</units> |
| <details> |
The ID of the active physical camera that's backing the logical camera. All camera
streams and metadata that are not physical camera specific will originate from this
physical camera.
| |
| For a logical camera made up of physical cameras where each camera's lenses have |
| different characteristics, the camera device may choose to switch between the physical |
cameras when the application changes FOCAL_LENGTH or SCALER_CROP_REGION.
| At the time of lens switch, this result metadata reflects the new active physical camera |
| ID. |
| |
| This key will be available if the camera device advertises this key via {@link |
| android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys|ACAMERA_REQUEST_AVAILABLE_RESULT_KEYS}. |
When available, this must be one of the valid physical IDs backing this logical multi-camera.
| If this key is not available for a logical multi-camera, the camera device implementation |
| may still switch between different active physical cameras based on use case, but the |
| current active physical camera information won't be available to the application. |
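
A short Java sketch (capture-callback wiring and the `lastActiveId` field are
assumed) for tracking lens switches:

    public void onCaptureCompleted(CameraCaptureSession session,
            CaptureRequest request, TotalCaptureResult result) {
        String activeId = result.get(
                CaptureResult.LOGICAL_MULTI_CAMERA_ACTIVE_PHYSICAL_ID);
        if (activeId != null && !activeId.equals(lastActiveId)) {
            // The logical camera switched active physical lenses.
            lastActiveId = activeId;
        }
    }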
| </details> |
| <hal_details> |
Starting from HIDL ICameraDevice version 3.5, the tag must be available in the capture
result metadata to indicate the current active physical camera ID.
| </hal_details> |
| <tag id="LOGICALCAMERA" /> |
| </entry> |
| <entry name="activePhysicalSensorCropRegion" type="int32" visibility="public" |
| container="array" typedef="rectangle" hal_version="3.10"> |
| <array> |
| <size>4</size> |
| </array> |
| <description>The current region of the active physical sensor that will be read out for this |
| capture.</description> |
| <units>Pixel coordinates relative to |
| android.sensor.info.activeArraySize or |
android.sensor.info.preCorrectionActiveArraySize of the current
android.logicalMultiCamera.activePhysicalId, depending on distortion correction capability
and mode</units>
| <details> |
| This capture result matches with android.scaler.cropRegion on non-logical single |
| camera sensor devices. In case of logical cameras that can switch between several |
| physical devices in response to android.control.zoomRatio, this capture result will |
| not behave like android.scaler.cropRegion and android.control.zoomRatio, where the |
| combination of both reflects the effective zoom and crop of the logical camera output. |
| Instead, this capture result value will describe the zoom and crop of the active physical |
device. Some examples of when the value of this capture result will change include
switches between different physical lenses, switches between regular and maximum
resolution pixel mode, and zooming through the device's digital or optical range.
| This capture result is similar to android.scaler.cropRegion with respect to distortion |
| correction. When the distortion correction mode is OFF, the coordinate system follows |
| android.sensor.info.preCorrectionActiveArraySize, with (0, 0) being the top-left pixel |
| of the pre-correction active array. When the distortion correction mode is not OFF, |
| the coordinate system follows android.sensor.info.activeArraySize, with (0, 0) being |
| the top-left pixel of the active array. |
| |
| For camera devices with the |
| {@link android.hardware.camera2.CameraMetadata#REQUEST_AVAILABLE_CAPABILITIES_ULTRA_HIGH_RESOLUTION_SENSOR} |
| capability or devices where {@link CameraCharacteristics#getAvailableCaptureRequestKeys} |
| lists android.sensor.pixelMode, |
| the current active physical device |
| android.sensor.info.activeArraySizeMaximumResolution / |
| android.sensor.info.preCorrectionActiveArraySizeMaximumResolution must be used as the |
| coordinate system for requests where android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </details> |
| <ndk_details> |
| The data representation is int[4], which maps to (left, top, width, height). |
| </ndk_details> |
| <hal_details> |
| The output streams must maintain square pixels at all |
| times, no matter what the relative aspect ratios of the |
| crop region and the stream are. Negative values for |
| corner are allowed for raw output if full pixel array is |
| larger than active pixel array. Width and height may be |
| rounded to nearest larger supportable width, especially |
| for raw output, where only a few fixed scales may be |
| possible. |
| </hal_details> |
| <tag id="LOGICALCAMERA" /> |
| </entry> |
| </dynamic> |
| </section> |
| <section name="distortionCorrection"> |
| <controls> |
| <entry name="mode" type="byte" visibility="public" enum="true" hal_version="3.3"> |
| <enum> |
| <value>OFF |
| <notes>No distortion correction is applied.</notes></value> |
| <value>FAST <notes>Lens distortion correction is applied without reducing frame rate |
| relative to sensor output. It may be the same as OFF if distortion correction would |
| reduce frame rate relative to sensor.</notes></value> |
| <value>HIGH_QUALITY <notes>High-quality distortion correction is applied, at the cost of |
| possibly reduced frame rate relative to sensor output.</notes></value> |
| </enum> |
| <description>Mode of operation for the lens distortion correction block.</description> |
| <range>android.distortionCorrection.availableModes</range> |
| <details>The lens distortion correction block attempts to improve image quality by fixing |
| radial, tangential, or other geometric aberrations in the camera device's optics. If |
| available, the android.lens.distortion field documents the lens's distortion parameters. |
| |
| OFF means no distortion correction is done. |
| |
| FAST/HIGH_QUALITY both mean camera device determined distortion correction will be |
| applied. HIGH_QUALITY mode indicates that the camera device will use the highest-quality |
| correction algorithms, even if it slows down capture rate. FAST means the camera device |
| will not slow down capture rate when applying correction. FAST may be the same as OFF if |
| any correction at all would slow down capture rate. Every output stream will have a |
| similar amount of enhancement applied. |
| |
| The correction only applies to processed outputs such as YUV, Y8, JPEG, or DEPTH16; it is |
| not applied to any RAW output. |
| |
| This control will be on by default on devices that support this control. Applications |
disabling distortion correction need to pay extra attention to the coordinate system of
metering regions, crop region, and face rectangles. When distortion correction is OFF,
| metadata coordinates follow the coordinate system of |
| android.sensor.info.preCorrectionActiveArraySize. When distortion is not OFF, metadata |
| coordinates follow the coordinate system of android.sensor.info.activeArraySize. The |
| camera device will map these metadata fields to match the corrected image produced by the |
| camera device, for both capture requests and results. However, this mapping is not very |
| precise, since rectangles do not generally map to rectangles when corrected. Only linear |
| scaling between the active array and precorrection active array coordinates is |
performed. Applications that require precise correction of metadata need to undo that
linear scaling and apply a more complete correction that takes into account the app's
own requirements.
| |
| The full list of metadata that is affected in this way by distortion correction is: |
| |
| * android.control.afRegions |
| * android.control.aeRegions |
| * android.control.awbRegions |
| * android.scaler.cropRegion |
| * android.statistics.faces |
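
A hedged Java sketch of disabling correction as described above (assuming
`chars` and a `CaptureRequest.Builder requestBuilder`):

    int[] modes = chars.get(
            CameraCharacteristics.DISTORTION_CORRECTION_AVAILABLE_MODES);
    boolean offSupported = false;
    if (modes != null) {
        for (int m : modes) {
            offSupported |= (m == CameraMetadata.DISTORTION_CORRECTION_MODE_OFF);
        }
    }
    if (offSupported) {
        requestBuilder.set(CaptureRequest.DISTORTION_CORRECTION_MODE,
                CameraMetadata.DISTORTION_CORRECTION_MODE_OFF);
        // Metering regions, crop region, and faces now use the
        // preCorrectionActiveArraySize coordinate system.
    }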
| </details> |
| </entry> |
| </controls> |
| <static> |
| <entry name="availableModes" type="byte" visibility="public" |
| type_notes="list of enums" container="array" typedef="enumList" hal_version="3.3"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of distortion correction modes for android.distortionCorrection.mode that are |
| supported by this camera device. |
| </description> |
| <range>Any value listed in android.distortionCorrection.mode</range> |
| <details> |
No device is required to support this API; devices that do not support it will
always list only OFF. All devices that support this API will list both FAST and
HIGH_QUALITY.
| </details> |
| <hal_details> |
| HAL must support both FAST and HIGH_QUALITY if distortion correction is available |
| on the camera device, but the underlying implementation can be the same for both modes. |
| That is, if the highest quality implementation on the camera device does not slow down |
| capture rate, then FAST and HIGH_QUALITY will generate the same output. |
| </hal_details> |
| <tag id="V1" /> |
| <tag id="REPROC" /> |
| </entry> |
| </static> |
| <dynamic> |
| <clone entry="android.distortionCorrection.mode" kind="controls" hal_version="3.3"> |
| </clone> |
| </dynamic> |
| </section> |
| <section name="heic"> |
| <static> |
| <namespace name="info"> |
| <entry name="supported" type="byte" visibility="system" enum="true" |
| typedef="boolean" hwlevel="limited" hal_version="3.4"> |
| <enum> |
| <value>FALSE</value> |
| <value>TRUE</value> |
| </enum> |
<description>Whether this camera device can support the identical set of stream combinations
involving the HEIC image format, compared to the
| [table of combinations](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#legacy-level-guaranteed-configurations) |
| involving JPEG image format required for the device's hardware |
| level and capabilities. |
| </description> |
| <details> |
| All the static, control and dynamic metadata tags related to JPEG apply to HEIC formats |
| as well. For example, the same android.jpeg.orientation and android.jpeg.quality are |
| used to control the orientation and quality of the HEIC image. Configuring JPEG and |
| HEIC streams at the same time is not supported. |
| |
| If a camera device supports HEIC format (ISO/IEC 23008-12), not only does it |
| support the existing mandatory stream |
| [combinations](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#legacy-level-guaranteed-configurations) |
| required for the device's hardware level and capabilities, it also |
supports swapping each JPEG stream with an HEIC stream in all guaranteed combinations.
| |
| For every HEIC stream configured by the application, the camera framework sets up 2 |
| internal streams with camera HAL: |
| |
| * A YUV_420_888 or IMPLEMENTATION_DEFINED HAL stream as input to HEIC or HEVC encoder. |
| * A BLOB stream with JPEG_APPS_SEGMENTS dataspace to extract application markers |
| including EXIF and thumbnail to be saved in HEIF container. |
| |
| A camera device can output HEIC format to the application if and only if: |
| |
| * The system contains a HEIC or HEVC encoder with constant quality mode, and |
| * This tag is set to TRUE, meaning that camera HAL supports replacing JPEG streams in |
| all mandatory stream combinations with a [YUV_420_888/IMPLEMENTATION_DEFINED stream + |
| JPEG_APPS_SEGMENT BLOB stream] combo. |
| |
As an example, if the camera device's hardware level is LIMITED and it supports HEIC,
then in addition to the required stream combinations, the HAL must support the
following stream combinations as well:
| |
| * IMPLEMENTATION_DEFINED/YUV_420_888 MAXIMUM + JPEG_SEGMENTS_BLOB, |
| * PRIV PREVIEW + IMPLEMENTATION_DEFINED/YUV_420_888 MAXIMUM + JPEG_SEGMENTS_BLOB, |
| * YUV PREVIEW + IMPLEMENTATION_DEFINED/YUV_420_888 MAXIMUM + JPEG_SEGMENTS_BLOB, |
| * PRIV PREVIEW + YUV PREVIEW + IMPLEMENTATION_DEFINED/YUV_420_888 MAXIMUM + |
| JPEG_SEGMENTS_BLOB |
| |
| The selection logic between YUV_420_888 and IMPLEMENTATION_DEFINED for HAL internal |
| stream is as follows: |
| |
    if (HEIC encoder exists and supports the size) {
        use IMPLEMENTATION_DEFINED with GRALLOC_USAGE_HW_IMAGE_ENCODER usage flag;
    } else {
        // HEVC encoder exists
        if (size is less than framework predefined tile size) {
            use IMPLEMENTATION_DEFINED with GRALLOC_USAGE_HW_VIDEO_ENCODER usage flag;
        } else {
            use YUV_420_888;
        }
    }
| </details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="maxJpegAppSegmentsCount" type="byte" visibility="system" |
| hwlevel="limited" hal_version="3.4"> |
| <description>The maximum number of Jpeg APP segments supported by the camera HAL device. |
| </description> |
| <details> |
| The camera framework will use this value to derive the size of the BLOB buffer with |
| JPEG_APP_SEGMENTS dataspace, with each APP segment occupying at most 64K bytes. If the |
| value of this tag is n, the size of the framework allocated buffer will be: |
| |
| n * (2 + 0xFFFF) + sizeof(struct CameraBlob) |
| |
where 2 is the number of bytes for the APP marker and 0xFFFF is the maximum size per
APP segment (including the segment size).
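
As a worked example in Java (a sketch; the 8-byte `CameraBlob` size, two 32-bit
fields, is an assumption for illustration):

    int n = 2;              // value of this tag
    int cameraBlobSize = 8; // assumed sizeof(struct CameraBlob)
    int bufferSize = n * (2 + 0xFFFF) + cameraBlobSize;
    // bufferSize = 2 * 65537 + 8 = 131082 bytes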
| |
The value of this tag must be at least 1, and the APP1 marker (0xFFE1) segment must be the
first segment stored in the JPEG_APPS_SEGMENTS BLOB buffer. The APP1 segment stores the
EXIF data and thumbnail.
| |
Since the media encoder embeds the orientation in the metadata of the output image, to be
consistent between the main image and thumbnail, the camera HAL must not rotate the thumbnail
| image data based on android.jpeg.orientation. The framework will write the orientation |
| into EXIF and HEIC container. |
| |
The APP1 segment is followed immediately by one or more APP2 segments and APPn
segments. After the HAL fills and returns the JPEG_APP_SEGMENTS buffer, the camera
| framework modifies the APP1 segment by filling in the EXIF tags that are related to |
| main image bitstream and the tags that can be derived from capture result metadata, |
| before saving them into the HEIC container. |
| |
| The value of this tag must not be more than 16. |
| </details> |
| <tag id="HEIC" /> |
| </entry> |
| </namespace> |
| |
| <entry name="availableHeicStreamConfigurations" type="int32" visibility="ndk_public" |
| enum="true" container="array" typedef="streamConfiguration" |
| hwlevel="limited" hal_version="3.4"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available HEIC (ISO/IEC 23008-12) stream |
| configurations that this camera device supports |
| (i.e. format, width, height, output/input stream). |
| </description> |
| <details> |
| The configurations are listed as `(format, width, height, input?)` tuples. |
| |
If the camera device supports the HEIC image format, it will support the identical set
of stream combinations involving the HEIC image format, compared to the combinations
involving the JPEG image format, as required by the device's hardware level and capabilities.
| |
| All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats. |
| Configuring JPEG and HEIC streams at the same time is not supported. |
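
A Java sketch (assuming `chars` for a device that reports HEIC support) for
enumerating the available HEIC output sizes:

    StreamConfigurationMap map = chars.get(
            CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
    Size[] heicSizes = map.getOutputSizes(ImageFormat.HEIC);
    // Configure an ImageReader with ImageFormat.HEIC at one of these sizes;
    // do not configure JPEG and HEIC streams in the same session.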
| </details> |
| <ndk_details> |
| All the configuration tuples `(format, width, height, input?)` will contain |
| AIMAGE_FORMAT_HEIC format as OUTPUT only. |
| </ndk_details> |
| <hal_details> |
| These are output stream configurations for use with dataSpace HAL_DATASPACE_HEIF. |
| |
| Do not set this property directly. It is populated by camera framework and must not be |
| set by the HAL layer. |
| </hal_details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicMinFrameDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hwlevel="limited" |
| hal_version="3.4"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination for HEIC output formats. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| This should correspond to the frame duration when only that |
| stream is active, with all processing (typically in android.*.mode) |
| set to either OFF or FAST. |
| |
| When multiple streams are used in a request, the minimum frame |
| duration will be max(individual stream min durations). |
| |
| See android.sensor.frameDuration and |
| android.scaler.availableStallDurations for more details about |
| calculating the max frame rate. |
| </details> |
| <hal_details> |
| Do not set this property directly. It is populated by camera framework and must not be |
| set by the HAL layer. |
| </hal_details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicStallDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hwlevel="limited" |
| hal_version="3.4"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination for HEIC streams. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| A stall duration is how much extra time would get added |
| to the normal minimum frame duration for a repeating request |
| that has streams with non-zero stall. |
| |
| This functions similarly to |
| android.scaler.availableStallDurations for HEIC |
| streams. |
| |
| All HEIC output stream formats may have a nonzero stall |
| duration. |
| </details> |
| <hal_details> |
| Do not set this property directly. It is populated by camera framework and must not be |
| set by the HAL layer. |
| </hal_details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicStreamConfigurationsMaximumResolution" type="int32" |
| visibility="ndk_public" enum="true" container="array" typedef="streamConfiguration" |
| hal_version="3.6"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available HEIC (ISO/IEC 23008-12) stream |
| configurations that this camera device supports |
| (i.e. format, width, height, output/input stream). |
| </description> |
| <details> |
| Refer to android.heic.availableHeicStreamConfigurations for details. |
| </details> |
| <ndk_details> |
| All the configuration tuples `(format, width, height, input?)` will contain |
| AIMAGE_FORMAT_HEIC format as OUTPUT only. |
| </ndk_details> |
| <hal_details> |
| These are output stream configurations for use with dataSpace HAL_DATASPACE_HEIF. |
| |
| Do not set this property directly. It is populated by camera framework and must not be |
| set by the HAL layer. |
| </hal_details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicMinFrameDurationsMaximumResolution" type="int64" |
| visibility="ndk_public" container="array" typedef="streamConfigurationDuration" |
| hal_version="3.6"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination for HEIC output formats for CaptureRequests where |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Refer to android.heic.availableHeicMinFrameDurations for details. |
| </details> |
| <hal_details> |
| Do not set this property directly. It is populated by camera framework and must not be |
| set by the HAL layer. |
| </hal_details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicStallDurationsMaximumResolution" type="int64" |
| visibility="ndk_public" container="array" typedef="streamConfigurationDuration" |
| hal_version="3.6"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination for HEIC streams for CaptureRequests where |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Refer to android.heic.availableHeicStallDurations for details. |
| </details> |
| <hal_details> |
| Do not set this property directly. It is populated by camera framework and must not be |
| set by the HAL layer. |
| </hal_details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicUltraHdrStreamConfigurations" type="int32" visibility="ndk_public" |
| enum="true" container="array" typedef="streamConfiguration" hwlevel="limited" |
| aconfig_flag="camera_heif_gainmap" hal_version="3.11"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available HEIC (ISO/IEC 23008-12/24) UltraHDR stream |
| configurations that this camera device supports |
| (i.e. format, width, height, output/input stream). |
| </description> |
| <details> |
| The configurations are listed as `(format, width, height, input?)` tuples. |
| |
| All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats. |
| Configuring JPEG and HEIC streams at the same time is not supported. |
| </details> |
| <ndk_details> |
| All the configuration tuples `(format, width, height, input?)` will contain |
| AIMAGE_FORMAT_HEIC format as OUTPUT only. |
| </ndk_details> |
| <hal_details> |
| These are output stream configurations for use with dataSpace DATASPACE_HEIF_ULTRAHDR. |
| </hal_details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicUltraHdrMinFrameDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hwlevel="limited" |
| aconfig_flag="camera_heif_gainmap" hal_version="3.11"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination for HEIC UltraHDR output formats. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| This should correspond to the frame duration when only that |
| stream is active, with all processing (typically in android.*.mode) |
| set to either OFF or FAST. |
| |
| When multiple streams are used in a request, the minimum frame |
| duration will be max(individual stream min durations). |
| |
| See android.sensor.frameDuration and |
| android.scaler.availableStallDurations for more details about |
| calculating the max frame rate. |
| </details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicUltraHdrStallDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hwlevel="limited" |
| aconfig_flag="camera_heif_gainmap" hal_version="3.11"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination for HEIC UltraHDR streams. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| A stall duration is how much extra time would get added |
| to the normal minimum frame duration for a repeating request |
| that has streams with non-zero stall. |
| |
| This functions similarly to |
| android.scaler.availableStallDurations for HEIC UltraHDR |
| streams. |
| |
| All HEIC output stream formats may have a nonzero stall |
| duration. |
| </details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicUltraHdrStreamConfigurationsMaximumResolution" type="int32" |
| visibility="ndk_public" enum="true" container="array" typedef="streamConfiguration" |
| aconfig_flag="camera_heif_gainmap" hal_version="3.11"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available HEIC (ISO/IEC 23008-12/24) UltraHDR stream |
| configurations that this camera device supports |
| (i.e. format, width, height, output/input stream) for CaptureRequests where |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <details> |
| Refer to android.heic.availableHeicStreamConfigurations for details. |
| </details> |
| <ndk_details> |
| All the configuration tuples `(format, width, height, input?)` will contain |
| AIMAGE_FORMAT_HEIC format as OUTPUT only. |
| </ndk_details> |
| <hal_details> |
| These are output stream configurations for use with dataSpace DATASPACE_HEIF_ULTRAHDR. |
| </hal_details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicUltraHdrMinFrameDurationsMaximumResolution" type="int64" |
| visibility="ndk_public" container="array" typedef="streamConfigurationDuration" |
| aconfig_flag="camera_heif_gainmap" hal_version="3.11"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination for HEIC UltraHDR output formats for CaptureRequests where |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Refer to android.heic.availableHeicMinFrameDurations for details. |
| </details> |
| <tag id="HEIC" /> |
| </entry> |
| <entry name="availableHeicUltraHdrStallDurationsMaximumResolution" type="int64" |
| visibility="ndk_public" container="array" typedef="streamConfigurationDuration" |
| aconfig_flag="camera_heif_gainmap" hal_version="3.11"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination for HEIC UltraHDR streams for CaptureRequests where |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Refer to android.heic.availableHeicStallDurations for details. |
| </details> |
| <tag id="HEIC" /> |
| </entry> |
| </static> |
| </section> |
| <section name="automotive"> |
| <static> |
| <namespace name="lens"> |
| <entry name="facing" type="byte" visibility="public" enum="true" container="array" |
| hal_version="3.8"> |
| <array> |
| <size>n</size> |
| </array> |
| <enum> |
| <value>EXTERIOR_OTHER |
| <notes> |
| The camera device faces the outside of the vehicle body frame but not exactly |
| one of the exterior sides defined by this enum. Applications should determine |
| the exact facing direction from android.lens.poseRotation and |
| android.lens.poseTranslation. |
| </notes> |
| </value> |
| <value>EXTERIOR_FRONT |
| <notes> |
| The camera device faces the front of the vehicle body frame. |
| </notes> |
| </value> |
| <value>EXTERIOR_REAR |
| <notes> |
| The camera device faces the rear of the vehicle body frame. |
| </notes> |
| </value> |
| <value>EXTERIOR_LEFT |
| <notes> |
| The camera device faces the left side of the vehicle body frame. |
| </notes> |
| </value> |
| <value>EXTERIOR_RIGHT |
| <notes> |
| The camera device faces the right side of the vehicle body frame. |
| </notes> |
| </value> |
| <value>INTERIOR_OTHER |
| <notes> |
| The camera device faces the inside of the vehicle body frame but not exactly |
| one of seats described by this enum. Applications should determine the exact |
| facing direction from android.lens.poseRotation and android.lens.poseTranslation. |
| </notes> |
| </value> |
| <value>INTERIOR_SEAT_ROW_1_LEFT |
| <notes> |
| The camera device faces the left side seat of the first row. |
| </notes> |
| </value> |
| <value>INTERIOR_SEAT_ROW_1_CENTER |
| <notes> |
| The camera device faces the center seat of the first row. |
| </notes> |
| </value> |
| <value>INTERIOR_SEAT_ROW_1_RIGHT |
| <notes> |
| The camera device faces the right seat of the first row. |
| </notes> |
| </value> |
| <value>INTERIOR_SEAT_ROW_2_LEFT |
| <notes> |
| The camera device faces the left side seat of the second row. |
| </notes> |
| </value> |
| <value>INTERIOR_SEAT_ROW_2_CENTER |
| <notes> |
| The camera device faces the center seat of the second row. |
| </notes> |
| </value> |
| <value>INTERIOR_SEAT_ROW_2_RIGHT |
| <notes> |
| The camera device faces the right side seat of the second row. |
| </notes> |
| </value> |
| <value>INTERIOR_SEAT_ROW_3_LEFT |
| <notes> |
| The camera device faces the left side seat of the third row. |
| </notes> |
| </value> |
| <value>INTERIOR_SEAT_ROW_3_CENTER |
| <notes> |
| The camera device faces the center seat of the third row. |
| </notes> |
| </value> |
| <value>INTERIOR_SEAT_ROW_3_RIGHT |
| <notes> |
| The camera device faces the right seat of the third row. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| The direction the camera faces relative to the vehicle body frame and the |
| passenger seats. |
| </description> |
| <details> |
| This enum defines the lens facing characteristic of cameras on automotive devices |
| at the locations defined by android.automotive.location. If the system has |
| FEATURE_AUTOMOTIVE, the camera will have this entry in its static metadata. |
| |
| When android.automotive.location is INTERIOR, this has one or more INTERIOR_\* |
| values or a single EXTERIOR_\* value. When this lists more than one INTERIOR_\* |
| value, the first value must be the one for the seat closest to the optical axis. |
| If this contains INTERIOR_OTHER, all other values will be ineffective. |
| |
| When android.automotive.location is EXTERIOR_\* or EXTRA, this has a single |
| EXTERIOR_\* value. |
| |
| If a camera has INTERIOR_OTHER or EXTERIOR_OTHER, or if more than one camera is |
| at the same location and faces the same direction, the static metadata of those |
| cameras will list the following entries, so that applications can determine each |
| lens's exact facing direction: |
| |
| * android.lens.poseReference |
| * android.lens.poseRotation |
| * android.lens.poseTranslation |
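| |
| As a minimal Java sketch (assuming `characteristics` was obtained from |
| CameraManager#getCameraCharacteristics; the key and constant names below are the |
| published camera2 ones): |
| |
|     Integer location = characteristics.get(CameraCharacteristics.AUTOMOTIVE_LOCATION); |
|     int[] facing = characteristics.get(CameraCharacteristics.AUTOMOTIVE_LENS_FACING); |
|     if (location != null && facing != null && facing.length > 0 |
|             && location == CameraMetadata.AUTOMOTIVE_LOCATION_INTERIOR) { |
|         // facing[0] is the seat closest to the optical axis, e.g. |
|         // CameraMetadata.AUTOMOTIVE_LENS_FACING_INTERIOR_SEAT_ROW_1_LEFT. |
|         int primaryFacing = facing[0]; |
|     } |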
| </details> |
| </entry> |
| </namespace> |
| <entry name="location" type="byte" visibility="public" enum="true" hal_version="3.8"> |
| <enum> |
| <value>INTERIOR |
| <notes> |
| The camera device exists inside of the vehicle cabin. |
| </notes> |
| </value> |
| <value>EXTERIOR_OTHER |
| <notes> |
| The camera device exists outside of the vehicle body frame but not exactly on |
| one of the exterior locations this enum defines. Applications should determine |
| the exact location from android.lens.poseTranslation. |
| </notes> |
| </value> |
| <value>EXTERIOR_FRONT |
| <notes> |
| The camera device exists outside of the vehicle body frame and on its front side. |
| </notes> |
| </value> |
| <value>EXTERIOR_REAR |
| <notes> |
| The camera device exists outside of the vehicle body frame and on its rear side. |
| </notes> |
| </value> |
| <value>EXTERIOR_LEFT |
| <notes> |
| The camera device exists outside of the vehicle body frame and on its left side. |
| </notes> |
| </value> |
| <value>EXTERIOR_RIGHT |
| <notes> |
| The camera device exists outside of the vehicle body frame and on its right side. |
| </notes> |
| </value> |
| <value>EXTRA_OTHER |
| <notes> |
| The camera device exists on an extra vehicle, such as a trailer, but not exactly |
| on one of its front, rear, left, or right sides. Applications should determine |
| the exact location from android.lens.poseTranslation. |
| </notes> |
| </value> |
| <value>EXTRA_FRONT |
| <notes> |
| The camera device exists outside of the extra vehicle's body frame and on its front |
| side. |
| </notes> |
| </value> |
| <value>EXTRA_REAR |
| <notes> |
| The camera device exists outside of the extra vehicle's body frame and on its rear |
| side. |
| </notes> |
| </value> |
| <value>EXTRA_LEFT |
| <notes> |
| The camera device exists outside of the extra vehicle's body frame and on its |
| left side. |
| </notes> |
| </value> |
| <value>EXTRA_RIGHT |
| <notes> |
| The camera device exists outside of the extra vehicle's body frame and on its |
| right side. |
| </notes> |
| </value> |
| </enum> |
| <description> |
| Location of the camera on automotive devices. |
| </description> |
| <details> |
| This enum defines the locations of the cameras relative to the vehicle body frame on |
| [the automotive sensor coordinate system](https://source.android.com/devices/sensors/sensor-types#auto_axes). |
| If the system has FEATURE_AUTOMOTIVE, the camera will have this entry in its static |
| metadata. |
| |
| * INTERIOR is the inside of the vehicle body frame (or the passenger cabin). |
| * EXTERIOR is the outside of the vehicle body frame. |
| * EXTRA is an extra vehicle, such as a trailer. |
| |
| Each side of the vehicle body frame on this coordinate system is defined as |
| follows: |
| |
| * FRONT is the direction toward which the Y-axis increases. |
| * REAR is the direction toward which the Y-axis decreases. |
| * LEFT is the direction toward which the X-axis decreases. |
| * RIGHT is the direction toward which the X-axis increases. |
| |
| If the camera has either EXTERIOR_OTHER or EXTRA_OTHER, its static metadata will list |
| the following entries, so that applications can determine the camera's exact location: |
| |
| * android.lens.poseReference |
| * android.lens.poseRotation |
| * android.lens.poseTranslation |
| </details> |
| </entry> |
| </static> |
| </section> |
| <section name="extension"> |
| <controls> |
| <entry name="strength" type="int32" visibility="fwk_java_public" hal_version="3.9"> |
| <description>Strength of the extension post-processing effect |
| </description> |
| <range>0 - 100</range> |
| <details> |
| This control allows Camera extension clients to configure the strength of the applied |
| extension effect. Strength equal to 0 means that the extension must not apply any |
| post-processing and return a regular captured frame. Strength equal to 100 is the |
| maximum level of post-processing. Values between 0 and 100 will have different |
| effects depending on the extension type, as described below: |
| |
| * {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_BOKEH BOKEH} - |
| the strength is expected to control the amount of blur. |
| * {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_HDR HDR} and |
| {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_NIGHT NIGHT} - |
| the strength can control the amount of images fused and the brightness of the final image. |
| * {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_FACE_RETOUCH FACE_RETOUCH} - |
| the strength value will control the amount of cosmetic enhancement and skin |
| smoothing. |
| |
| The control will be supported if the capture request key is part of the list generated by |
| {@link android.hardware.camera2.CameraExtensionCharacteristics#getAvailableCaptureRequestKeys}. |
| The control is only defined and available to clients sending capture requests via |
| {@link android.hardware.camera2.CameraExtensionSession}. |
| If the client doesn't specify the extension strength value, then a default value will |
| be set by the extension. Clients can retrieve the default value by checking the |
| corresponding capture result. |
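| |
| A minimal sketch of client-side usage (assuming `extensionCharacteristics` and |
| `requestBuilder` were obtained elsewhere; the key names below are the published |
| camera2 ones): |
| |
|     if (extensionCharacteristics |
|             .getAvailableCaptureRequestKeys(CameraExtensionCharacteristics.EXTENSION_BOKEH) |
|             .contains(CaptureRequest.EXTENSION_STRENGTH)) { |
|         requestBuilder.set(CaptureRequest.EXTENSION_STRENGTH, 50);  // mid-level effect |
|     } |
|     // Submit requestBuilder.build() through a CameraExtensionSession capture call. |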
| </details> |
| </entry> |
| </controls> |
| <dynamic> |
| <entry name="currentType" type="int32" visibility="fwk_java_public" hal_version="3.9"> |
| <description>Contains the extension type of the currently active extension |
| </description> |
| <range>Extension type value listed in |
| {@link android.hardware.camera2.CameraExtensionCharacteristics}</range> |
| <details> |
| The capture result will only be supported and included by camera extension |
| {@link android.hardware.camera2.CameraExtensionSession sessions}. |
| In case the extension session was configured to use |
| {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_AUTOMATIC AUTO}, |
| then the extension type value will indicate the currently active extension like |
| {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_HDR HDR}, |
| {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_NIGHT NIGHT}, |
| etc., and will never return |
| {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_AUTOMATIC AUTO}. |
| In case the extension session was configured to use an extension different from |
| {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_AUTOMATIC AUTO}, |
| then the result type will always match with the configured extension type. |
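| |
| For illustration, a client that configured the session with AUTO could observe |
| the active extension as in this sketch (callback wiring omitted): |
| |
|     // Inside CameraExtensionSession.ExtensionCaptureCallback#onCaptureResultAvailable: |
|     Integer activeType = result.get(CaptureResult.EXTENSION_CURRENT_TYPE); |
|     if (activeType != null |
|             && activeType == CameraExtensionCharacteristics.EXTENSION_NIGHT) { |
|         // AUTO selected night processing for this capture. |
|     } |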
| </details> |
| </entry> |
| <clone entry="android.extension.strength" kind="controls"> |
| </clone> |
| <entry name="nightModeIndicator" type="int32" visibility="public" optional="true" |
| enum="true" aconfig_flag="night_mode_indicator" hal_version="3.11"> |
| <enum> |
| <value>UNKNOWN |
| <notes> |
| The camera can't accurately assess the scene's lighting to determine if a Night Mode |
| Camera Extension capture would improve the photo. This can happen when the current |
| camera configuration doesn't support night mode indicator detection, such as when |
| the auto exposure mode is ON_AUTO_FLASH, ON_ALWAYS_FLASH, ON_AUTO_FLASH_REDEYE, or |
| ON_EXTERNAL_FLASH. |
| </notes> |
| </value> |
| <value>OFF |
| <notes> |
| The camera has detected lighting conditions that are sufficiently bright. The |
| Night Mode Camera Extension is available but may not be able to optimize the |
| camera settings to take a higher quality photo. |
| </notes> |
| </value> |
| <value>ON |
| <notes> |
| The camera has detected low-light conditions. It is recommended to use the Night |
| Mode Camera Extension to optimize the camera settings to take a high-quality |
| photo in the dark. |
| </notes> |
| </value> |
| </enum> |
| <description>Indicates when to activate Night Mode Camera Extension for high-quality |
| still captures in low-light conditions.</description> |
| <details> |
| Provides awareness to the application when the current scene can benefit from using a |
| Night Mode Camera Extension to take a high-quality photo. |
| |
| Support for this capture result can be queried via |
| {@link android.hardware.camera2.CameraCharacteristics#getAvailableCaptureResultKeys}. |
| |
| If the device supports this capability then it will also support |
| {@link android.hardware.camera2.CameraExtensionCharacteristics#EXTENSION_NIGHT NIGHT}, |
| and this capture result will be available in both regular |
| {@link android.hardware.camera2.CameraCaptureSession capture sessions} and |
| {@link android.hardware.camera2.CameraExtensionSession extension sessions}. |
| |
| The value will be {@code UNKNOWN} in the following auto exposure modes: ON_AUTO_FLASH, |
| ON_ALWAYS_FLASH, ON_AUTO_FLASH_REDEYE, or ON_EXTERNAL_FLASH. |
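| |
| A minimal sketch, assuming the flag-guarded CaptureResult key and CameraMetadata |
| enum constants generated from this entry: |
| |
|     Integer indicator = result.get(CaptureResult.EXTENSION_NIGHT_MODE_INDICATOR); |
|     if (indicator != null |
|             && indicator == CameraMetadata.EXTENSION_NIGHT_MODE_INDICATOR_ON) { |
|         // Low light detected: suggest switching to a NIGHT extension session. |
|     } |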
| </details> |
| </entry> |
| </dynamic> |
| </section> |
| <section name="jpegr"> |
| <static> |
| <entry name="availableJpegRStreamConfigurations" type="int32" visibility="ndk_public" |
| enum="true" container="array" typedef="streamConfiguration" |
| hwlevel="limited" hal_version="3.9"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available Jpeg/R stream |
| configurations that this camera device supports |
| (i.e. format, width, height, output/input stream). |
| </description> |
| <details> |
| The configurations are listed as `(format, width, height, input?)` tuples. |
| |
| If the camera device supports Jpeg/R, it will support the same stream |
| combinations with Jpeg/R as it does with P010. The stream combinations with |
| Jpeg/R (or P010) supported by the device are determined by the device's hardware |
| level and capabilities. |
| |
| All the static, control, and dynamic metadata tags related to JPEG apply to Jpeg/R formats. |
| Configuring JPEG and Jpeg/R streams at the same time is not supported. |
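| |
| On the Java side this corresponds to querying StreamConfigurationMap with |
| ImageFormat.JPEG_R; a sketch, assuming `characteristics` was obtained from |
| CameraManager#getCameraCharacteristics: |
| |
|     StreamConfigurationMap map = characteristics.get( |
|             CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); |
|     Size[] jpegRSizes = (map == null) ? null : map.getOutputSizes(ImageFormat.JPEG_R); |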
| </details> |
| <ndk_details> |
| All the configuration tuples `(format, width, height, input?)` will contain |
| AIMAGE_FORMAT_JPEGR format as OUTPUT only. |
| </ndk_details> |
| </entry> |
| <entry name="availableJpegRMinFrameDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hwlevel="limited" |
| hal_version="3.9"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination for Jpeg/R output formats. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| This should correspond to the frame duration when only that |
| stream is active, with all processing (typically in android.*.mode) |
| set to either OFF or FAST. |
| |
| When multiple streams are used in a request, the minimum frame |
| duration will be max(individual stream min durations). |
| |
| See android.sensor.frameDuration and |
| android.scaler.availableStallDurations for more details about |
| calculating the max frame rate. |
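| |
| For example (hypothetical values), with two streams in the same repeating |
| request the frame duration floor is the larger of the per-stream minimums: |
| |
|     long previewMinNs = 33333333L;  // from getOutputMinFrameDuration(...) |
|     long jpegRMinNs = 50000000L;    // from getOutputMinFrameDuration(ImageFormat.JPEG_R, size) |
|     long requestMinNs = Math.max(previewMinNs, jpegRMinNs); |
|     double maxFps = 1e9 / requestMinNs;  // 20 fps with these values |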
| </details> |
| </entry> |
| <entry name="availableJpegRStallDurations" type="int64" visibility="ndk_public" |
| container="array" typedef="streamConfigurationDuration" hwlevel="limited" |
| hal_version="3.9"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination for Jpeg/R streams. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| A stall duration is how much extra time would get added |
| to the normal minimum frame duration for a repeating request |
| that has streams with non-zero stall. |
| |
| This functions similarly to |
| android.scaler.availableStallDurations for Jpeg/R |
| streams. |
| |
| All Jpeg/R output stream formats may have a nonzero stall |
| duration. |
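| |
| On the Java side the per-size stall can be queried directly; a sketch, reusing |
| the `map` from the configuration sketch above and a chosen output `size`: |
| |
|     long stallNs = map.getOutputStallDuration(ImageFormat.JPEG_R, size); |
|     long minNs = map.getOutputMinFrameDuration(ImageFormat.JPEG_R, size); |
|     // Worst-case per-frame cost when this stream stalls a repeating request: |
|     long worstCaseNs = minNs + stallNs; |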
| </details> |
| </entry> |
| <entry name="availableJpegRStreamConfigurationsMaximumResolution" type="int32" |
| visibility="ndk_public" enum="true" container="array" typedef="streamConfiguration" |
| hal_version="3.9"> |
| <array> |
| <size>n</size> |
| <size>4</size> |
| </array> |
| <enum> |
| <value>OUTPUT</value> |
| <value>INPUT</value> |
| </enum> |
| <description>The available Jpeg/R stream |
| configurations that this camera device supports |
| (i.e. format, width, height, output/input stream). |
| </description> |
| <details> |
| Refer to android.jpegr.availableJpegRStreamConfigurations for details. |
| </details> |
| <ndk_details> |
| All the configuration tuples `(format, width, height, input?)` will contain |
| AIMAGE_FORMAT_JPEGR format as OUTPUT only. |
| </ndk_details> |
| </entry> |
| <entry name="availableJpegRMinFrameDurationsMaximumResolution" type="int64" |
| visibility="ndk_public" container="array" typedef="streamConfigurationDuration" |
| hal_version="3.9"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the minimum frame duration for each |
| format/size combination for Jpeg/R output formats for CaptureRequests where |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Refer to android.jpegr.availableJpegRMinFrameDurations for details. |
| </details> |
| </entry> |
| <entry name="availableJpegRStallDurationsMaximumResolution" type="int64" |
| visibility="ndk_public" container="array" typedef="streamConfigurationDuration" |
| hal_version="3.9"> |
| <array> |
| <size>4</size> |
| <size>n</size> |
| </array> |
| <description>This lists the maximum stall duration for each |
| output format/size combination for Jpeg/R streams for CaptureRequests where |
| android.sensor.pixelMode is set to |
| {@link android.hardware.camera2.CameraMetadata#SENSOR_PIXEL_MODE_MAXIMUM_RESOLUTION}. |
| </description> |
| <units>(format, width, height, ns) x n</units> |
| <details> |
| Refer to android.jpegr.availableJpegRStallDurations for details. |
| </details> |
| </entry> |
| </static> |
| </section> |
| <section name="sharedSession"> |
| <static> |
| <entry name="colorSpace" type="byte" visibility="fwk_only" optional="true" |
| enum="true" aconfig_flag="camera_multi_client" hal_version="3.11"> |
| <enum> |
| <value id="-1">UNSPECIFIED</value> |
| <value id="0">SRGB</value> |
| <value id="7">DISPLAY_P3</value> |
| <value id="16">BT2020_HLG</value> |
| </enum> |
| <description>Color space used for the shared session configuration for all the |
| output targets when the camera is opened in shared mode. This should be one of |
| the values specified in availableColorSpaceProfilesMap. |
| </description> |
| <hal_details> |
| Do not set this property directly. The Android camera framework will generate |
| this tag if the camera device can be opened in shared mode. |
| </hal_details> |
| </entry> |
| <entry name="outputConfigurations" type="int64" visibility="fwk_only" |
| optional="true" container="array" aconfig_flag="camera_multi_client" |
| hal_version="3.11"> |
| <array> |
| <size>n</size> |
| </array> |
| <description>List of shared output configurations that this camera device |
| supports when the camera is opened in shared mode. The array contains the |
| following entries for each supported shared configuration: |
| 1) surface type |
| 2) width |
| 3) height |
| 4) format |
| 5) mirrorMode |
| 6) useReadoutTimestamp |
| 7) timestampBase |
| 8) dataspace |
| 9) usage |
| 10) streamUsecase |
| 11) physical camera id len |
| 12) physical camera id as a UTF-8 null-terminated string. |
| </description> |
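| <details> |
| A sketch of how one configuration could be decoded from the flattened int64 |
| array, assuming the field order listed above with the physical camera id stored |
| one byte per element (layout illustrative; this key is framework-internal): |
| |
|     // Hypothetical helper; arr holds one shared configuration beginning at |
|     // index 0, with fields in the order documented above. |
|     static void dumpSharedConfig(long[] arr) { |
|         long surfaceType = arr[0], width = arr[1], height = arr[2]; |
|         long format = arr[3], mirrorMode = arr[4]; |
|         long useReadoutTimestamp = arr[5], timestampBase = arr[6]; |
|         long dataspace = arr[7], usage = arr[8], streamUsecase = arr[9]; |
|         int idLen = (int) arr[10]; |
|         // arr[11] .. arr[10 + idLen] hold the physical camera id bytes, |
|         // including the terminating NUL. |
|     } |
| </details> |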
| <hal_details> |
| Do not set this property directly. The Android camera framework will generate |
| this tag if the camera device can be opened in shared mode. |
| </hal_details> |
| </entry> |
| <entry name="configuration" type="byte" visibility="fwk_system_public" |
| synthetic="true" optional="true" typedef="sharedSessionConfiguration" |
| aconfig_flag="camera_multi_client"> |
| <description>The available stream configurations that this camera device |
| supports for a shared capture session when the camera is opened in shared mode. |
| The Android camera framework will generate this tag if the camera device can be |
| opened in shared mode. |
| </description> |
| <hal_details> |
| Do not set this property directly (it is synthetic and will not be available at |
| the HAL layer). |
| </hal_details> |
| </entry> |
| </static> |
| </section> |
| <section name="desktopEffects"> |
| <static> |
| <entry name="capabilities" type="byte" visibility="system" optional="true" |
| enum="true" type_notes="list of enums" container="array" typedef="enumList" |
| aconfig_flag="desktop_effects"> |
| <array> |
| <size>n</size> |
| </array> |
| <enum> |
| <value>BACKGROUND_BLUR |
| <notes> |
| Background blur can be activated via android.desktopEffects.backgroundBlurMode |
| </notes> |
| </value> |
| <value>FACE_RETOUCH |
| <notes> |
| Face retouch can be activated via android.desktopEffects.faceRetouchMode |
| </notes> |
| </value> |
| <value>PORTRAIT_RELIGHT |
| <notes> |
| Portrait relight can be activated via android.desktopEffects.portraitRelightMode |
| </notes> |
| </value> |
| </enum> |
| <description> |
| List of special effects supported by the camera device. |
| </description> |
| <details> |
| Available features supported by the camera device for large-screen video conferencing. |
| </details> |
| </entry> |
| <entry name="backgroundBlurModes" type="byte" visibility="system" optional="true" |
| type_notes="list of enums (android.desktopEffects.backgroundBlurMode)" container="array" |
| typedef="enumList" aconfig_flag="desktop_effects"> |
| <array> |
| <size>n</size> |
| </array> |
| <description> |
| List of background blur modes supported by the camera device. The key will only |
| exist if BACKGROUND_BLUR is listed in android.desktopEffects.capabilities. |
| </description> |
| <range>Any value listed in android.desktopEffects.backgroundBlurMode</range> |
| <details> |
| Lists the valid modes for android.desktopEffects.backgroundBlurMode. |
| </details> |
| </entry> |
| </static> |
| <controls> |
| <entry name="backgroundBlurMode" type="byte" visibility="system" |
| optional="true" enum="true" aconfig_flag="desktop_effects"> |
| <enum> |
| <value>OFF |
| <notes> |
| Don't use background blur |
| </notes> |
| </value> |
| <value>LIGHT |
| <notes> |
| Blur the background with light blur strength |
| </notes> |
| </value> |
| <value>FULL |
| <notes> |
| Blur the background with full blur strength |
| </notes> |
| </value> |
| </enum> |
| <description>Control how the background should be blurred. Supported modes are |
| listed by the camera device in android.desktopEffects.backgroundBlurModes.</description> |
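| <details> |
| A minimal sketch of setting this control, assuming the @SystemApi key and enum |
| constant names that would be generated from this entry (names illustrative; the |
| control is flag-guarded and system-only): |
| |
|     // Hypothetical generated names; requestBuilder obtained elsewhere. |
|     requestBuilder.set(CaptureRequest.DESKTOP_EFFECTS_BACKGROUND_BLUR_MODE, |
|             CameraMetadata.DESKTOP_EFFECTS_BACKGROUND_BLUR_MODE_FULL); |
| </details> |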
| </entry> |
| <entry name="faceRetouchMode" type="byte" visibility="system" |
| optional="true" enum="true" typedef="boolean" aconfig_flag="desktop_effects"> |
| <enum> |
| <value>OFF |
| <notes> |
| Turn off face retouch |
| </notes> |
| </value> |
| <value>ON |
| <notes> |
| Turn on face retouch. A strength can be set via android.desktopEffects.faceRetouchStrength. |
| </notes> |
| </value> |
| </enum> |
| <description>Whether to enable the face retouch effect.</description> |
| </entry> |
| <entry name="faceRetouchStrength" type="byte" visibility="system" |
| optional="true" aconfig_flag="desktop_effects"> |
| <description>Control the strength of the face retouch effect applied to the |
| frames. If android.desktopEffects.faceRetouchMode is ON and no faceRetouchStrength |
| is set, the camera device will apply a default strength.</description> |
| <units>1-100; 100 is maximum strength.</units> |
| </entry> |
| <entry name="portraitRelightMode" type="byte" visibility="system" |
| optional="true" enum="true" typedef="boolean" aconfig_flag="desktop_effects"> |
| <enum> |
| <value>OFF |
| <notes> |
| Do not use portrait relight |
| </notes> |
| </value> |
| <value>ON |
| <notes> |
| Use portrait relight |
| </notes> |
| </value> |
| </enum> |
| <description>Whether to enable the portrait relighting effect.</description> |
| </entry> |
| </controls> |
| </section> |
| </namespace> |
| </metadata> |