Camera2: Update all CTS tests for depth-only devices
It is possible for a device to only support depth outputs; such
devices need to skip most tests that use YUV or JPEG outputs, or the
tests need to be updated to select DEPTH16 sizes instead of YUV.
A few tests are updated to verify other properties of depth-only
devices.
Bug: 20537722
Change-Id: I7b0237e9247107cd4387014bf77ea7c43f2c7491
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/AllocationTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/AllocationTest.java
index d4fb235..229185d 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/AllocationTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/AllocationTest.java
@@ -728,7 +728,12 @@
mCameraIds[i]));
continue;
}
-
+ if (!staticInfo.isColorOutputSupported()) {
+ Log.i(TAG, String.format(
+ "Skipping this test for camera %s, does not support color outputs",
+ mCameraIds[i]));
+ continue;
+ }
// Open camera and execute test
Log.i(TAG, "Testing Camera " + mCameraIds[i]);
try {
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java
index 5ac9d09..d8fae6d 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/BurstCaptureTest.java
@@ -50,6 +50,10 @@
Log.i(TAG, "Testing YUV Burst for camera " + id);
openDevice(id);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ return;
+ }
if (!mStaticInfo.isAeLockSupported() || !mStaticInfo.isAwbLockSupported()) {
Log.i(TAG, "AE/AWB lock is not supported in camera " + id +
". Skip the test");
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
index d71acd6..44fda14 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraDeviceTest.java
@@ -34,6 +34,7 @@
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.cts.helpers.StaticMetadata;
import android.hardware.camera2.cts.testcases.Camera2AndroidTestCase;
import android.hardware.camera2.params.MeteringRectangle;
import android.media.ImageReader;
@@ -282,6 +283,11 @@
sTemplates[j] == CameraDevice.TEMPLATE_VIDEO_SNAPSHOT) {
continue;
}
+ // Skip non-PREVIEW templates for non-color output
+ if (!mStaticInfo.isColorOutputSupported() &&
+ sTemplates[j] != CameraDevice.TEMPLATE_PREVIEW) {
+ continue;
+ }
CaptureRequest.Builder capReq = mCamera.createCaptureRequest(sTemplates[j]);
assertNotNull("Failed to create capture request", capReq);
if (mStaticInfo.areKeysAvailable(CaptureRequest.SENSOR_EXPOSURE_TIME)) {
@@ -405,9 +411,6 @@
*/
public void testChainedOperation() throws Throwable {
- // Set up single dummy target
- createDefaultImageReader(DEFAULT_CAPTURE_SIZE, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
- /*listener*/ null);
final ArrayList<Surface> outputs = new ArrayList<>();
outputs.add(mReaderSurface);
@@ -546,6 +549,12 @@
for (int i = 0; i < mCameraIds.length; i++) {
Throwable result;
+ if (!(new StaticMetadata(mCameraManager.getCameraCharacteristics(mCameraIds[i]))).
+ isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] + " does not support color outputs, skipping");
+ continue;
+ }
+
// Start chained cascade
ChainedCameraListener cameraListener = new ChainedCameraListener();
mCameraManager.openCamera(mCameraIds[i], cameraListener, mHandler);
@@ -594,6 +603,11 @@
try {
openDevice(mCameraIds[i], mCameraMockListener);
waitForDeviceState(STATE_OPENED, CAMERA_OPEN_TIMEOUT_MS);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
prepareTestByCamera();
}
@@ -611,6 +625,11 @@
try {
openDevice(mCameraIds[i], mCameraMockListener);
waitForDeviceState(STATE_OPENED, CAMERA_OPEN_TIMEOUT_MS);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
testCreateSessionsByCamera(mCameraIds[i]);
}
@@ -982,6 +1001,11 @@
sTemplates[j] == CameraDevice.TEMPLATE_VIDEO_SNAPSHOT) {
continue;
}
+ // Skip non-PREVIEW templates for non-color output
+ if (!mStaticInfo.isColorOutputSupported() &&
+ sTemplates[j] != CameraDevice.TEMPLATE_PREVIEW) {
+ continue;
+ }
captureSingleShot(mCameraIds[i], sTemplates[j], repeating, abort);
}
}
@@ -989,13 +1013,16 @@
// Test: burst of one shot
captureBurstShot(mCameraIds[i], sTemplates, 1, repeating, abort);
+ int template = mStaticInfo.isColorOutputSupported() ?
+ CameraDevice.TEMPLATE_STILL_CAPTURE :
+ CameraDevice.TEMPLATE_PREVIEW;
int[] templates = new int[] {
- CameraDevice.TEMPLATE_STILL_CAPTURE,
- CameraDevice.TEMPLATE_STILL_CAPTURE,
- CameraDevice.TEMPLATE_STILL_CAPTURE,
- CameraDevice.TEMPLATE_STILL_CAPTURE,
- CameraDevice.TEMPLATE_STILL_CAPTURE
- };
+ template,
+ template,
+ template,
+ template,
+ template
+ };
// Test: burst of 5 shots of the same template type
captureBurstShot(mCameraIds[i], templates, templates.length, repeating, abort);
@@ -1075,6 +1102,11 @@
templates[i] == CameraDevice.TEMPLATE_VIDEO_SNAPSHOT) {
continue;
}
+ // Skip non-PREVIEW templates for non-color output
+ if (!mStaticInfo.isColorOutputSupported() &&
+ templates[i] != CameraDevice.TEMPLATE_PREVIEW) {
+ continue;
+ }
CaptureRequest.Builder requestBuilder = mCamera.createCaptureRequest(templates[i]);
assertNotNull("Failed to create capture request", requestBuilder);
requestBuilder.addTarget(mReaderSurface);
@@ -1135,6 +1167,11 @@
mSessionMockListener = spy(new BlockingSessionCallback());
mSessionWaiter = mSessionMockListener.getStateWaiter();
+ if (!mStaticInfo.isColorOutputSupported()) {
+ createDefaultImageReader(getMaxDepthSize(mCamera.getId(), mCameraManager),
+ ImageFormat.DEPTH16, MAX_NUM_IMAGES, new ImageDropperListener());
+ }
+
List<Surface> outputSurfaces = new ArrayList<>(Arrays.asList(mReaderSurface));
mCamera.createCaptureSession(outputSurfaces, mSessionMockListener, mHandler);
@@ -1229,9 +1266,9 @@
private void checkAfMode(CaptureRequest.Builder request, int template,
CameraCharacteristics props) {
- boolean hasFocuser = !props.getKeys().contains(CameraCharacteristics.
- LENS_INFO_MINIMUM_FOCUS_DISTANCE) ||
- props.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) > 0f;
+ boolean hasFocuser = props.getKeys().contains(CameraCharacteristics.
+ LENS_INFO_MINIMUM_FOCUS_DISTANCE) &&
+ (props.get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE) > 0f);
if (!hasFocuser) {
return;
@@ -1273,6 +1310,8 @@
return;
}
+ if (!mStaticInfo.isColorOutputSupported()) return;
+
List<Integer> availableAntiBandingModes =
Arrays.asList(toObject(mStaticInfo.getAeAvailableAntiBandingModesChecked()));
@@ -1324,12 +1363,16 @@
}
// 3A settings--AE/AWB/AF.
- int maxRegionsAe = props.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
- int maxRegionsAwb = props.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
- int maxRegionsAf = props.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
+ Integer maxRegionsAeVal = props.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
+ int maxRegionsAe = maxRegionsAeVal != null ? maxRegionsAeVal : 0;
+ Integer maxRegionsAwbVal = props.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
+ int maxRegionsAwb = maxRegionsAwbVal != null ? maxRegionsAwbVal : 0;
+ Integer maxRegionsAfVal = props.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
+ int maxRegionsAf = maxRegionsAfVal != null ? maxRegionsAfVal : 0;
+
+ checkFpsRange(request, template, props);
checkAfMode(request, template, props);
- checkFpsRange(request, template, props);
checkAntiBandingMode(request, template);
if (template == CameraDevice.TEMPLATE_MANUAL) {
@@ -1339,45 +1382,47 @@
mCollector.expectKeyValueEquals(request, CONTROL_AWB_MODE,
CaptureRequest.CONTROL_AWB_MODE_OFF);
} else {
- mCollector.expectKeyValueEquals(request, CONTROL_AE_MODE,
- CaptureRequest.CONTROL_AE_MODE_ON);
- mCollector.expectKeyValueEquals(request, CONTROL_AE_EXPOSURE_COMPENSATION, 0);
- mCollector.expectKeyValueEquals(request, CONTROL_AE_PRECAPTURE_TRIGGER,
- CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
- // if AE lock is not supported, expect the control key to be non-exist or false
- if (mStaticInfo.isAeLockSupported() || request.get(CONTROL_AE_LOCK) != null) {
- mCollector.expectKeyValueEquals(request, CONTROL_AE_LOCK, false);
- }
+ if (mStaticInfo.isColorOutputSupported()) {
+ mCollector.expectKeyValueEquals(request, CONTROL_AE_MODE,
+ CaptureRequest.CONTROL_AE_MODE_ON);
+ mCollector.expectKeyValueEquals(request, CONTROL_AE_EXPOSURE_COMPENSATION, 0);
+ mCollector.expectKeyValueEquals(request, CONTROL_AE_PRECAPTURE_TRIGGER,
+ CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_IDLE);
+ // if AE lock is not supported, expect the control key to be non-exist or false
+ if (mStaticInfo.isAeLockSupported() || request.get(CONTROL_AE_LOCK) != null) {
+ mCollector.expectKeyValueEquals(request, CONTROL_AE_LOCK, false);
+ }
- mCollector.expectKeyValueEquals(request, CONTROL_AF_TRIGGER,
- CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
+ mCollector.expectKeyValueEquals(request, CONTROL_AF_TRIGGER,
+ CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
- mCollector.expectKeyValueEquals(request, CONTROL_AWB_MODE,
- CaptureRequest.CONTROL_AWB_MODE_AUTO);
- // if AWB lock is not supported, expect the control key to be non-exist or false
- if (mStaticInfo.isAwbLockSupported() || request.get(CONTROL_AWB_LOCK) != null) {
- mCollector.expectKeyValueEquals(request, CONTROL_AWB_LOCK, false);
- }
+ mCollector.expectKeyValueEquals(request, CONTROL_AWB_MODE,
+ CaptureRequest.CONTROL_AWB_MODE_AUTO);
+ // if AWB lock is not supported, expect the control key to be non-exist or false
+ if (mStaticInfo.isAwbLockSupported() || request.get(CONTROL_AWB_LOCK) != null) {
+ mCollector.expectKeyValueEquals(request, CONTROL_AWB_LOCK, false);
+ }
- // Check 3A regions.
- if (VERBOSE) {
- Log.v(TAG, String.format("maxRegions is: {AE: %s, AWB: %s, AF: %s}",
- maxRegionsAe, maxRegionsAwb, maxRegionsAf));
- }
- if (maxRegionsAe > 0) {
- mCollector.expectKeyValueNotNull(request, CONTROL_AE_REGIONS);
- MeteringRectangle[] aeRegions = request.get(CONTROL_AE_REGIONS);
- checkMeteringRect(aeRegions);
- }
- if (maxRegionsAwb > 0) {
- mCollector.expectKeyValueNotNull(request, CONTROL_AWB_REGIONS);
- MeteringRectangle[] awbRegions = request.get(CONTROL_AWB_REGIONS);
- checkMeteringRect(awbRegions);
- }
- if (maxRegionsAf > 0) {
- mCollector.expectKeyValueNotNull(request, CONTROL_AF_REGIONS);
- MeteringRectangle[] afRegions = request.get(CONTROL_AF_REGIONS);
- checkMeteringRect(afRegions);
+ // Check 3A regions.
+ if (VERBOSE) {
+ Log.v(TAG, String.format("maxRegions is: {AE: %s, AWB: %s, AF: %s}",
+ maxRegionsAe, maxRegionsAwb, maxRegionsAf));
+ }
+ if (maxRegionsAe > 0) {
+ mCollector.expectKeyValueNotNull(request, CONTROL_AE_REGIONS);
+ MeteringRectangle[] aeRegions = request.get(CONTROL_AE_REGIONS);
+ checkMeteringRect(aeRegions);
+ }
+ if (maxRegionsAwb > 0) {
+ mCollector.expectKeyValueNotNull(request, CONTROL_AWB_REGIONS);
+ MeteringRectangle[] awbRegions = request.get(CONTROL_AWB_REGIONS);
+ checkMeteringRect(awbRegions);
+ }
+ if (maxRegionsAf > 0) {
+ mCollector.expectKeyValueNotNull(request, CONTROL_AF_REGIONS);
+ MeteringRectangle[] afRegions = request.get(CONTROL_AF_REGIONS);
+ checkMeteringRect(afRegions);
+ }
}
}
@@ -1446,10 +1491,12 @@
}
// ISP-processing settings.
- mCollector.expectKeyValueEquals(
- request, STATISTICS_FACE_DETECT_MODE,
- CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF);
- mCollector.expectKeyValueEquals(request, FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+ if (mStaticInfo.isColorOutputSupported()) {
+ mCollector.expectKeyValueEquals(
+ request, STATISTICS_FACE_DETECT_MODE,
+ CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF);
+ mCollector.expectKeyValueEquals(request, FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
+ }
List<Integer> availableCaps = mStaticInfo.getAvailableCapabilitiesChecked();
if (mStaticInfo.areKeysAvailable(STATISTICS_LENS_SHADING_MAP_MODE)) {
@@ -1608,6 +1655,10 @@
} else if (sLegacySkipTemplates.contains(template) &&
mStaticInfo.isHardwareLevelLegacy()) {
// OK
+ } else if (template != CameraDevice.TEMPLATE_PREVIEW &&
+ mStaticInfo.isDepthOutputSupported() &&
+ !mStaticInfo.isColorOutputSupported()) {
+ // OK, depth-only devices need only support PREVIEW template
} else {
throw e; // rethrow
}
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
index a823512..c5eb27b 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CameraTestUtils.java
@@ -817,9 +817,10 @@
ByteBuffer buffer = null;
// JPEG doesn't have pixelstride and rowstride, treat it as 1D buffer.
- if (format == ImageFormat.JPEG) {
+ // Same goes for DEPTH_POINT_CLOUD
+ if (format == ImageFormat.JPEG || format == ImageFormat.DEPTH_POINT_CLOUD) {
buffer = planes[0].getBuffer();
- assertNotNull("Fail to get jpeg ByteBuffer", buffer);
+ assertNotNull("Fail to get jpeg or depth ByteBuffer", buffer);
data = new byte[buffer.remaining()];
buffer.get(data);
buffer.rewind();
@@ -897,7 +898,9 @@
break;
case ImageFormat.JPEG:
case ImageFormat.RAW_SENSOR:
- assertEquals("Jpeg Image should have one plane", 1, planes.length);
+ case ImageFormat.DEPTH16:
+ case ImageFormat.DEPTH_POINT_CLOUD:
+ assertEquals("JPEG/RAW/depth Images should have one plane", 1, planes.length);
break;
default:
fail("Unsupported Image Format: " + format);
@@ -1164,6 +1167,16 @@
}
/**
+ * Get max depth size for a camera device.
+ */
+ static public Size getMaxDepthSize(String cameraId, CameraManager cameraManager)
+ throws CameraAccessException {
+ List<Size> sizes = getSortedSizesForFormat(cameraId, cameraManager, ImageFormat.DEPTH16,
+ /*bound*/ null);
+ return sizes.get(0);
+ }
+
+ /**
* Get the largest size by area.
*
* @param sizes an array of sizes, must have at least 1 element
@@ -1172,7 +1185,7 @@
*
* @throws IllegalArgumentException if sizes was null or had 0 elements
*/
- public static Size getMaxSize(Size[] sizes) {
+ public static Size getMaxSize(Size... sizes) {
if (sizes == null || sizes.length == 0) {
throw new IllegalArgumentException("sizes was empty");
}
@@ -1267,10 +1280,6 @@
/**
* Validate image based on format and size.
- * <p>
- * Only RAW_SENSOR, YUV420_888 and JPEG formats are supported. Calling this
- * method with other formats will cause a UnsupportedOperationException.
- * </p>
*
* @param image The image to be validated.
* @param width The image width.
@@ -1278,8 +1287,7 @@
* @param format The image format.
* @param filePath The debug dump file path, null if don't want to dump to
* file.
- * @throws UnsupportedOperationException if calling with format other than
- * RAW_SENSOR, YUV420_888 or JPEG.
+ * @throws UnsupportedOperationException if calling with an unknown format
*/
public static void validateImage(Image image, int width, int height, int format,
String filePath) {
@@ -1306,6 +1314,12 @@
case ImageFormat.RAW_SENSOR:
validateRaw16Data(data, width, height, format, image.getTimestamp(), filePath);
break;
+ case ImageFormat.DEPTH16:
+ validateDepth16Data(data, width, height, format, image.getTimestamp(), filePath);
+ break;
+ case ImageFormat.DEPTH_POINT_CLOUD:
+ validateDepthPointCloudData(data, width, height, format, image.getTimestamp(), filePath);
+ break;
default:
throw new UnsupportedOperationException("Unsupported format for validation: "
+ format);
@@ -1389,7 +1403,7 @@
long ts, String filePath) {
if (VERBOSE) Log.v(TAG, "Validating raw data");
int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
- assertEquals("Yuv data doesn't match", expectedSize, rawData.length);
+ assertEquals("Raw data doesn't match", expectedSize, rawData.length);
// TODO: Can add data validation for test pattern.
@@ -1402,6 +1416,41 @@
return;
}
+ private static void validateDepth16Data(byte[] depthData, int width, int height, int format,
+ long ts, String filePath) {
+
+ if (VERBOSE) Log.v(TAG, "Validating depth16 data");
+ int expectedSize = width * height * ImageFormat.getBitsPerPixel(format) / 8;
+ assertEquals("Depth data doesn't match", expectedSize, depthData.length);
+
+
+ if (DEBUG && filePath != null) {
+ String fileName =
+ filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth16";
+ dumpFile(fileName, depthData);
+ }
+
+ return;
+
+ }
+
+ private static void validateDepthPointCloudData(byte[] depthData, int width, int height, int format,
+ long ts, String filePath) {
+
+ if (VERBOSE) Log.v(TAG, "Validating depth point cloud data");
+
+ // Can't validate size since it is variable
+
+ if (DEBUG && filePath != null) {
+ String fileName =
+ filePath + "/" + width + "x" + height + "_" + ts / 1e6 + ".depth_point_cloud";
+ dumpFile(fileName, depthData);
+ }
+
+ return;
+
+ }
+
public static <T> T getValueNotNull(CaptureResult result, CaptureResult.Key<T> key) {
if (result == null) {
throw new IllegalArgumentException("Result must not be null");
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
index 235c1f8..3e76fbc 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureRequestTest.java
@@ -268,7 +268,7 @@
// Without manual sensor control, exposure time cannot be verified
if (!mStaticInfo.isCapabilitySupported(
CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
- return;
+ continue;
}
int[] modes = mStaticInfo.getAeAvailableAntiBandingModesChecked();
@@ -300,6 +300,11 @@
for (int i = 0; i < mCameraIds.length; i++) {
try {
openDevice(mCameraIds[i]);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
Size maxPreviewSz = mOrderedPreviewSizes.get(0); // Max preview size.
@@ -327,6 +332,11 @@
for (int i = 0; i < mCameraIds.length; i++) {
try {
openDevice(mCameraIds[i]);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
SimpleCaptureCallback listener = new SimpleCaptureCallback();
CaptureRequest.Builder requestBuilder =
@@ -364,7 +374,11 @@
for (int i = 0; i < mCameraIds.length; i++) {
try {
openDevice(mCameraIds[i]);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
faceDetectionTestByCamera();
} finally {
closeDevice();
@@ -479,7 +493,10 @@
for (String id : mCameraIds) {
try {
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
awbModeAndLockTestByCamera();
} finally {
closeDevice();
@@ -494,7 +511,10 @@
for (String id : mCameraIds) {
try {
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
afModeTestByCamera();
} finally {
closeDevice();
@@ -517,7 +537,10 @@
Log.i(TAG, "Camera " + id + " doesn't support any stabilization modes");
continue;
}
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
stabilizationTestByCamera();
} finally {
closeDevice();
@@ -533,7 +556,10 @@
for (String id : mCameraIds) {
try {
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
Size maxPreviewSize = mOrderedPreviewSizes.get(0);
digitalZoomTestByCamera(maxPreviewSize);
} finally {
@@ -550,7 +576,10 @@
for (String id : mCameraIds) {
try {
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
digitalZoomPreviewCombinationTestByCamera();
} finally {
closeDevice();
@@ -581,7 +610,10 @@
for (String id : mCameraIds) {
try {
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
effectModeTestByCamera();
} finally {
closeDevice();
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java
index df3f151..b752c07 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/CaptureResultTest.java
@@ -97,10 +97,16 @@
for (String id : mCameraIds) {
try {
openDevice(id);
- // Create image reader and surface.
- Size size = mOrderedPreviewSizes.get(0);
- createDefaultImageReader(size, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
- new ImageDropperListener());
+ if (mStaticInfo.isColorOutputSupported()) {
+ // Create image reader and surface.
+ Size size = mOrderedPreviewSizes.get(0);
+ createDefaultImageReader(size, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
+ new ImageDropperListener());
+ } else {
+ Size size = getMaxDepthSize(id, mCameraManager);
+ createDefaultImageReader(size, ImageFormat.DEPTH16, MAX_NUM_IMAGES,
+ new ImageDropperListener());
+ }
// Configure output streams.
List<Surface> outputSurfaces = new ArrayList<Surface>(1);
@@ -158,9 +164,15 @@
}
// Create image reader and surface.
- Size size = mOrderedPreviewSizes.get(0);
- createDefaultImageReader(size, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
- new ImageDropperListener());
+ if (mStaticInfo.isColorOutputSupported()) {
+ Size size = mOrderedPreviewSizes.get(0);
+ createDefaultImageReader(size, ImageFormat.YUV_420_888, MAX_NUM_IMAGES,
+ new ImageDropperListener());
+ } else {
+ Size size = getMaxDepthSize(id, mCameraManager);
+ createDefaultImageReader(size, ImageFormat.DEPTH16, MAX_NUM_IMAGES,
+ new ImageDropperListener());
+ }
// Configure output streams.
List<Surface> outputSurfaces = new ArrayList<Surface>(1);
@@ -263,6 +275,10 @@
SimpleImageReaderListener prevListener = new SimpleImageReaderListener();
try {
openDevice(id);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
CaptureRequest.Builder previewBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
@@ -469,6 +485,8 @@
CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT)) {
waiverKeys.add(CaptureResult.LENS_POSE_ROTATION);
waiverKeys.add(CaptureResult.LENS_POSE_TRANSLATION);
+ waiverKeys.add(CaptureResult.LENS_INTRINSIC_CALIBRATION);
+ waiverKeys.add(CaptureResult.LENS_RADIAL_DISTORTION);
}
if (mStaticInfo.getAeMaxRegionsChecked() == 0) {
@@ -539,12 +557,12 @@
waiverKeys.add(CaptureResult.LENS_FILTER_DENSITY);
}
- if (mStaticInfo.isHardwareLevelLimited()) {
+ if (mStaticInfo.isHardwareLevelLimited() && mStaticInfo.isColorOutputSupported()) {
return waiverKeys;
}
/*
- * Hardware Level = LEGACY
+ * Hardware Level = LEGACY or no regular output is supported
*/
waiverKeys.add(CaptureResult.CONTROL_AE_PRECAPTURE_TRIGGER);
waiverKeys.add(CaptureResult.CONTROL_AE_STATE);
@@ -558,6 +576,30 @@
waiverKeys.add(CaptureResult.CONTROL_AE_TARGET_FPS_RANGE);
waiverKeys.add(CaptureResult.CONTROL_AF_TRIGGER);
+ if (mStaticInfo.isHardwareLevelLegacy()) {
+ return waiverKeys;
+ }
+
+ /*
+ * Regular output not supported, only depth, waive color-output-related keys
+ */
+ waiverKeys.add(CaptureResult.CONTROL_SCENE_MODE);
+ waiverKeys.add(CaptureResult.CONTROL_EFFECT_MODE);
+ waiverKeys.add(CaptureResult.CONTROL_VIDEO_STABILIZATION_MODE);
+ waiverKeys.add(CaptureResult.SENSOR_TEST_PATTERN_MODE);
+ waiverKeys.add(CaptureResult.NOISE_REDUCTION_MODE);
+ waiverKeys.add(CaptureResult.COLOR_CORRECTION_ABERRATION_MODE);
+ waiverKeys.add(CaptureResult.CONTROL_AE_ANTIBANDING_MODE);
+ waiverKeys.add(CaptureResult.CONTROL_AE_EXPOSURE_COMPENSATION);
+ waiverKeys.add(CaptureResult.CONTROL_AE_LOCK);
+ waiverKeys.add(CaptureResult.CONTROL_AE_MODE);
+ waiverKeys.add(CaptureResult.CONTROL_AF_MODE);
+ waiverKeys.add(CaptureResult.CONTROL_AWB_MODE);
+ waiverKeys.add(CaptureResult.CONTROL_AWB_LOCK);
+ waiverKeys.add(CaptureResult.STATISTICS_FACE_DETECT_MODE);
+ waiverKeys.add(CaptureResult.FLASH_MODE);
+ waiverKeys.add(CaptureResult.SCALER_CROP_REGION);
+
return waiverKeys;
}
@@ -706,6 +748,8 @@
resultKeys.add(CaptureResult.LENS_OPTICAL_STABILIZATION_MODE);
resultKeys.add(CaptureResult.LENS_POSE_ROTATION);
resultKeys.add(CaptureResult.LENS_POSE_TRANSLATION);
+ resultKeys.add(CaptureResult.LENS_INTRINSIC_CALIBRATION);
+ resultKeys.add(CaptureResult.LENS_RADIAL_DISTORTION);
resultKeys.add(CaptureResult.LENS_FOCUS_RANGE);
resultKeys.add(CaptureResult.LENS_STATE);
resultKeys.add(CaptureResult.NOISE_REDUCTION_MODE);
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java
index f8ee615..80cd288 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/ExtendedCameraCharacteristicsTest.java
@@ -146,7 +146,17 @@
mIds[counter]), config);
int[] outputFormats = config.getOutputFormats();
+ int[] actualCapabilities = c.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+ assertNotNull("android.request.availableCapabilities must never be null",
+ actualCapabilities);
+
// Check required formats exist (JPEG, and YUV_420_888).
+ if (!arrayContains(actualCapabilities, BC)) {
+ Log.i(TAG, "Camera " + mIds[counter] +
+ ": BACKWARD_COMPATIBLE capability not supported, skipping test");
+ continue;
+ }
+
assertArrayContains(
String.format("No valid YUV_420_888 preview formats found for: ID %s",
mIds[counter]), outputFormats, ImageFormat.YUV_420_888);
@@ -291,75 +301,76 @@
* for a fact most keys are going to be illegal there so they should never be
* available.
*
- * (TODO: Codegen this)
+ * For LIMITED-level keys, if the level is >= LIMITED, then the capabilities are used to
+ * do the actual checking.
*/
{
// (Key Name) (HW Level) (Capabilities <Var-Arg>)
- expectKeyAvailable(c, CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AVAILABLE_MODES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_LOCK_AVAILABLE , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_AWB_LOCK_AVAILABLE , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_MAX_REGIONS_AE , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_MAX_REGIONS_AF , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.CONTROL_MAX_REGIONS_AWB , LEGACY , BC );
+ expectKeyAvailable(c, CameraCharacteristics.COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AVAILABLE_MODES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_AVAILABLE_ANTIBANDING_MODES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_AVAILABLE_MODES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_COMPENSATION_RANGE , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_COMPENSATION_STEP , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AE_LOCK_AVAILABLE , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AVAILABLE_EFFECTS , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AVAILABLE_SCENE_MODES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AWB_AVAILABLE_MODES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_AWB_LOCK_AVAILABLE , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_MAX_REGIONS_AE , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_MAX_REGIONS_AF , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.CONTROL_MAX_REGIONS_AWB , OPT , BC );
expectKeyAvailable(c, CameraCharacteristics.EDGE_AVAILABLE_EDGE_MODES , FULL , NONE );
- expectKeyAvailable(c, CameraCharacteristics.FLASH_INFO_AVAILABLE , LEGACY , BC );
+ expectKeyAvailable(c, CameraCharacteristics.FLASH_INFO_AVAILABLE , OPT , BC );
expectKeyAvailable(c, CameraCharacteristics.HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES , OPT , RAW );
- expectKeyAvailable(c, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.LENS_FACING , LEGACY , BC );
+ expectKeyAvailable(c, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.JPEG_AVAILABLE_THUMBNAIL_SIZES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.LENS_FACING , OPT , BC );
expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_AVAILABLE_APERTURES , FULL , MANUAL_SENSOR );
expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_AVAILABLE_FILTER_DENSITIES , FULL , MANUAL_SENSOR );
- expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION , LIMITED , MANUAL_SENSOR );
+ expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION , LIMITED , BC );
expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_FOCUS_DISTANCE_CALIBRATION , LIMITED , MANUAL_SENSOR );
- expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE , LIMITED , MANUAL_SENSOR );
- expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE , LIMITED , NONE );
- expectKeyAvailable(c, CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES , LEGACY , BC );
+ expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_HYPERFOCAL_DISTANCE , LIMITED , BC );
+ expectKeyAvailable(c, CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE , LIMITED , BC );
+ expectKeyAvailable(c, CameraCharacteristics.NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES , OPT , BC );
expectKeyAvailable(c, CameraCharacteristics.REQUEST_MAX_NUM_INPUT_STREAMS , OPT , YUV_REPROCESS, OPAQUE_REPROCESS);
expectKeyAvailable(c, CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP , OPT , CONSTRAINED_HIGH_SPEED);
- expectKeyAvailable(c, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.REQUEST_PARTIAL_RESULT_COUNT , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.SCALER_CROPPING_TYPE , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.SENSOR_AVAILABLE_TEST_PATTERN_MODES , LEGACY , NONE );
+ expectKeyAvailable(c, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_PROC_STALLING , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.REQUEST_MAX_NUM_OUTPUT_RAW , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.REQUEST_PARTIAL_RESULT_COUNT , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.REQUEST_PIPELINE_MAX_DEPTH , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.SCALER_CROPPING_TYPE , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.SENSOR_AVAILABLE_TEST_PATTERN_MODES , OPT , BC );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_BLACK_LEVEL_PATTERN , FULL , MANUAL_SENSOR, RAW );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_CALIBRATION_TRANSFORM1 , OPT , RAW );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_COLOR_TRANSFORM1 , OPT , RAW );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_FORWARD_MATRIX1 , OPT , RAW );
- expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE , LEGACY , BC, RAW );
+ expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE , OPT , BC, RAW );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_COLOR_FILTER_ARRANGEMENT , FULL , RAW );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_EXPOSURE_TIME_RANGE , FULL , MANUAL_SENSOR );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_MAX_FRAME_DURATION , FULL , MANUAL_SENSOR );
- expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE , LEGACY , BC );
+ expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_PIXEL_ARRAY_SIZE , OPT , BC );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_SENSITIVITY_RANGE , FULL , MANUAL_SENSOR );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_WHITE_LEVEL , OPT , RAW );
- expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE , LEGACY , BC );
+ expectKeyAvailable(c, CameraCharacteristics.SENSOR_INFO_TIMESTAMP_SOURCE , OPT , BC );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_MAX_ANALOG_SENSITIVITY , FULL , MANUAL_SENSOR );
- expectKeyAvailable(c, CameraCharacteristics.SENSOR_ORIENTATION , LEGACY , BC );
+ expectKeyAvailable(c, CameraCharacteristics.SENSOR_ORIENTATION , OPT , BC );
expectKeyAvailable(c, CameraCharacteristics.SENSOR_REFERENCE_ILLUMINANT1 , OPT , RAW );
expectKeyAvailable(c, CameraCharacteristics.SHADING_AVAILABLE_MODES , LIMITED , MANUAL_POSTPROC, RAW );
- expectKeyAvailable(c, CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES , LEGACY , BC );
+ expectKeyAvailable(c, CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES , OPT , BC );
expectKeyAvailable(c, CameraCharacteristics.STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES , OPT , RAW );
expectKeyAvailable(c, CameraCharacteristics.STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES, LIMITED , RAW );
- expectKeyAvailable(c, CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT , LEGACY , BC );
- expectKeyAvailable(c, CameraCharacteristics.SYNC_MAX_LATENCY , LEGACY , BC );
+ expectKeyAvailable(c, CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT , OPT , BC );
+ expectKeyAvailable(c, CameraCharacteristics.SYNC_MAX_LATENCY , OPT , BC );
expectKeyAvailable(c, CameraCharacteristics.TONEMAP_AVAILABLE_TONE_MAP_MODES , FULL , MANUAL_POSTPROC );
expectKeyAvailable(c, CameraCharacteristics.TONEMAP_MAX_CURVE_POINTS , FULL , MANUAL_POSTPROC );
@@ -476,6 +487,14 @@
// Check if the burst capability is defined
boolean haveBurstCapability = arrayContains(actualCapabilities,
CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
+ boolean haveBC = arrayContains(actualCapabilities,
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+
+ if(haveBurstCapability && !haveBC) {
+ fail("Must have BACKWARD_COMPATIBLE capability if BURST_CAPTURE capability is defined");
+ }
+
+ if (!haveBC) continue;
StreamConfigurationMap config =
c.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
@@ -730,31 +749,41 @@
assertNotNull(String.format("No stream configuration map found for: ID %s",
mIds[counter]), config);
- assertTrue("ImageReader must be supported",
+ int[] actualCapabilities = c.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
+ assertNotNull("android.request.availableCapabilities must never be null",
+ actualCapabilities);
+
+ if (arrayContains(actualCapabilities, BC)) {
+ assertTrue("ImageReader must be supported",
config.isOutputSupportedFor(android.media.ImageReader.class));
- assertTrue("MediaRecorder must be supported",
+ assertTrue("MediaRecorder must be supported",
config.isOutputSupportedFor(android.media.MediaRecorder.class));
- assertTrue("MediaCodec must be supported",
+ assertTrue("MediaCodec must be supported",
config.isOutputSupportedFor(android.media.MediaCodec.class));
- assertTrue("Allocation must be supported",
+ assertTrue("Allocation must be supported",
config.isOutputSupportedFor(android.renderscript.Allocation.class));
- assertTrue("SurfaceHolder must be supported",
+ assertTrue("SurfaceHolder must be supported",
config.isOutputSupportedFor(android.view.SurfaceHolder.class));
- assertTrue("SurfaceTexture must be supported",
+ assertTrue("SurfaceTexture must be supported",
config.isOutputSupportedFor(android.graphics.SurfaceTexture.class));
- assertTrue("YUV_420_888 must be supported",
+ assertTrue("YUV_420_888 must be supported",
config.isOutputSupportedFor(ImageFormat.YUV_420_888));
- assertTrue("JPEG must be supported",
+ assertTrue("JPEG must be supported",
config.isOutputSupportedFor(ImageFormat.JPEG));
+ } else {
+ assertTrue("YUV_420_888 may not be supported if BACKWARD_COMPATIBLE capability is not listed",
+ !config.isOutputSupportedFor(ImageFormat.YUV_420_888));
+ assertTrue("JPEG may not be supported if BACKWARD_COMPATIBLE capability is not listed",
+ !config.isOutputSupportedFor(ImageFormat.JPEG));
+ }
// Legacy YUV formats should not be listed
assertTrue("NV21 must not be supported",
!config.isOutputSupportedFor(ImageFormat.NV21));
- int[] actualCapabilities = c.get(CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES);
- assertNotNull("android.request.availableCapabilities must never be null",
- actualCapabilities);
+ // Check RAW
+
if (arrayContains(actualCapabilities,
CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_RAW)) {
assertTrue("RAW_SENSOR must be supported if RAW capability is advertised",
@@ -872,49 +901,50 @@
} // formats
// Cross-check opaque format and sizes
+ if (arrayContains(actualCapabilities, BC)) {
+ SurfaceTexture st = new SurfaceTexture(1);
+ Surface surf = new Surface(st);
- SurfaceTexture st = new SurfaceTexture(1);
- Surface surf = new Surface(st);
+ Size[] opaqueSizes = CameraTestUtils.getSupportedSizeForClass(SurfaceTexture.class,
+ mIds[counter], mCameraManager);
+ assertTrue("Opaque format has no sizes listed",
+ opaqueSizes.length > 0);
+ for (Size size : opaqueSizes) {
+ long stallDuration = config.getOutputStallDuration(SurfaceTexture.class, size);
+ assertTrue("Opaque output may not have a non-zero stall duration",
+ stallDuration == 0);
- Size[] opaqueSizes = CameraTestUtils.getSupportedSizeForClass(SurfaceTexture.class,
- mIds[counter], mCameraManager);
- assertTrue("Opaque format has no sizes listed",
- opaqueSizes.length > 0);
- for (Size size : opaqueSizes) {
- long stallDuration = config.getOutputStallDuration(SurfaceTexture.class, size);
- assertTrue("Opaque output may not have a non-zero stall duration",
- stallDuration == 0);
+ long minDuration = config.getOutputMinFrameDuration(SurfaceTexture.class, size);
+ if (arrayContains(actualCapabilities,
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
+ assertTrue("MANUAL_SENSOR capability, need positive min frame duration for"
+ + "opaque format",
+ minDuration > 0);
+ } else {
+ assertTrue("Need non-negative min frame duration for opaque format ",
+ minDuration >= 0);
+ }
+ st.setDefaultBufferSize(size.getWidth(), size.getHeight());
- long minDuration = config.getOutputMinFrameDuration(SurfaceTexture.class, size);
- if (arrayContains(actualCapabilities,
- CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
- assertTrue("MANUAL_SENSOR capability, need positive min frame duration for"
- + "opaque format",
- minDuration > 0);
- } else {
- assertTrue("Need non-negative min frame duration for opaque format ",
- minDuration >= 0);
- }
- st.setDefaultBufferSize(size.getWidth(), size.getHeight());
+ assertTrue(
+ String.format("isOutputSupportedFor fails for SurfaceTexture config %s",
+ size.toString()),
+ config.isOutputSupportedFor(surf));
+ } // opaque sizes
+
+ // Try invalid opaque size, should get rounded
+ Size invalidSize = findInvalidSize(opaqueSizes);
+ st.setDefaultBufferSize(invalidSize.getWidth(), invalidSize.getHeight());
assertTrue(
- String.format("isOutputSupportedFor fails for SurfaceTexture config %s",
- size.toString()),
- config.isOutputSupportedFor(surf));
+ String.format("isOutputSupportedFor fails for SurfaceTexture config %s",
+ invalidSize.toString()),
+ config.isOutputSupportedFor(surf));
- } // opaque sizes
+ counter++;
+ }
- // Try invalid opaque size, should get rounded
- Size invalidSize = findInvalidSize(opaqueSizes);
- st.setDefaultBufferSize(invalidSize.getWidth(), invalidSize.getHeight());
- assertTrue(
- String.format("isOutputSupportedFor fails for SurfaceTexture config %s",
- invalidSize.toString()),
- config.isOutputSupportedFor(surf));
-
- counter++;
} // mCharacteristics
-
}
/**
@@ -1036,7 +1066,8 @@
T value = c.get(key);
- if (compareHardwareLevel(actualHwLevel, hwLevel) >= 0) {
+ // For LIMITED-level targeted keys, rely on capability check, not level
+ if ((compareHardwareLevel(actualHwLevel, hwLevel) >= 0) && (hwLevel != LIMITED)) {
mCollector.expectTrue(
String.format("Key (%s) must be in characteristics for this hardware level " +
"(required minimal HW level %s, actual HW level %s)",
@@ -1050,18 +1081,21 @@
toStringHardwareLevel(actualHwLevel)),
allKeys.contains(key));
} else if (arrayContainsAnyOf(actualCapabilities, capabilities)) {
- mCollector.expectTrue(
+ if (!(hwLevel == LIMITED && compareHardwareLevel(actualHwLevel, hwLevel) < 0)) {
+ // Don't enforce LIMITED-starting keys on LEGACY level, even if cap is defined
+ mCollector.expectTrue(
String.format("Key (%s) must be in characteristics for these capabilities " +
"(required capabilities %s, actual capabilities %s)",
key.getName(), Arrays.toString(capabilities),
Arrays.toString(actualCapabilities)),
value != null);
- mCollector.expectTrue(
+ mCollector.expectTrue(
String.format("Key (%s) must be in characteristics list of keys for " +
"these capabilities (required capabilities %s, actual capabilities %s)",
key.getName(), Arrays.toString(capabilities),
Arrays.toString(actualCapabilities)),
allKeys.contains(key));
+ }
} else {
if (actualHwLevel == LEGACY && hwLevel != OPT) {
if (value != null || allKeys.contains(key)) {
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/ImageReaderTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/ImageReaderTest.java
index 8cba6fd..7eb4a0a 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/ImageReaderTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/ImageReaderTest.java
@@ -104,6 +104,30 @@
}
}
+ public void testDepth16() throws Exception {
+ for (String id : mCameraIds) {
+ try {
+ Log.i(TAG, "Testing Camera " + id);
+ openDevice(id);
+ bufferFormatTestByCamera(ImageFormat.DEPTH16, /*repeating*/true);
+ } finally {
+ closeDevice(id);
+ }
+ }
+ }
+
+ public void testDepthPointCloud() throws Exception {
+ for (String id : mCameraIds) {
+ try {
+ Log.i(TAG, "Testing Camera " + id);
+ openDevice(id);
+ bufferFormatTestByCamera(ImageFormat.DEPTH_POINT_CLOUD, /*repeating*/true);
+ } finally {
+ closeDevice(id);
+ }
+ }
+ }
+
public void testJpeg() throws Exception {
for (String id : mCameraIds) {
try {
@@ -185,7 +209,11 @@
try {
Log.v(TAG, "YUV and JPEG testing for camera " + id);
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id +
+ " does not support color outputs, skipping");
+ continue;
+ }
bufferFormatWithYuvTestByCamera(ImageFormat.JPEG);
} finally {
closeDevice(id);
@@ -202,7 +230,11 @@
try {
Log.v(TAG, "YUV and RAW testing for camera " + id);
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id +
+ " does not support color outputs, skipping");
+ continue;
+ }
bufferFormatWithYuvTestByCamera(ImageFormat.RAW_SENSOR);
} finally {
closeDevice(id);
@@ -220,6 +252,11 @@
Log.v(TAG, "Testing all YUV image resolutions for camera " + id);
openDevice(id);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
+
// Skip warmup on FULL mode devices.
int warmupCaptureNumber = (mStaticInfo.isHardwareLevelLegacy()) ?
MAX_NUM_IMAGES - 1 : 0;
@@ -594,7 +631,9 @@
}
private void invalidAccessTestAfterClose() throws Exception {
- final int FORMAT = ImageFormat.YUV_420_888;
+ final int FORMAT = mStaticInfo.isColorOutputSupported() ?
+ ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;
+
Size[] availableSizes = mStaticInfo.getAvailableSizesForFormatChecked(FORMAT,
StaticMetadata.StreamDirection.Output);
Image img = null;
@@ -753,7 +792,7 @@
if (VERBOSE) Log.v(TAG, "Got the latest image");
CameraTestUtils.validateImage(img, sz.getWidth(), sz.getHeight(), format,
DEBUG_FILE_NAME_BASE);
- if (VERBOSE) Log.v(TAG, "finish vaildation of image " + numImageVerified);
+ if (VERBOSE) Log.v(TAG, "finish validation of image " + numImageVerified);
img.close();
numImageVerified++;
reTryCount = 0;
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java
index be96aeb..167e637 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/ImageWriterTest.java
@@ -93,6 +93,10 @@
try {
Log.i(TAG, "Testing Camera " + id);
openDevice(id);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
readerWriterFormatTestByCamera(ImageFormat.YUV_420_888);
} finally {
closeDevice(id);
@@ -124,6 +128,10 @@
try {
Log.i(TAG, "Testing Camera " + id);
openDevice(id);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
readerWriterFormatTestByCamera(CAMERA_PRIVATE_FORMAT);
} finally {
closeDevice(id);
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/MultiViewTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/MultiViewTest.java
index 0d0ecb4..dfba587f 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/MultiViewTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/MultiViewTest.java
@@ -47,10 +47,29 @@
public void testTextureViewPreview() throws Exception {
for (String cameraId : mCameraIds) {
- openCamera(cameraId);
- List<TextureView> views = Arrays.asList(mTextureView[0]);
- textureViewPreview(cameraId, views, /*ImageReader*/null);
- closeCamera(cameraId);
+ Exception prior = null;
+
+ try {
+ openCamera(cameraId);
+ if (!getStaticInfo(cameraId).isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + cameraId + " does not support color outputs, skipping");
+ continue;
+ }
+ List<TextureView> views = Arrays.asList(mTextureView[0]);
+ textureViewPreview(cameraId, views, /*ImageReader*/null);
+ } catch (Exception e) {
+ prior = e;
+ } finally {
+ try {
+ closeCamera(cameraId);
+ } catch (Exception e) {
+ if (prior != null) {
+ Log.e(TAG, "Prior exception received: " + prior);
+ }
+ prior = e;
+ }
+ if (prior != null) throw prior; // Rethrow last exception.
+ }
}
}
@@ -63,6 +82,10 @@
try {
openCamera(cameraId);
+ if (!getStaticInfo(cameraId).isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + cameraId + " does not support color outputs, skipping");
+ continue;
+ }
Size previewSize = getOrderedPreviewSizes(cameraId).get(0);
yuvListener =
new ImageVerifierListener(previewSize, ImageFormat.YUV_420_888);
@@ -96,6 +119,10 @@
Exception prior = null;
try {
openCamera(cameraId);
+ if (!getStaticInfo(cameraId).isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + cameraId + " does not support color outputs, skipping");
+ continue;
+ }
int maxNumStreamsProc =
getStaticInfo(cameraId).getMaxNumOutputStreamsProcessedChecked();
if (maxNumStreamsProc < 2) {
@@ -128,6 +155,10 @@
try {
openCamera(cameraId);
+ if (!getStaticInfo(cameraId).isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + cameraId + " does not support color outputs, skipping");
+ continue;
+ }
Size previewSize = getOrderedPreviewSizes(cameraId).get(0);
yuvListener =
new ImageVerifierListener(previewSize, ImageFormat.YUV_420_888);
@@ -165,6 +196,11 @@
try {
for (int i = 0; i < NUM_CAMERAS_TESTED; i++) {
openCamera(mCameraIds[i]);
+ if (!getStaticInfo(mCameraIds[i]).isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
List<TextureView> views = Arrays.asList(mTextureView[i]);
startTextureViewPreview(mCameraIds[i], views, /*ImageReader*/null);
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java
index 908e6a5..e1502bc 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/PerformanceTest.java
@@ -109,7 +109,9 @@
* case, there is no way for client to know the exact preview frame
* arrival time. To approximate this time, a companion YUV420_888 stream is
* created. The first YUV420_888 Image coming out of the ImageReader is treated
- * as the first preview arrival time.
+ * as the first preview arrival time.</p>
+ * <p>
+ * For depth-only devices, timing is done with the DEPTH16 format instead.
* </p>
*/
public void testCameraLaunch() throws Exception {
@@ -124,7 +126,15 @@
int counter = 0;
for (String id : mCameraIds) {
try {
- initializeImageReader(id, ImageFormat.YUV_420_888);
+ mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(id));
+ if (mStaticInfo.isColorOutputSupported()) {
+ initializeImageReader(id, ImageFormat.YUV_420_888);
+ } else {
+ assertTrue("Depth output must be supported if regular output isn't!",
+ mStaticInfo.isDepthOutputSupported());
+ initializeImageReader(id, ImageFormat.DEPTH16);
+ }
+
SimpleImageListener imageListener = null;
long startTimeMs, openTimeMs, configureTimeMs, previewStartedTimeMs;
for (int i = 0; i < NUM_TEST_LOOPS; i++) {
@@ -223,6 +233,12 @@
try {
openDevice(id);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
+
+
boolean partialsExpected = mStaticInfo.getPartialResultCount() > 1;
long startTimeMs;
boolean isPartialTimingValid = partialsExpected;
@@ -656,7 +672,9 @@
CaptureRequest.Builder previewBuilder =
mCamera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
- previewBuilder.addTarget(mPreviewSurface);
+ if (mStaticInfo.isColorOutputSupported()) {
+ previewBuilder.addTarget(mPreviewSurface);
+ }
previewBuilder.addTarget(mReaderSurface);
mSession.setRepeatingRequest(previewBuilder.build(), listener, mHandler);
imageListener.waitForImageAvailable(CameraTestUtils.CAPTURE_IMAGE_TIMEOUT_MS);
@@ -671,7 +689,9 @@
}
mSessionListener = new BlockingSessionCallback();
List<Surface> outputSurfaces = new ArrayList<>();
- outputSurfaces.add(mPreviewSurface);
+ if (mStaticInfo.isColorOutputSupported()) {
+ outputSurfaces.add(mPreviewSurface);
+ }
outputSurfaces.add(mReaderSurface);
mSession = CameraTestUtils.configureCameraSession(mCamera, outputSurfaces,
mSessionListener, mHandler);
@@ -683,8 +703,8 @@
* @param format The format used to create ImageReader instance.
*/
private void initializeImageReader(String cameraId, int format) throws Exception {
- mOrderedPreviewSizes = CameraTestUtils.getSupportedPreviewSizes(
- cameraId, mCameraManager,
+ mOrderedPreviewSizes = CameraTestUtils.getSortedSizesForFormat(
+ cameraId, mCameraManager, format,
CameraTestUtils.getPreviewSizeBound(mWindowManager,
CameraTestUtils.PREVIEW_SIZE_BOUND));
Size maxPreviewSize = mOrderedPreviewSizes.get(0);
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
index 3885376..fd9955a 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/RecordingTest.java
@@ -109,7 +109,11 @@
// Re-use the MediaRecorder object for the same camera device.
mMediaRecorder = new MediaRecorder();
openDevice(mCameraIds[i]);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
initSupportedVideoSize(mCameraIds[i]);
basicRecordingTestByCamera(mCamcorderProfileList);
@@ -174,7 +178,11 @@
// Re-use the MediaRecorder object for the same camera device.
mMediaRecorder = new MediaRecorder();
openDevice(mCameraIds[i]);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
initSupportedVideoSize(mCameraIds[i]);
recordingSizeTestByCamera();
@@ -271,7 +279,11 @@
// Re-use the MediaRecorder object for the same camera device.
mMediaRecorder = new MediaRecorder();
openDevice(mCameraIds[i]);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
initSupportedVideoSize(mCameraIds[i]);
int minFpsProfileId = -1, minFps = 1000;
@@ -323,7 +335,11 @@
// Re-use the MediaRecorder object for the same camera device.
mMediaRecorder = new MediaRecorder();
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id +
+ " does not support color outputs, skipping");
+ continue;
+ }
if (!mStaticInfo.isHighSpeedVideoSupported()) {
continue;
}
@@ -708,6 +724,12 @@
openDevice(id);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id +
+ " does not support color outputs, skipping");
+ continue;
+ }
+
initSupportedVideoSize(id);
videoSnapshotTestByCamera(burstTest);
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/RobustnessTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/RobustnessTest.java
index 5e1f399..34f2d85 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/RobustnessTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/RobustnessTest.java
@@ -81,10 +81,17 @@
Log.i(TAG, "Testing Camera " + id);
openDevice(id);
+ List<Size> testSizes = null;
+ int format = mStaticInfo.isColorOutputSupported() ?
+ ImageFormat.YUV_420_888 : ImageFormat.DEPTH16;
+
+ testSizes = CameraTestUtils.getSortedSizesForFormat(id, mCameraManager,
+ format, null);
+
// Find some size not supported by the camera
Size weirdSize = new Size(643, 577);
int count = 0;
- while(mOrderedPreviewSizes.contains(weirdSize)) {
+ while(testSizes.contains(weirdSize)) {
// Really, they can't all be supported...
weirdSize = new Size(weirdSize.getWidth() + 1, weirdSize.getHeight() + 1);
count++;
@@ -93,7 +100,7 @@
// Setup imageReader with invalid dimension
ImageReader imageReader = ImageReader.newInstance(weirdSize.getWidth(),
- weirdSize.getHeight(), ImageFormat.YUV_420_888, 3);
+ weirdSize.getHeight(), format, 3);
// Setup ImageReaderListener
SimpleImageReaderListener imageListener = new SimpleImageReaderListener();
@@ -138,7 +145,7 @@
Size actualSize = new Size(imageWidth, imageHeight);
assertTrue("Camera does not contain outputted image resolution " + actualSize,
- mOrderedPreviewSizes.contains(actualSize));
+ testSizes.contains(actualSize));
} finally {
closeDevice(id);
}
@@ -221,10 +228,12 @@
Log.v(TAG, "StreamConfigurationMap: " + streamConfigurationMapString);
}
- // Always run legacy-level tests
+ // Always run legacy-level tests for color-supporting devices
- for (int[] config : LEGACY_COMBINATIONS) {
- testOutputCombination(id, config, maxSizes);
+ if (mStaticInfo.isColorOutputSupported()) {
+ for (int[] config : LEGACY_COMBINATIONS) {
+ testOutputCombination(id, config, maxSizes);
+ }
}
// Then run higher-level tests if applicable
@@ -233,8 +242,10 @@
// If not legacy, at least limited, so run limited-level tests
- for (int[] config : LIMITED_COMBINATIONS) {
- testOutputCombination(id, config, maxSizes);
+ if (mStaticInfo.isColorOutputSupported()) {
+ for (int[] config : LIMITED_COMBINATIONS) {
+ testOutputCombination(id, config, maxSizes);
+ }
}
// Check for BURST_CAPTURE, FULL and RAW and run those if appropriate
@@ -370,6 +381,11 @@
if (mStaticInfo.isHardwareLevelLegacy() || !mStaticInfo.hasFocuser()) {
continue;
}
+ // Depth-only devices won't support AE
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
int[] availableAfModes = mStaticInfo.getCharacteristics().get(
CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
@@ -506,6 +522,11 @@
if (mStaticInfo.isHardwareLevelLegacy() || !mStaticInfo.hasFocuser()) {
continue;
}
+ // Depth-only devices won't support AE
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
int[] availableAfModes = mStaticInfo.getCharacteristics().get(
CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
@@ -605,6 +626,11 @@
if (mStaticInfo.isHardwareLevelLegacy() || !mStaticInfo.hasFocuser()) {
continue;
}
+ // Depth-only devices won't support AE
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
int[] availableAfModes = mStaticInfo.getCharacteristics().get(
CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
@@ -718,6 +744,11 @@
if (mStaticInfo.isHardwareLevelLegacy() || !mStaticInfo.hasFocuser()) {
continue;
}
+ // Depth-only devices won't support AE
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
int[] availableAfModes = mStaticInfo.getCharacteristics().get(
CameraCharacteristics.CONTROL_AF_AVAILABLE_MODES);
@@ -1043,17 +1074,25 @@
maxRawSize = (rawSizes.length != 0) ? CameraTestUtils.getMaxSize(rawSizes) : null;
- maxPrivSizes[PREVIEW] = getMaxSize(privSizes, maxPreviewSize);
- maxYuvSizes[PREVIEW] = getMaxSize(yuvSizes, maxPreviewSize);
- maxJpegSizes[PREVIEW] = getMaxSize(jpegSizes, maxPreviewSize);
+ if (sm.isColorOutputSupported()) {
+ maxPrivSizes[PREVIEW] = getMaxSize(privSizes, maxPreviewSize);
+ maxYuvSizes[PREVIEW] = getMaxSize(yuvSizes, maxPreviewSize);
+ maxJpegSizes[PREVIEW] = getMaxSize(jpegSizes, maxPreviewSize);
- maxPrivSizes[RECORD] = getMaxRecordingSize(cameraId);
- maxYuvSizes[RECORD] = getMaxRecordingSize(cameraId);
- maxJpegSizes[RECORD] = getMaxRecordingSize(cameraId);
+ maxPrivSizes[RECORD] = getMaxRecordingSize(cameraId);
+ maxYuvSizes[RECORD] = getMaxRecordingSize(cameraId);
+ maxJpegSizes[RECORD] = getMaxRecordingSize(cameraId);
- maxPrivSizes[MAXIMUM] = CameraTestUtils.getMaxSize(privSizes);
- maxYuvSizes[MAXIMUM] = CameraTestUtils.getMaxSize(yuvSizes);
- maxJpegSizes[MAXIMUM] = CameraTestUtils.getMaxSize(jpegSizes);
+ maxPrivSizes[MAXIMUM] = CameraTestUtils.getMaxSize(privSizes);
+ maxYuvSizes[MAXIMUM] = CameraTestUtils.getMaxSize(yuvSizes);
+ maxJpegSizes[MAXIMUM] = CameraTestUtils.getMaxSize(jpegSizes);
+
+ // VGA must always be supported by color-output devices, so add it here
+ final Size vgaSize = new Size(640, 480);
+ maxPrivSizes[VGA] = vgaSize;
+ maxYuvSizes[VGA] = vgaSize;
+ maxJpegSizes[VGA] = vgaSize;
+ }
StreamConfigurationMap configs = sm.getCharacteristics().get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
@@ -1064,11 +1103,6 @@
maxInputYuvSize = yuvInputSizes != null ?
CameraTestUtils.getMaxSize(yuvInputSizes) : null;
- // Must always be supported, add unconditionally
- final Size vgaSize = new Size(640, 480);
- maxPrivSizes[VGA] = vgaSize;
- maxJpegSizes[VGA] = vgaSize;
- maxYuvSizes[VGA] = vgaSize;
}
public final Size[] maxPrivSizes = new Size[RESOLUTION_COUNT];
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java
index 0bc74b3..4415ecc 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/StaticMetadataTest.java
@@ -73,8 +73,10 @@
Size sensorSize = new Size(activeRect.width(), activeRect.height());
List<Integer> availableCaps = mStaticInfo.getAvailableCapabilitiesChecked();
- mCollector.expectTrue("All device must contains BACKWARD_COMPATIBLE capability",
- availableCaps.contains(REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE));
+ mCollector.expectTrue("All devices must contain BACKWARD_COMPATIBLE capability or " +
+ "DEPTH_OUTPUT capability",
+ availableCaps.contains(REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE) ||
+ availableCaps.contains(REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT) );
if (mStaticInfo.isHardwareLevelFull()) {
// Capability advertisement must be right.
@@ -91,18 +93,25 @@
mStaticInfo.isPerFrameControlSupported());
}
+ if (mStaticInfo.isHardwareLevelLegacy()) {
+ mCollector.expectTrue("Legacy devices must contain BACKWARD_COMPATIBLE capability",
+ availableCaps.contains(REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE));
+ }
+
if (availableCaps.contains(REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR)) {
mCollector.expectTrue("MANUAL_SENSOR capability always requires " +
"READ_SENSOR_SETTINGS capability as well",
availableCaps.contains(REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS));
}
- // Max jpeg resolution must be very close to sensor resolution
- Size[] jpegSizes = mStaticInfo.getJpegOutputSizesChecked();
- Size maxJpegSize = CameraTestUtils.getMaxSize(jpegSizes);
- mCollector.expectSizesAreSimilar(
+ if (mStaticInfo.isColorOutputSupported()) {
+ // Max jpeg resolution must be very close to sensor resolution
+ Size[] jpegSizes = mStaticInfo.getJpegOutputSizesChecked();
+ Size maxJpegSize = CameraTestUtils.getMaxSize(jpegSizes);
+ mCollector.expectSizesAreSimilar(
"Active array size and max JPEG size should be similar",
sensorSize, maxJpegSize, SIZE_ERROR_MARGIN);
+ }
// TODO: test all the keys mandatory for all capability devices.
}
@@ -127,9 +136,9 @@
mCollector.expectTrue("max number of processed (non-stalling) output streams" +
"must be >= 3 for FULL device",
maxNumStreamsProc >= 3);
- } else {
+ } else if (mStaticInfo.isColorOutputSupported()) {
mCollector.expectTrue("max number of processed (non-stalling) output streams" +
- "must be >= 2 for LIMITED device",
+ "must be >= 2 for devices that support color output",
maxNumStreamsProc >= 2);
}
}
@@ -319,7 +328,7 @@
Boolean contrastCurveModeSupported = false;
Boolean gammaAndPresetModeSupported = false;
Boolean offColorAberrationModeSupported = false;
- if (mStaticInfo.isHardwareLevelLimitedOrBetter()) {
+ if (mStaticInfo.isHardwareLevelLimitedOrBetter() && mStaticInfo.isColorOutputSupported()) {
int[] tonemapModes = mStaticInfo.getAvailableToneMapModesChecked();
List<Integer> modeList = (tonemapModes.length == 0) ?
new ArrayList<Integer>() :
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java
index d8dda8f..5e931fe 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/StillCaptureTest.java
@@ -83,7 +83,11 @@
try {
Log.i(TAG, "Testing JPEG exif for Camera " + mCameraIds[i]);
openDevice(mCameraIds[i]);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
jpegExifTestByCamera();
} finally {
closeDevice();
@@ -107,7 +111,10 @@
try {
Log.i(TAG, "Testing basic take picture for Camera " + id);
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
takePictureTestByCamera(/*aeRegions*/null, /*awbRegions*/null, /*afRegions*/null);
} finally {
closeDevice();
@@ -185,7 +192,11 @@
if (!(mStaticInfo.hasFocuser() && maxAfRegions > 0)) {
continue;
}
-
+ // TODO: Relax test to use non-SurfaceView output for depth cases
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
touchForFocusTestByCamera();
} finally {
closeDevice();
@@ -206,7 +217,10 @@
try {
Log.i(TAG, "Testing Still preview capture combination for Camera " + id);
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
previewStillCombinationTestByCamera();
} finally {
closeDevice();
@@ -235,7 +249,10 @@
Log.i(TAG, "Skipping test on legacy devices");
continue;
}
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
aeCompensationTestByCamera();
} finally {
closeDevice();
@@ -327,6 +344,10 @@
try {
Log.i(TAG, "Testing preview persistence for Camera " + id);
openDevice(id);
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
previewPersistenceTestByCamera();
} finally {
closeDevice();
@@ -346,7 +367,10 @@
Log.i(TAG, "Skipping AE precapture trigger cancel test on legacy devices");
continue;
}
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
takePictureTestByCamera(/*aeRegions*/null, /*awbRegions*/null, /*afRegions*/null,
/*addAeTriggerCancel*/true);
} finally {
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java b/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
index da9b0ce..06daa51 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/SurfaceViewPreviewTest.java
@@ -77,7 +77,11 @@
try {
Log.i(TAG, "Testing preview for Camera " + mCameraIds[i]);
openDevice(mCameraIds[i]);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
previewTestByCamera();
} finally {
closeDevice();
@@ -97,7 +101,11 @@
try {
Log.i(TAG, "Testing preview for Camera " + mCameraIds[i]);
openDevice(mCameraIds[i]);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
previewTestPatternTestByCamera();
} finally {
closeDevice();
@@ -113,7 +121,10 @@
for (String id : mCameraIds) {
try {
openDevice(id);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + id + " does not support color outputs, skipping");
+ continue;
+ }
previewFpsRangeTestByCamera();
} finally {
closeDevice();
@@ -134,7 +145,11 @@
for (int i = 0; i < mCameraIds.length; i++) {
try {
openDevice(mCameraIds[i]);
-
+ if (!mStaticInfo.isColorOutputSupported()) {
+ Log.i(TAG, "Camera " + mCameraIds[i] +
+ " does not support color outputs, skipping");
+ continue;
+ }
preparePerformanceTestByCamera(mCameraIds[i]);
}
finally {
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
index e097c18..4310b3a 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/helpers/StaticMetadata.java
@@ -437,7 +437,7 @@
* @return AE max regions supported by the camera device
*/
public int getAeMaxRegionsChecked() {
- Integer regionCount = getValueFromKeyNonNull(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
+ Integer regionCount = mCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AE);
if (regionCount == null) {
return 0;
}
@@ -450,7 +450,7 @@
* @return AWB max regions supported by the camera device
*/
public int getAwbMaxRegionsChecked() {
- Integer regionCount = getValueFromKeyNonNull(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
+ Integer regionCount = mCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AWB);
if (regionCount == null) {
return 0;
}
@@ -463,7 +463,7 @@
* @return AF max regions supported by the camera device
*/
public int getAfMaxRegionsChecked() {
- Integer regionCount = getValueFromKeyNonNull(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
+ Integer regionCount = mCharacteristics.get(CameraCharacteristics.CONTROL_MAX_REGIONS_AF);
if (regionCount == null) {
return 0;
}
@@ -2190,6 +2190,23 @@
}
/**
+ * Check if depth output is supported, based on the depth capability
+ */
+ public boolean isDepthOutputSupported() {
+ return isCapabilitySupported(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_DEPTH_OUTPUT);
+ }
+
+ /**
+ * Check if standard outputs (PRIVATE, YUV, JPEG) outputs are supported, based on the
+ * backwards-compatible capability
+ */
+ public boolean isColorOutputSupported() {
+ return isCapabilitySupported(
+ CameraCharacteristics.REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
+ }
+
+ /**
* Get the value in index for a fixed-size array from a given key.
*
* <p>If the camera device is incorrectly reporting values, log a warning and return
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java
index 86dbb5b..0108ee6 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2AndroidTestCase.java
@@ -192,11 +192,13 @@
mCollector.setCameraId(cameraId);
mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
CheckLevel.ASSERT, /*collector*/null);
- mOrderedPreviewSizes = getSupportedPreviewSizes(
- cameraId, mCameraManager,
- getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
- mOrderedVideoSizes = getSupportedVideoSizes(cameraId, mCameraManager, PREVIEW_SIZE_BOUND);
- mOrderedStillSizes = getSupportedStillSizes(cameraId, mCameraManager, null);
+ if (mStaticInfo.isColorOutputSupported()) {
+ mOrderedPreviewSizes = getSupportedPreviewSizes(
+ cameraId, mCameraManager,
+ getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
+ mOrderedVideoSizes = getSupportedVideoSizes(cameraId, mCameraManager, PREVIEW_SIZE_BOUND);
+ mOrderedStillSizes = getSupportedStillSizes(cameraId, mCameraManager, null);
+ }
if (VERBOSE) {
Log.v(TAG, "Camera " + cameraId + " is opened");
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2MultiViewTestCase.java b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2MultiViewTestCase.java
index 5d832d6..39bf0a5 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2MultiViewTestCase.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2MultiViewTestCase.java
@@ -362,9 +362,11 @@
mCameraId, mCameraListener, mHandler);
mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(mCameraId),
CheckLevel.ASSERT, /*collector*/null);
- mOrderedPreviewSizes = getSupportedPreviewSizes(
- mCameraId, mCameraManager,
- getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
+ if (mStaticInfo.isColorOutputSupported()) {
+ mOrderedPreviewSizes = getSupportedPreviewSizes(
+ mCameraId, mCameraManager,
+ getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
+ }
assertNotNull(String.format("Failed to open camera device ID: %s", mCameraId), mCamera);
}
@@ -414,4 +416,3 @@
}
}
}
-
diff --git a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
index d25f1f5..7330a4c 100644
--- a/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
+++ b/tests/tests/hardware/src/android/hardware/camera2/cts/testcases/Camera2SurfaceViewTestCase.java
@@ -563,14 +563,16 @@
mCollector.setCameraId(cameraId);
mStaticInfo = new StaticMetadata(mCameraManager.getCameraCharacteristics(cameraId),
CheckLevel.ASSERT, /*collector*/null);
- mOrderedPreviewSizes = getSupportedPreviewSizes(cameraId, mCameraManager,
- getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
- mOrderedVideoSizes = getSupportedVideoSizes(cameraId, mCameraManager, PREVIEW_SIZE_BOUND);
- mOrderedStillSizes = getSupportedStillSizes(cameraId, mCameraManager, null);
- // Use ImageFormat.YUV_420_888 for now. TODO: need figure out what's format for preview
- // in public API side.
- mMinPreviewFrameDurationMap =
+ if (mStaticInfo.isColorOutputSupported()) {
+ mOrderedPreviewSizes = getSupportedPreviewSizes(cameraId, mCameraManager,
+ getPreviewSizeBound(mWindowManager, PREVIEW_SIZE_BOUND));
+ mOrderedVideoSizes = getSupportedVideoSizes(cameraId, mCameraManager, PREVIEW_SIZE_BOUND);
+ mOrderedStillSizes = getSupportedStillSizes(cameraId, mCameraManager, null);
+ // Use ImageFormat.YUV_420_888 for now. TODO: need figure out what's format for preview
+ // in public API side.
+ mMinPreviewFrameDurationMap =
mStaticInfo.getAvailableMinFrameDurationsForFormatChecked(ImageFormat.YUV_420_888);
+ }
}
/**