Merge "media: test CamcorderProfile.getAll returning null" into tm-dev
diff --git a/apps/CameraITS/tests/sensor_fusion/test_preview_stabilization.py b/apps/CameraITS/tests/sensor_fusion/test_preview_stabilization.py
index 64b22d9..713d577 100644
--- a/apps/CameraITS/tests/sensor_fusion/test_preview_stabilization.py
+++ b/apps/CameraITS/tests/sensor_fusion/test_preview_stabilization.py
@@ -20,9 +20,9 @@
from mobly import test_runner
+import its_base_test
import camera_properties_utils
import image_processing_utils
-import its_base_test
import its_session_utils
import sensor_fusion_utils
import video_processing_utils
@@ -86,7 +86,7 @@
return recording_obj
-class PreviewStabilityTest(its_base_test.ItsBaseTest):
+class PreviewStabilizationTest(its_base_test.ItsBaseTest):
"""Tests if preview is stabilized.
Camera is moved in sensor fusion rig on an arc of 15 degrees.
@@ -115,7 +115,7 @@
camera_properties_utils.skip_unless(
first_api_level >= its_session_utils.ANDROID13_API_LEVEL,
'First API level should be {} or higher. Found {}.'.format(
- its_session_utils.ANDROID13_API_LEVEL, first_api_level))
+ its_session_utils.ANDROID13_API_LEVEL, first_api_level))
supported_stabilization_modes = props[
'android.control.availableVideoStabilizationModes'
@@ -124,7 +124,7 @@
camera_properties_utils.skip_unless(
supported_stabilization_modes is not None
and _PREVIEW_STABILIZATION_MODE_PREVIEW
- in supported_stabilization_modes,
+ in supported_stabilization_modes,
'Preview Stabilization not supported',
)
@@ -140,6 +140,7 @@
# List of video resolutions to test
supported_preview_sizes = cam.get_supported_preview_sizes(self.camera_id)
+ supported_preview_sizes.remove(video_processing_utils.QCIF_SIZE)
logging.debug('Supported preview resolutions: %s',
supported_preview_sizes)
@@ -201,10 +202,9 @@
sensor_fusion_utils.calc_max_rotation_angle(gyro_rots, 'Gyro')
)
logging.debug(
- 'Max deflection (degrees): gyro: %.2f, camera: %.2f',
- max_gyro_angles[-1],
- max_camera_angles[-1],
- )
+ 'Max deflection (degrees) %s: video: %.3f, gyro: %.3f, ratio: %.4f',
+ video_size, max_camera_angles[-1], max_gyro_angles[-1],
+ max_camera_angles[-1] / max_gyro_angles[-1])
# Assert phone is moved enough during test
if max_gyro_angles[-1] < _MIN_PHONE_MOVEMENT_ANGLE:
@@ -218,9 +218,10 @@
if max_camera_angle >= max_gyro_angles[i] * _VIDEO_STABILIZATION_FACTOR:
test_failures.append(
f'{supported_preview_sizes[i]} video not stabilized enough! '
- f'Max gyro angle: {max_gyro_angles[i]:.2f}, Max camera angle: '
- f'{max_camera_angle:.2f}, stabilization factor THRESH: '
- f'{_VIDEO_STABILIZATION_FACTOR}.')
+ f'Max video angle: {max_camera_angle:.3f}, '
+ f'Max gyro angle: {max_gyro_angles[i]:.3f}, '
+ f'ratio: {max_camera_angle/max_gyro_angles[i]:.4f} '
+ f'THRESH: {_VIDEO_STABILIZATION_FACTOR}.')
if test_failures:
raise AssertionError(test_failures)
diff --git a/apps/CameraITS/tests/sensor_fusion/test_video_stabilization.py b/apps/CameraITS/tests/sensor_fusion/test_video_stabilization.py
index 3a71fda..9417d18 100644
--- a/apps/CameraITS/tests/sensor_fusion/test_video_stabilization.py
+++ b/apps/CameraITS/tests/sensor_fusion/test_video_stabilization.py
@@ -22,9 +22,9 @@
from mobly import test_runner
import numpy as np
+import its_base_test
import camera_properties_utils
import image_processing_utils
-import its_base_test
import its_session_utils
import sensor_fusion_utils
import video_processing_utils
@@ -115,7 +115,7 @@
return recording_obj
-class VideoStabilityTest(its_base_test.ItsBaseTest):
+class VideoStabilizationTest(its_base_test.ItsBaseTest):
"""Tests if video is stabilized.
Camera is moved in sensor fusion rig on an arc of 15 degrees.
@@ -220,8 +220,10 @@
gyro_rots = _conv_acceleration_to_movement(gyro_events)
max_gyro_angles.append(sensor_fusion_utils.calc_max_rotation_angle(
gyro_rots, 'Gyro'))
- logging.debug('Max deflection (degrees): gyro: %.2f, camera: %.2f',
- max_gyro_angles[-1], max_camera_angles[-1])
+ logging.debug(
+ 'Max deflection (degrees) %s: video: %.3f, gyro: %.3f, ratio: %.4f',
+ video_quality, max_camera_angles[-1], max_gyro_angles[-1],
+ max_camera_angles[-1] / max_gyro_angles[-1])
# Assert phone is moved enough during test
if max_gyro_angles[-1] < _MIN_PHONE_MOVEMENT_ANGLE:
@@ -235,9 +237,10 @@
if max_camera_angle >= max_gyro_angles[i] * _VIDEO_STABILIZATION_FACTOR:
test_failures.append(
f'{tested_video_qualities[i]} video not stabilized enough! '
- f'Max gyro angle: {max_gyro_angles[i]:.2f}, Max camera angle: '
- f'{max_camera_angle:.2f}, stabilization factor THRESH: '
- f'{_VIDEO_STABILIZATION_FACTOR}.')
+ f'Max video angle: {max_camera_angle:.3f}, '
+ f'Max gyro angle: {max_gyro_angles[i]:.3f}, '
+ f'ratio: {max_camera_angle/max_gyro_angles[i]:.4f}, '
+ f'THRESH: {_VIDEO_STABILIZATION_FACTOR}.')
if test_failures:
raise AssertionError(test_failures)
diff --git a/apps/CameraITS/utils/video_processing_utils.py b/apps/CameraITS/utils/video_processing_utils.py
index b82e6c5..98d930b 100644
--- a/apps/CameraITS/utils/video_processing_utils.py
+++ b/apps/CameraITS/utils/video_processing_utils.py
@@ -33,6 +33,7 @@
'LOW',
'VGA'
)
+QCIF_SIZE = '176x144'
def extract_key_frames_from_video(log_path, video_file_name):
@@ -83,7 +84,7 @@
logging.debug('Extracted key frames: %s', key_frame_files)
logging.debug('Length of key_frame_files: %d', len(key_frame_files))
- if not len(key_frame_files):
+ if not key_frame_files:
raise AssertionError('No key frames extracted. Check source video.')
return key_frame_files
@@ -142,7 +143,7 @@
file_list = sorted(
[_ for _ in os.listdir(log_path) if (_.endswith(img_format)
and ffmpeg_image_name in _)])
- if not len(file_list):
+ if not file_list:
raise AssertionError('No frames extracted. Check source video.')
return file_list
diff --git a/apps/CtsVerifier/res/layout-port/camera_video.xml b/apps/CtsVerifier/res/layout-port/camera_video.xml
new file mode 100644
index 0000000..2a90e67
--- /dev/null
+++ b/apps/CtsVerifier/res/layout-port/camera_video.xml
@@ -0,0 +1,136 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!-- Copyright (C) 2022 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="vertical"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent">
+
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="fill_parent"
+ android:layout_height="0dp"
+ android:layout_weight="1" >
+
+ <LinearLayout
+ android:orientation="vertical"
+ android:layout_width="0dp"
+ android:layout_height="fill_parent"
+ android:layout_weight="2" >
+
+ <Spinner
+ android:id="@+id/cameras_selection"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"/>
+ <Spinner
+ android:id="@+id/resolution_selection"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"/>
+
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="match_parent"
+ android:layout_height="0dp"
+ android:layout_weight="1" >
+
+ <Button
+ android:id="@+id/record_button"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/record_button_text"/>
+ <Button
+ android:id="@+id/next_button"
+ android:layout_height="wrap_content"
+ android:layout_width="wrap_content"
+ android:text="@string/next_button_text" />
+ </LinearLayout>
+
+ <LinearLayout
+ android:layout_width="match_parent"
+ android:layout_height="0dp"
+ android:layout_weight="2" >
+
+ <TextView
+ android:id="@+id/status_label"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/status_ready"
+ android:padding="2dp"
+ android:textSize="16sp"
+ android:gravity="center" />
+ </LinearLayout>
+
+ </LinearLayout>
+
+ </LinearLayout>
+
+ <LinearLayout
+ android:orientation="horizontal"
+ android:layout_width="fill_parent"
+ android:layout_height="0dp"
+ android:layout_weight="1" >
+
+ <LinearLayout
+ android:orientation="vertical"
+ android:layout_width="0dp"
+ android:layout_height="fill_parent"
+ android:layout_weight="3"
+ android:gravity="center" >
+
+ <TextureView
+ android:id="@+id/video_capture"
+ android:layout_height="0dp"
+ android:layout_width="fill_parent"
+ android:layout_weight="3" />
+ <TextView
+ android:id="@+id/camera_video_capture_label"
+ android:layout_height="wrap_content"
+ android:layout_width="fill_parent"
+ android:text="@string/video_capture_label"
+ android:padding="2dp"
+ android:textSize="16sp"
+ android:gravity="center" />
+
+ </LinearLayout>
+ <LinearLayout
+ android:orientation="vertical"
+ android:layout_width="0dp"
+ android:layout_height="fill_parent"
+ android:layout_weight="3"
+ android:gravity="center" >
+
+ <VideoView
+ android:id="@+id/video_playback"
+ android:layout_height="0dp"
+ android:layout_width="fill_parent"
+ android:layout_weight="3" />
+ <TextView
+ android:id="@+id/camera_video_playback_label"
+ android:layout_height="wrap_content"
+ android:layout_width="fill_parent"
+ android:text="@string/video_playback_label"
+ android:padding="2dp"
+ android:textSize="16sp"
+ android:gravity="center" />
+
+ </LinearLayout>
+
+ </LinearLayout>
+
+ <include layout="@layout/pass_fail_buttons" />
+
+</LinearLayout>
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/camera/fov/PhotoCaptureActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/camera/fov/PhotoCaptureActivity.java
index 406a305..e27ae0a 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/camera/fov/PhotoCaptureActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/camera/fov/PhotoCaptureActivity.java
@@ -23,6 +23,7 @@
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
+import android.graphics.Matrix;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PictureCallback;
@@ -183,8 +184,9 @@
SelectableResolution resolution = mSupportedResolutions.get(position);
switchToCamera(resolution, false);
- // It should be guaranteed that the FOV is correctly updated after setParameters().
- mReportedFovPrePictureTaken = mCamera.getParameters().getHorizontalViewAngle();
+ // It should be guaranteed that the FOV is correctly updated after
+ // setParameters().
+ mReportedFovPrePictureTaken = getCameraFov(resolution.cameraId);
mResolutionSpinnerIndex = position;
startPreview();
@@ -271,8 +273,8 @@
@Override
public void onPictureTaken(byte[] data, Camera camera) {
File pictureFile = getPictureFile(this);
- Camera.Parameters params = mCamera.getParameters();
- mReportedFovDegrees = params.getHorizontalViewAngle();
+
+ mReportedFovDegrees = getCameraFov(mSelectedResolution.cameraId);
// Show error if FOV does not match the value reported before takePicture().
if (mReportedFovPrePictureTaken != mReportedFovDegrees) {
@@ -436,6 +438,36 @@
initializeCamera(true);
}
+ private void setPreviewTransform(Size previewSize) {
+ int sensorRotation = mPreviewOrientation;
+ float selectedPreviewAspectRatio;
+ if (sensorRotation == 0 || sensorRotation == 180) {
+ selectedPreviewAspectRatio = (float) previewSize.width
+ / (float) previewSize.height;
+ } else {
+ selectedPreviewAspectRatio = (float) previewSize.height
+ / (float) previewSize.width;
+ }
+
+ Matrix transform = new Matrix();
+ float viewAspectRatio = (float) mPreviewView.getMeasuredWidth()
+ / (float) mPreviewView.getMeasuredHeight();
+ float scaleX = 1.0f, scaleY = 1.0f;
+ float translateX = 0, translateY = 0;
+ if (selectedPreviewAspectRatio > viewAspectRatio) {
+ scaleY = viewAspectRatio / selectedPreviewAspectRatio;
+ translateY = (float) mPreviewView.getMeasuredHeight() / 2
+ - (float) mPreviewView.getMeasuredHeight() * scaleY / 2;
+ } else {
+ scaleX = selectedPreviewAspectRatio / viewAspectRatio;
+ translateX = (float) mPreviewView.getMeasuredWidth() / 2
+ - (float) mPreviewView.getMeasuredWidth() * scaleX / 2;
+ }
+ transform.postScale(scaleX, scaleY);
+ transform.postTranslate(translateX, translateY);
+ mPreviewView.setTransform(transform);
+ }
+
private void initializeCamera(boolean startPreviewAfterInit) {
if (mCamera == null || mPreviewTexture == null) {
return;
@@ -470,6 +502,7 @@
if (selectedPreviewSize != null) {
params.setPreviewSize(selectedPreviewSize.width, selectedPreviewSize.height);
mCamera.setParameters(params);
+ setPreviewTransform(selectedPreviewSize);
mCameraInitialized = true;
}
@@ -561,21 +594,25 @@
return result;
}
+ private int getDisplayRotation() {
+ int displayRotation = getDisplay().getRotation();
+ int displayRotationDegrees = 0;
+ switch (displayRotation) {
+ case Surface.ROTATION_0: displayRotationDegrees = 0; break;
+ case Surface.ROTATION_90: displayRotationDegrees = 90; break;
+ case Surface.ROTATION_180: displayRotationDegrees = 180; break;
+ case Surface.ROTATION_270: displayRotationDegrees = 270; break;
+ }
+ return displayRotationDegrees;
+ }
+
private void calculateOrientations(Activity activity,
int cameraId, android.hardware.Camera camera) {
android.hardware.Camera.CameraInfo info =
new android.hardware.Camera.CameraInfo();
android.hardware.Camera.getCameraInfo(cameraId, info);
- int rotation = activity.getWindowManager().getDefaultDisplay()
- .getRotation();
- int degrees = 0;
- switch (rotation) {
- case Surface.ROTATION_0: degrees = 0; break;
- case Surface.ROTATION_90: degrees = 90; break;
- case Surface.ROTATION_180: degrees = 180; break;
- case Surface.ROTATION_270: degrees = 270; break;
- }
+ int degrees = getDisplayRotation();
if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
mJpegOrientation = (info.orientation + degrees) % 360;
mPreviewOrientation = (360 - mJpegOrientation) % 360; // compensate the mirror
@@ -603,4 +640,12 @@
}
return false;
}
+
+ private float getCameraFov(int cameraId) {
+ if (mPreviewOrientation == 0 || mPreviewOrientation == 180) {
+ return mCamera.getParameters().getHorizontalViewAngle();
+ } else {
+ return mCamera.getParameters().getVerticalViewAngle();
+ }
+ }
}
diff --git a/tests/media/src/android/mediav2/cts/DecodeGlAccuracyTest.java b/tests/media/src/android/mediav2/cts/DecodeGlAccuracyTest.java
index c6e0ae6..83cc6f4 100644
--- a/tests/media/src/android/mediav2/cts/DecodeGlAccuracyTest.java
+++ b/tests/media/src/android/mediav2/cts/DecodeGlAccuracyTest.java
@@ -33,6 +33,7 @@
import java.io.IOException;
import java.nio.ByteBuffer;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
@@ -45,14 +46,15 @@
* data in compressed YUV format. The output of the decoders is shared with OpenGL
* as external textures. And OpenGL outputs RGB pixels. The class validates whether
* the conversion of input YUV to output RGB is in accordance with the chosen color
- * aspects.
+ * aspects. Video files used in the test do not have any color aspects info coded in
+ * the bitstreams
*/
@RunWith(Parameterized.class)
public class DecodeGlAccuracyTest extends CodecDecoderTestBase {
- private final String LOG_TAG = DecodeGlAccuracyTest.class.getSimpleName();
+ private static final String LOG_TAG = DecodeGlAccuracyTest.class.getSimpleName();
// Allowed color tolerance to account for differences in the conversion process
- private final int ALLOWED_COLOR_DELTA = 8;
+ private static final int ALLOWED_COLOR_DELTA = 8;
// The test video assets were generated with a set of color bars.
// Depending on the color aspects, the values from OpenGL pbuffer
@@ -64,18 +66,7 @@
// RGB = Transpose(FLOOR_CLIP_PIXEL(CONV_CSC * (Transpose(YUV) - LVL_OFFSET)))
// The matrices LVL_OFFSET and CONV_CSC for different color aspects are below.
//
- // YUV values in the 8bit color bar test videos
- // {{126, 191, 230},
- // {98, 104, 204},
- // {180, 20, 168},
- // {121, 109, 60},
- // {114, 179, 172},
- // {133, 138, 118},
- // {183, 93, 153},
- // {203, 20, 33},
- // {147, 131, 183},
- // {40, 177, 202},
- // {170, 82, 96},
+ // YUV values in the 8bit color bar test videos are in COLOR_BARS_YUV below
//
// The color conversion matrices (CONV_CSC) for the RGB equation above:
// MULTIPLY_ROW_WISE_LR = Transpose({255/219, 255/224, 255/224})
@@ -94,8 +85,22 @@
// LVL_OFFSET_LR = Transpose({16, 128, 128})
// LVL_OFFSET_FR = Transpose({0, 128, 128})
+ private static final int[][] COLOR_BARS_YUV = new int[][]{
+ {126, 191, 230},
+ {98, 104, 204},
+ {180, 20, 168},
+ {121, 109, 60},
+ {114, 179, 172},
+ {133, 138, 118},
+ {183, 93, 153},
+ {203, 20, 33},
+ {147, 131, 183},
+ {40, 177, 202},
+ {170, 82, 96},
+ };
+
// Reference RGB values for 601 Limited Range
- private final int[][] mColorBars601LR = new int[][]{
+ private static final int[][] COLOR_BARS_601LR = new int[][]{
{255, 17, 252},
{219, 40, 44},
{255, 196, 0},
@@ -109,7 +114,7 @@
{127, 219, 82},
};
// Reference RGB values for 601 Full Range
- private final int[][] mColorBars601FR = new int[][]{
+ private static final int[][] COLOR_BARS_601FR = new int[][]{
{255, 31, 237},
{204, 51, 55},
{236, 188, 0},
@@ -123,7 +128,7 @@
{125, 208, 88},
};
// Reference RGB values for 709 Limited Range
- private final int[][] mColorBars709LR = new int[][]{
+ private static final int[][] COLOR_BARS_709LR = new int[][]{
{255, 57, 255},
{234, 57, 42},
{255, 188, 0},
@@ -137,6 +142,11 @@
{120, 202, 78},
};
+ // The test videos were generated with the above color bars. Each bar is of width 16.
+ private static final int COLOR_BAR_WIDTH = 16;
+ private static final int COLOR_BAR_OFFSET_X = 8;
+ private static final int COLOR_BAR_OFFSET_Y = 64;
+
private int[][] mColorBars;
private final String mCompName;
@@ -146,45 +156,46 @@
private final int mRange;
private final int mStandard;
private final int mTransferCurve;
+ private final boolean mUseYuvSampling;
private OutputSurface mEGLWindowOutSurface;
-
- // The test videos were generated with the above color bars. Each bar is of
- // width 16.
- private final int mColorBarWidth = 16;
- private final int xOffset = 8;
- private final int yOffset = 64;
private int mBadFrames = 0;
public DecodeGlAccuracyTest(String decoder, String mediaType, String fileName, int range,
- int standard, int transfer) {
+ int standard, int transfer, boolean useYuvSampling) {
super(null, mediaType, null);
mCompName = decoder;
mFileName = fileName;
mRange = range;
mStandard = standard;
mTransferCurve = transfer;
+ mUseYuvSampling = useYuvSampling;
- mColorBars = mColorBars601LR;
- if ((mStandard == MediaFormat.COLOR_STANDARD_BT601_NTSC) &&
- (mRange == MediaFormat.COLOR_RANGE_LIMITED)) {
- mColorBars = mColorBars601LR;
- } else if ((mStandard == MediaFormat.COLOR_STANDARD_BT601_NTSC) &&
- (mRange == MediaFormat.COLOR_RANGE_FULL)) {
- mColorBars = mColorBars601FR;
- } else if ((mStandard == MediaFormat.COLOR_STANDARD_BT709) &&
- (mRange == MediaFormat.COLOR_RANGE_LIMITED)) {
- mColorBars = mColorBars709LR;
+ if (!mUseYuvSampling) {
+ mColorBars = COLOR_BARS_601LR;
+ if ((mStandard == MediaFormat.COLOR_STANDARD_BT601_NTSC) &&
+ (mRange == MediaFormat.COLOR_RANGE_LIMITED)) {
+ mColorBars = COLOR_BARS_601LR;
+ } else if ((mStandard == MediaFormat.COLOR_STANDARD_BT601_NTSC) &&
+ (mRange == MediaFormat.COLOR_RANGE_FULL)) {
+ mColorBars = COLOR_BARS_601FR;
+ } else if ((mStandard == MediaFormat.COLOR_STANDARD_BT709) &&
+ (mRange == MediaFormat.COLOR_RANGE_LIMITED)) {
+ mColorBars = COLOR_BARS_709LR;
+ } else {
+ Log.e(LOG_TAG, "Unsupported Color Aspects.");
+ }
} else {
- Log.e(LOG_TAG, "Unsupported Color Aspects.");
+ mColorBars = COLOR_BARS_YUV;
}
}
- @Parameterized.Parameters(name = "{index}({0}_{1}_{3}_{4}_{5})")
+ @Parameterized.Parameters(name = "{index}({0}_{1}_{3}_{4}_{5}_{6})")
public static Collection<Object[]> input() {
final boolean isEncoder = false;
final boolean needAudio = false;
final boolean needVideo = true;
+
final List<Object[]> argsList = Arrays.asList(new Object[][]{
// mediaType, asset, range, standard, transfer
// 601LR
@@ -255,7 +266,18 @@
// Note: OpenGL is not required to support 709 FR. So we are not testing it.
});
- return CodecTestBase.prepareParamList(argsList, isEncoder, needAudio, needVideo,
+ final List<Object[]> exhaustiveArgsList = new ArrayList<>();
+ for (Object[] arg : argsList) {
int argLength = arg.length;
+ boolean[] boolStates = {true, false};
+ for (boolean useYuvSampling : boolStates) {
+ Object[] testArgs = new Object[argLength + 1];
+ System.arraycopy(arg, 0, testArgs, 0, argLength);
+ testArgs[argLength] = useYuvSampling;
+ exhaustiveArgsList.add(testArgs);
+ }
+ }
+ return CodecTestBase.prepareParamList(exhaustiveArgsList, isEncoder, needAudio, needVideo,
false);
}
@@ -268,8 +290,8 @@
ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4);
boolean frameFailed = false;
for (int i = 0; i < mColorBars.length; i++) {
- int x = mColorBarWidth * i + xOffset;
- int y = yOffset;
+ int x = COLOR_BAR_WIDTH * i + COLOR_BAR_OFFSET_X;
+ int y = COLOR_BAR_OFFSET_Y;
GLES20.glReadPixels(x, y, 1, 1, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixelBuf);
int r = pixelBuf.get(0) & 0xff;
int g = pixelBuf.get(1) & 0xff;
@@ -337,7 +359,7 @@
mWidth = format.getInteger(MediaFormat.KEY_WIDTH);
mHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
- mEGLWindowOutSurface = new OutputSurface(mWidth, mHeight, false);
+ mEGLWindowOutSurface = new OutputSurface(mWidth, mHeight, false, mUseYuvSampling);
mSurface = mEGLWindowOutSurface.getSurface();
mCodec = MediaCodec.createByCodecName(mCompName);
diff --git a/tests/media/src/android/mediav2/cts/OutputSurface.java b/tests/media/src/android/mediav2/cts/OutputSurface.java
index fb962a7..03856a2 100644
--- a/tests/media/src/android/mediav2/cts/OutputSurface.java
+++ b/tests/media/src/android/mediav2/cts/OutputSurface.java
@@ -65,14 +65,18 @@
* to MediaCodec.configure().
*/
public OutputSurface(int width, int height, boolean useHighBitDepth) {
+ this(width, height, useHighBitDepth, /* useYuvSampling */ false);
+ }
+
+ public OutputSurface(int width, int height, boolean useHighBitDepth, boolean useYuvSampling) {
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException();
}
- eglSetup(width, height, useHighBitDepth);
+ eglSetup(width, height, useHighBitDepth, useYuvSampling);
makeCurrent();
- setup(this);
+ setup(this, useYuvSampling);
}
/**
@@ -80,23 +84,24 @@
* new one). Creates a Surface that can be passed to MediaCodec.configure().
*/
public OutputSurface() {
- setup(this);
+ setup(this, /* useYuvSampling */ false);
}
public OutputSurface(final SurfaceTexture.OnFrameAvailableListener listener) {
- setup(listener);
+ setup(listener, /* useYuvSampling */ false);
}
/**
* Creates instances of TextureRender and SurfaceTexture, and a Surface associated
* with the SurfaceTexture.
*/
- private void setup(SurfaceTexture.OnFrameAvailableListener listener) {
+ private void setup(SurfaceTexture.OnFrameAvailableListener listener, boolean useYuvSampling) {
assertTrue(EGL14.eglGetCurrentContext() != EGL14.EGL_NO_CONTEXT);
assertTrue(EGL14.eglGetCurrentDisplay() != EGL14.EGL_NO_DISPLAY);
assertTrue(EGL14.eglGetCurrentSurface(EGL14.EGL_DRAW) != EGL14.EGL_NO_SURFACE);
assertTrue(EGL14.eglGetCurrentSurface(EGL14.EGL_READ) != EGL14.EGL_NO_SURFACE);
mTextureRender = new TextureRender();
+ mTextureRender.setUseYuvSampling(useYuvSampling);
mTextureRender.surfaceCreated();
// Even if we don't access the SurfaceTexture after the constructor returns, we
@@ -125,7 +130,7 @@
/**
* Prepares EGL. We want a GLES 2.0 context and a surface that supports pbuffer.
*/
- private void eglSetup(int width, int height, boolean useHighBitDepth) {
+ private void eglSetup(int width, int height, boolean useHighBitDepth, boolean useYuvSampling) {
mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
throw new RuntimeException("unable to get EGL14 display");
@@ -156,9 +161,10 @@
throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
}
- // Configure context for OpenGL ES 2.0.
+ // Configure context for OpenGL ES 3.0/2.0.
+ int eglContextClientVersion = useYuvSampling ? 3: 2;
int[] attrib_list = {
- EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
+ EGL14.EGL_CONTEXT_CLIENT_VERSION, eglContextClientVersion,
EGL14.EGL_NONE
};
mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
diff --git a/tests/media/src/android/mediav2/cts/TextureRender.java b/tests/media/src/android/mediav2/cts/TextureRender.java
index 4cda868..548ff03 100644
--- a/tests/media/src/android/mediav2/cts/TextureRender.java
+++ b/tests/media/src/android/mediav2/cts/TextureRender.java
@@ -49,7 +49,7 @@
private FloatBuffer mTriangleVertices;
- private static final String VERTEX_SHADER =
+ private static final String VERTEX_SHADER_RGB =
"uniform mat4 uMVPMatrix;\n" +
"uniform mat4 uSTMatrix;\n" +
"attribute vec4 aPosition;\n" +
@@ -60,7 +60,7 @@
" vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
"}\n";
- private static final String FRAGMENT_SHADER =
+ private static final String FRAGMENT_SHADER_RGB =
"#extension GL_OES_EGL_image_external : require\n" +
"precision mediump float;\n" + // highp here doesn't seem to matter
"varying vec2 vTextureCoord;\n" +
@@ -69,6 +69,30 @@
" gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
"}\n";
+ private static final String VERTEX_SHADER_YUV =
+ "#version 300 es\n" +
+ "uniform mat4 uMVPMatrix;\n" +
+ "uniform mat4 uSTMatrix;\n" +
+ "in vec4 aPosition;\n" +
+ "in vec4 aTextureCoord;\n" +
+ "out vec2 vTextureCoord;\n" +
+ "void main() {\n" +
+ " gl_Position = uMVPMatrix * aPosition;\n" +
+ " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
+ "}\n";
+
+ private static final String FRAGMENT_SHADER_YUV =
+ "#version 300 es\n" +
+ "#extension GL_OES_EGL_image_external : require\n" +
+ "#extension GL_EXT_YUV_target : require\n" +
+ "precision mediump float;\n" + // highp here doesn't seem to matter
+ "uniform __samplerExternal2DY2YEXT uTexSampler;\n" +
+ "in vec2 vTextureCoord;\n" +
+ "out vec4 outColor;\n" +
+ "void main() {\n" +
+ " outColor = texture(uTexSampler, vTextureCoord);\n" +
+ "}\n";
+
private float[] mMVPMatrix = new float[16];
private float[] mSTMatrix = new float[16];
@@ -78,6 +102,7 @@
private int muSTMatrixHandle;
private int maPositionHandle;
private int maTextureHandle;
+ private boolean mUseYuvSampling;
public TextureRender() {
mTriangleVertices = ByteBuffer.allocateDirect(
@@ -86,6 +111,11 @@
mTriangleVertices.put(mTriangleVerticesData).position(0);
Matrix.setIdentityM(mSTMatrix, 0);
+ mUseYuvSampling = false;
+ }
+
+ public void setUseYuvSampling(boolean useYuvSampling) {
+ mUseYuvSampling = useYuvSampling;
}
public int getTextureId() {
@@ -132,7 +162,11 @@
* Initializes GL state. Call this after the EGL surface has been created and made current.
*/
public void surfaceCreated() {
- mProgram = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
+ if (mUseYuvSampling == false) {
+ mProgram = createProgram(VERTEX_SHADER_RGB, FRAGMENT_SHADER_RGB);
+ } else {
+ mProgram = createProgram(VERTEX_SHADER_YUV, FRAGMENT_SHADER_YUV);
+ }
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
@@ -183,7 +217,7 @@
*/
public void changeFragmentShader(String fragmentShader) {
GLES20.glDeleteProgram(mProgram);
- mProgram = createProgram(VERTEX_SHADER, fragmentShader);
+ mProgram = createProgram(VERTEX_SHADER_RGB, fragmentShader);
if (mProgram == 0) {
throw new RuntimeException("failed creating program");
}
diff --git a/tests/mediapc/common/src/android/mediapc/cts/common/PerformanceClassEvaluator.java b/tests/mediapc/common/src/android/mediapc/cts/common/PerformanceClassEvaluator.java
index 1c3a4dd..c3a4fb5 100644
--- a/tests/mediapc/common/src/android/mediapc/cts/common/PerformanceClassEvaluator.java
+++ b/tests/mediapc/common/src/android/mediapc/cts/common/PerformanceClassEvaluator.java
@@ -20,10 +20,13 @@
import static org.junit.Assume.assumeTrue;
+import android.media.MediaFormat;
import android.os.Build;
import com.google.common.base.Preconditions;
+import java.util.ArrayList;
+import java.util.Arrays;
import org.junit.rules.TestName;
import java.util.HashSet;
@@ -90,36 +93,17 @@
.setId(RequirementConstants.LONG_RESOLUTION)
.setPredicate(RequirementConstants.INTEGER_GTE)
.addRequiredValue(Build.VERSION_CODES.S, 1920)
- .build();
- RequiredMeasurement<Integer> short_resolution = RequiredMeasurement
- .<Integer>builder()
- .setId(RequirementConstants.SHORT_RESOLUTION)
- .setPredicate(RequirementConstants.INTEGER_GTE)
- .addRequiredValue(Build.VERSION_CODES.S, 1080)
- .build();
-
- return new ResolutionRequirement(RequirementConstants.R7_1_1_1__H_2_1, long_resolution,
- short_resolution);
- }
-
- /**
- * [7.1.1.1/?] MUST have screen resolution of at least 1080p.
- */
- public static ResolutionRequirement createR7_1_1_1__TBD1() {
- RequiredMeasurement<Integer> long_resolution = RequiredMeasurement
- .<Integer>builder()
- .setId(RequirementConstants.LONG_RESOLUTION)
- .setPredicate(RequirementConstants.INTEGER_GTE)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 1920)
.build();
RequiredMeasurement<Integer> short_resolution = RequiredMeasurement
.<Integer>builder()
.setId(RequirementConstants.SHORT_RESOLUTION)
.setPredicate(RequirementConstants.INTEGER_GTE)
+ .addRequiredValue(Build.VERSION_CODES.S, 1080)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 1080)
.build();
- return new ResolutionRequirement(RequirementConstants.RTBD, long_resolution,
+ return new ResolutionRequirement(RequirementConstants.R7_1_1_1__H_2_1, long_resolution,
short_resolution);
}
}
@@ -159,27 +143,14 @@
.setId(RequirementConstants.DISPLAY_DENSITY)
.setPredicate(RequirementConstants.INTEGER_GTE)
.addRequiredValue(Build.VERSION_CODES.S, 400)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 400)
.build();
return new DensityRequirement(RequirementConstants.R7_1_1_3__H_2_1, display_density);
}
-
- /**
- * [7.1.1.3/?] MUST have screen density of at least 400 dpi.
- */
- public static DensityRequirement createR7_1_1_3__TBD2() {
- RequiredMeasurement<Integer> display_density = RequiredMeasurement
- .<Integer>builder()
- .setId(RequirementConstants.DISPLAY_DENSITY)
- .setPredicate(RequirementConstants.INTEGER_GTE)
- .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 400)
- .build();
-
- return new DensityRequirement(RequirementConstants.RTBD, display_density);
- }
}
- // used for requirements [7.6.1/H-1-1], [7.6.1/H-2-1], [7.6.1/H-3-1]
+ // used for requirements [7.6.1/H-1-1], [7.6.1/H-2-1]
public static class MemoryRequirement extends Requirement {
private static final String TAG = MemoryRequirement.class.getSimpleName();
@@ -208,39 +179,23 @@
}
/**
- * [7.6.1/H-2-1] MUST have at least 6 GB of physical memory.
+ * [7.6.1/H-2-1] MUST have at least 6/8 GB of physical memory.
*/
public static MemoryRequirement createR7_6_1__H_2_1() {
RequiredMeasurement<Long> physical_memory = RequiredMeasurement
.<Long>builder()
.setId(RequirementConstants.PHYSICAL_MEMORY)
.setPredicate(RequirementConstants.LONG_GTE)
- // Media performance requires 6 GB minimum RAM, but keeping the following to 5 GB
- // as activityManager.getMemoryInfo() returns around 5.4 GB on a 6 GB device.
+ // Media performance requires 6/8 GB minimum RAM, but keeping the following to
+ // 5/7 GB as activityManager.getMemoryInfo() returns around 5.4 GB on a 6 GB device.
.addRequiredValue(Build.VERSION_CODES.S, 5L * 1024L)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 7L * 1024L)
.build();
return new MemoryRequirement(RequirementConstants.R7_6_1__H_2_1, physical_memory);
}
-
- /**
- * [7.6.1/H-3-1] MUST have at least 8 GB of physical memory.
- */
- public static MemoryRequirement createR7_6_1__H_3_1() {
- RequiredMeasurement<Long> physical_memory = RequiredMeasurement
- .<Long>builder()
- .setId(RequirementConstants.PHYSICAL_MEMORY)
- .setPredicate(RequirementConstants.LONG_GTE)
- // Media performance requires 8 GB minimum RAM, but keeping the following to 7 GB
- // as activityManager.getMemoryInfo() returns around 7.4 GB on a 8 GB device.
- .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 7L * 1024L)
- .build();
-
- return new MemoryRequirement(RequirementConstants.R7_6_1__H_3_1, physical_memory);
- }
}
- // used for requirements [2.2.7.1/5.1/H-1-7], [2.2.7.1/5.1/H-1-8], [2.2.7.1/5.1/H-1-?]
public static class CodecInitLatencyRequirement extends Requirement {
private static final String TAG = CodecInitLatencyRequirement.class.getSimpleName();
@@ -293,40 +248,38 @@
codec_init_latency);
}
- // TODO(b/218771970): Update CDD section, change RequirementConstants.RTBD to appropirate
- // requirement id once finalized, ex: RequirementConstants.R5_1__H_1_<something>
/**
- * [2.2.7.1/5.1/H-1-?] Codec initialization latency of 40ms or less for a 1080p or
+ * [2.2.7.1/5.1/H-1-12] Codec initialization latency of 40ms or less for a 1080p or
* smaller video decoding session for all hardware video encoders when under load. Load
* here is defined as a concurrent 1080p to 720p video-only transcoding session using
* hardware video codecs together with the 1080p audio-video recording initialization.
*/
- public static CodecInitLatencyRequirement createR5_1__H_1_TBD1() {
+ public static CodecInitLatencyRequirement createR5_1__H_1_12() {
RequiredMeasurement<Long> codec_init_latency =
RequiredMeasurement.<Long>builder().setId(RequirementConstants.CODEC_INIT_LATENCY)
.setPredicate(RequirementConstants.LONG_LTE)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 40L)
.build();
- return new CodecInitLatencyRequirement(RequirementConstants.RTBD, codec_init_latency);
+ return new CodecInitLatencyRequirement(RequirementConstants.R5_1__H_1_12,
+ codec_init_latency);
}
- // TODO(b/218771970): Update CDD section, change RequirementConstants.RTBD to appropirate
- // requirement id once finalized, ex: RequirementConstants.R5_1__H_1_<something>
/**
- * [2.2.7.1/5.1/H-1-?] Codec initialization latency of 30ms or less for a 128kbps or
+ * [2.2.7.1/5.1/H-1-13] Codec initialization latency of 30ms or less for a 128kbps or
* lower bitrate audio decoding session for all audio encoders when under load. Load here
* is defined as a concurrent 1080p to 720p video-only transcoding session using hardware
* video codecs together with the 1080p audio-video recording initialization.
*/
- public static CodecInitLatencyRequirement createR5_1__H_1_TBD2() {
+ public static CodecInitLatencyRequirement createR5_1__H_1_13() {
RequiredMeasurement<Long> codec_init_latency =
RequiredMeasurement.<Long>builder().setId(RequirementConstants.CODEC_INIT_LATENCY)
.setPredicate(RequirementConstants.LONG_LTE)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 30L)
.build();
- return new CodecInitLatencyRequirement(RequirementConstants.RTBD, codec_init_latency);
+ return new CodecInitLatencyRequirement(RequirementConstants.R5_1__H_1_13,
+ codec_init_latency);
}
}
@@ -455,9 +408,6 @@
}
}
- // TODO(b/218771970): Add cdd annotation, change RequirementConstants.RTBD to appropirate
- // requirement id once finalized
- // used for requirements [?]
public static class VideoCodecRequirement extends Requirement {
private static final String TAG = VideoCodecRequirement.class.getSimpleName();
@@ -478,7 +428,7 @@
}
/**
- * [?] Must have at least 1 HW video decoder supporting 4K60
+ * [2.2.7.1/5.1/H-1-15] Must have at least 1 HW video decoder supporting 4K60
*/
public static VideoCodecRequirement createR4k60HwDecoder() {
RequiredMeasurement<Integer> requirement = RequiredMeasurement
@@ -488,11 +438,11 @@
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 1)
.build();
- return new VideoCodecRequirement(RequirementConstants.RTBD, requirement);
+ return new VideoCodecRequirement(RequirementConstants.R5_1__H_1_15, requirement);
}
/**
- * [?] Must have at least 1 HW video encoder supporting 4K60
+ * [2.2.7.1/5.1/H-1-16] Must have at least 1 HW video encoder supporting 4K60
*/
public static VideoCodecRequirement createR4k60HwEncoder() {
RequiredMeasurement<Integer> requirement = RequiredMeasurement
@@ -502,11 +452,11 @@
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 1)
.build();
- return new VideoCodecRequirement(RequirementConstants.RTBD, requirement);
+ return new VideoCodecRequirement(RequirementConstants.R5_1__H_1_16, requirement);
}
/**
- * [?] AV1 Hardware decoder: Main 10, Level 4.1, Film Grain
+ * [2.2.7.1/5.1/H-1-14] AV1 Hardware decoder: Main 10, Level 4.1, Film Grain
*/
public static VideoCodecRequirement createRAV1DecoderReq() {
RequiredMeasurement<Boolean> requirement = RequiredMeasurement
@@ -516,7 +466,339 @@
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.build();
- return new VideoCodecRequirement(RequirementConstants.RTBD, requirement);
+ return new VideoCodecRequirement(RequirementConstants.R5_1__H_1_14, requirement);
+ }
+ }
+
+ // used for requirements [2.2.7.1/5.1/H-1-1], [2.2.7.1/5.1/H-1-2], [2.2.7.1/5.1/H-1-3],
+ // [2.2.7.1/5.1/H-1-4], [2.2.7.1/5.1/H-1-5], [2.2.7.1/5.1/H-1-6], [2.2.7.1/5.1/H-1-9],
+ // [2.2.7.1/5.1/H-1-10]
+ public static class ConcurrentCodecRequirement extends Requirement {
+ private static final String TAG = ConcurrentCodecRequirement.class.getSimpleName();
+ // allowed tolerance in measured fps vs expected fps in percentage, i.e. codecs achieving
+ // fps that is greater than (FPS_TOLERANCE_FACTOR * expectedFps) will be considered as
+ // passing the test
+ private static final double FPS_TOLERANCE_FACTOR = 0.95;
+ private static final double FPS_30_TOLERANCE = 30.0 * FPS_TOLERANCE_FACTOR;
+ static final int REQUIRED_MIN_CONCURRENT_INSTANCES = 6;
+ static final int REQUIRED_MIN_CONCURRENT_INSTANCES_FOR_VP9 = 2;
+
+ private ConcurrentCodecRequirement(String id, RequiredMeasurement<?> ... reqs) {
+ super(id, reqs);
+ }
+
+ public void setConcurrentInstances(int concurrentInstances) {
+ this.setMeasuredValue(RequirementConstants.CONCURRENT_SESSIONS,
+ concurrentInstances);
+ }
+
+ public void setConcurrentFps(double achievedFps) {
+ this.setMeasuredValue(RequirementConstants.CONCURRENT_FPS, achievedFps);
+ }
+
+ // copied from android.mediapc.cts.getReqMinConcurrentInstances due to build issues on aosp
+ public static int getReqMinConcurrentInstances(int performanceClass, String mimeType1,
+ String mimeType2, int resolution) {
+ ArrayList<String> MEDIAPC_CONCURRENT_CODECS_R = new ArrayList<>(
+ Arrays.asList(MediaFormat.MIMETYPE_VIDEO_AVC, MediaFormat.MIMETYPE_VIDEO_HEVC));
+ ArrayList<String> MEDIAPC_CONCURRENT_CODECS = new ArrayList<>(Arrays
+ .asList(MediaFormat.MIMETYPE_VIDEO_AVC, MediaFormat.MIMETYPE_VIDEO_HEVC,
+ MediaFormat.MIMETYPE_VIDEO_VP9, MediaFormat.MIMETYPE_VIDEO_AV1));
+
+ if (performanceClass >= Build.VERSION_CODES.TIRAMISU) {
+ return resolution >= 1080 ? REQUIRED_MIN_CONCURRENT_INSTANCES : 0;
+ } else if (performanceClass == Build.VERSION_CODES.S) {
+ if (resolution >= 1080) {
+ return 0;
+ }
+ if (MEDIAPC_CONCURRENT_CODECS.contains(mimeType1) && MEDIAPC_CONCURRENT_CODECS
+ .contains(mimeType2)) {
+ if (MediaFormat.MIMETYPE_VIDEO_VP9.equalsIgnoreCase(mimeType1)
+ || MediaFormat.MIMETYPE_VIDEO_VP9.equalsIgnoreCase(mimeType2)) {
+ return REQUIRED_MIN_CONCURRENT_INSTANCES_FOR_VP9;
+ } else {
+ return REQUIRED_MIN_CONCURRENT_INSTANCES;
+ }
+ } else {
+ return 0;
+ }
+ } else if (performanceClass == Build.VERSION_CODES.R) {
+ if (resolution >= 1080) {
+ return 0;
+ }
+ if (MEDIAPC_CONCURRENT_CODECS_R.contains(mimeType1) && MEDIAPC_CONCURRENT_CODECS_R
+ .contains(mimeType2)) {
+ return REQUIRED_MIN_CONCURRENT_INSTANCES;
+ } else {
+ return 0;
+ }
+ } else {
+ return 0;
+ }
+ }
+
+ private static double getReqMinConcurrentFps(int performanceClass, String mimeType1,
+ String mimeType2, int resolution) {
+ return FPS_30_TOLERANCE * getReqMinConcurrentInstances(performanceClass, mimeType1,
+ mimeType2, resolution);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-1] MUST advertise the maximum number of hardware video decoder
+ * sessions that can be run concurrently in any codec combination via the
+ * CodecCapabilities.getMaxSupportedInstances() and VideoCapabilities
+ * .getSupportedPerformancePoints() methods.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_1_720p(String mimeType1,
+ String mimeType2, int resolution) {
+ RequiredMeasurement<Integer> maxInstances = RequiredMeasurement.<Integer>builder()
+ .setId(RequirementConstants.CONCURRENT_SESSIONS)
+ .setPredicate(RequirementConstants.INTEGER_GTE)
+ .addRequiredValue(Build.VERSION_CODES.R,
+ getReqMinConcurrentInstances(Build.VERSION_CODES.R, mimeType1, mimeType2,
+ resolution))
+ .addRequiredValue(Build.VERSION_CODES.S,
+ getReqMinConcurrentInstances(Build.VERSION_CODES.S, mimeType1, mimeType2,
+ resolution))
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_1, maxInstances);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-1] MUST advertise the maximum number of hardware video decoder
+ * sessions that can be run concurrently in any codec combination via the
+ * CodecCapabilities.getMaxSupportedInstances() and VideoCapabilities
+ * .getSupportedPerformancePoints() methods.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_1_1080p() {
+ RequiredMeasurement<Integer> maxInstances = RequiredMeasurement.<Integer>builder()
+ .setId(RequirementConstants.CONCURRENT_SESSIONS)
+ .setPredicate(RequirementConstants.INTEGER_GTE)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 6)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_1, maxInstances);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-2] MUST support 6 instances of hardware video decoder sessions (AVC,
+ * HEVC, VP9* or later) in any codec combination running concurrently at 720p(R,S)
+ * resolution@30 fps.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_2_720p(String mimeType1,
+ String mimeType2, int resolution) {
+ RequiredMeasurement<Double> reqConcurrentFps = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.CONCURRENT_FPS)
+ .setPredicate(RequirementConstants.DOUBLE_GTE)
+ .addRequiredValue(Build.VERSION_CODES.R,
+ getReqMinConcurrentFps(Build.VERSION_CODES.R, mimeType1, mimeType2, resolution))
+ .addRequiredValue(Build.VERSION_CODES.S,
+ getReqMinConcurrentFps(Build.VERSION_CODES.S, mimeType1, mimeType2, resolution))
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_2,
+ reqConcurrentFps);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-2] MUST support 6 instances of hardware video decoder sessions (AVC,
+ * HEVC, VP9* or later) in any codec combination running concurrently at 1080p(T)
+ * resolution@30 fps.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_2_1080p() {
+ RequiredMeasurement<Double> reqConcurrentFps = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.CONCURRENT_FPS)
+ .setPredicate(RequirementConstants.DOUBLE_GTE)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 6 * FPS_30_TOLERANCE)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_2,
+ reqConcurrentFps);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-3] MUST advertise the maximum number of hardware video encoder
+ * sessions that can be run concurrently in any codec combination via the
+ * CodecCapabilities.getMaxSupportedInstances() and VideoCapabilities
+ * .getSupportedPerformancePoints() methods.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_3_720p(String mimeType1,
+ String mimeType2, int resolution) {
+ RequiredMeasurement<Integer> maxInstances = RequiredMeasurement.<Integer>builder()
+ .setId(RequirementConstants.CONCURRENT_SESSIONS)
+ .setPredicate(RequirementConstants.INTEGER_GTE)
+ .addRequiredValue(Build.VERSION_CODES.R,
+ getReqMinConcurrentInstances(Build.VERSION_CODES.R, mimeType1, mimeType2,
+ resolution))
+ .addRequiredValue(Build.VERSION_CODES.S,
+ getReqMinConcurrentInstances(Build.VERSION_CODES.S, mimeType1, mimeType2,
+ resolution))
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_3, maxInstances);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-3] MUST advertise the maximum number of hardware video encoder
+ * sessions that can be run concurrently in any codec combination via the
+ * CodecCapabilities.getMaxSupportedInstances() and VideoCapabilities
+ * .getSupportedPerformancePoints() methods.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_3_1080p() {
+ RequiredMeasurement<Integer> maxInstances = RequiredMeasurement.<Integer>builder()
+ .setId(RequirementConstants.CONCURRENT_SESSIONS)
+ .setPredicate(RequirementConstants.INTEGER_GTE)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 6)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_3, maxInstances);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-4] MUST support 6 instances of hardware video encoder sessions (AVC,
+ * HEVC, VP9* or later) in any codec combination running concurrently at 720p(R,S)
+ * resolution@30 fps.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_4_720p() {
+ RequiredMeasurement<Double> reqConcurrentFps = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.CONCURRENT_FPS)
+ .setPredicate(RequirementConstants.DOUBLE_GTE)
+ // Requirement not asserted since encoder test runs in byte buffer mode
+ .addRequiredValue(Build.VERSION_CODES.R, 0.0)
+ .addRequiredValue(Build.VERSION_CODES.S, 0.0)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_4,
+ reqConcurrentFps);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-4] MUST support 6 instances of hardware video encoder sessions (AVC,
+ * HEVC, VP9* or later) in any codec combination running concurrently at 1080p(T)
+ * resolution@30 fps.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_4_1080p() {
+ RequiredMeasurement<Double> reqConcurrentFps = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.CONCURRENT_FPS)
+ .setPredicate(RequirementConstants.DOUBLE_GTE)
+ // Requirement not asserted since encoder test runs in byte buffer mode
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 0.0)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_4,
+ reqConcurrentFps);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-5] MUST advertise the maximum number of hardware video encoder and
+ * decoder sessions that can be run concurrently in any codec combination via the
+ * CodecCapabilities.getMaxSupportedInstances() and VideoCapabilities
+ * .getSupportedPerformancePoints() methods.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_5_720p(String mimeType1,
+ String mimeType2, int resolution) {
+ RequiredMeasurement<Integer> maxInstances = RequiredMeasurement.<Integer>builder()
+ .setId(RequirementConstants.CONCURRENT_SESSIONS)
+ .setPredicate(RequirementConstants.INTEGER_GTE)
+ .addRequiredValue(Build.VERSION_CODES.R,
+ getReqMinConcurrentInstances(Build.VERSION_CODES.R, mimeType1, mimeType2,
+ resolution))
+ .addRequiredValue(Build.VERSION_CODES.S,
+ getReqMinConcurrentInstances(Build.VERSION_CODES.S, mimeType1, mimeType2,
+ resolution))
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_5, maxInstances);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-5] MUST advertise the maximum number of hardware video encoder and
+ * decoder sessions that can be run concurrently in any codec combination via the
+ * CodecCapabilities.getMaxSupportedInstances() and VideoCapabilities
+ * .getSupportedPerformancePoints() methods.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_5_1080p() {
+ RequiredMeasurement<Integer> maxInstances = RequiredMeasurement.<Integer>builder()
+ .setId(RequirementConstants.CONCURRENT_SESSIONS)
+ .setPredicate(RequirementConstants.INTEGER_GTE)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 6)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_5, maxInstances);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-6] Support 6 instances of hardware video decoder and hardware video
+ * encoder sessions (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently
+ * at 720p(R,S) /1080p(T) @30fps resolution.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_6_720p(String mimeType1,
+ String mimeType2, int resolution) {
+ RequiredMeasurement<Double> reqConcurrentFps = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.CONCURRENT_FPS)
+ .setPredicate(RequirementConstants.DOUBLE_GTE)
+ // Test transcoding, fps calculated for encoder and decoder combined so req / 2
+ .addRequiredValue(Build.VERSION_CODES.R,
+ getReqMinConcurrentFps(Build.VERSION_CODES.R, mimeType1, mimeType2, resolution)
+ / 2)
+ .addRequiredValue(Build.VERSION_CODES.S,
+ getReqMinConcurrentFps(Build.VERSION_CODES.S, mimeType1, mimeType2, resolution)
+ / 2)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_6,
+ reqConcurrentFps);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-6] Support 6 instances of hardware video decoder and hardware video
+ * encoder sessions (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently
+ * at 720p(R,S) /1080p(T) @30fps resolution.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_6_1080p() {
+ RequiredMeasurement<Double> reqConcurrentFps = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.CONCURRENT_FPS)
+ .setPredicate(RequirementConstants.DOUBLE_GTE)
+ // Test transcoding, fps calculated for encoder and decoder combined so req / 2
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 6 * FPS_30_TOLERANCE / 2)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_6,
+ reqConcurrentFps);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-9] Support 2 instances of secure hardware video decoder sessions
+ * (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently at 1080p
+ * resolution@30fps.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_9() {
+ RequiredMeasurement<Double> reqConcurrentFps = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.CONCURRENT_FPS)
+ .setPredicate(RequirementConstants.DOUBLE_GTE)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 2 * FPS_30_TOLERANCE)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_9,
+ reqConcurrentFps);
+ }
+
+ /**
+ * [2.2.7.1/5.1/H-1-10] Support 3 instances of non-secure hardware video decoder sessions
+ * together with 1 instance of secure hardware video decoder session (4 instances total)
+ * (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently at 1080p
+ * resolution@30fps.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_10() {
+ RequiredMeasurement<Double> reqConcurrentFps = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.CONCURRENT_FPS)
+ .setPredicate(RequirementConstants.DOUBLE_GTE)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 4 * FPS_30_TOLERANCE)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.R5_1__H_1_10,
+ reqConcurrentFps);
}
}
@@ -593,8 +875,8 @@
}
/**
- * [?] Must support secure decoder when a corresponding AVC/VP9/HEVC or AV1 hardware
- * decoder is available
+ * [2.2.7.1/5.7/H-1-1] Must support secure decoder when a corresponding AVC/VP9/HEVC or AV1
+ * hardware decoder is available
*/
public static SecureCodecRequirement createRSecureDecodeSupport() {
RequiredMeasurement<Boolean> requirement = RequiredMeasurement
@@ -604,7 +886,7 @@
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.build();
- return new SecureCodecRequirement(RequirementConstants.RTBD, requirement);
+ return new SecureCodecRequirement(RequirementConstants.R5_7__H_1_1, requirement);
}
}
@@ -633,25 +915,13 @@
ResolutionRequirement.createR7_1_1_1__H_2_1());
}
- public ResolutionRequirement addR7_1_1_1__TBD1() {
- return this.<ResolutionRequirement>addRequirement(
- ResolutionRequirement.createR7_1_1_1__TBD1());
- }
-
public DensityRequirement addR7_1_1_3__H_2_1() {
return this.<DensityRequirement>addRequirement(DensityRequirement.createR7_1_1_3__H_2_1());
}
- public DensityRequirement addR7_1_1_3__TBD2() {
- return this.<DensityRequirement>addRequirement(DensityRequirement.createR7_1_1_3__TBD2());
- }
-
public MemoryRequirement addR7_6_1__H_2_1() {
return this.<MemoryRequirement>addRequirement(MemoryRequirement.createR7_6_1__H_2_1());
}
- public MemoryRequirement addR7_6_1__H_3_1() {
- return this.<MemoryRequirement>addRequirement(MemoryRequirement.createR7_6_1__H_3_1());
- }
public FrameDropRequirement addR5_3__H_1_1_R() {
return this.addRequirement(FrameDropRequirement.createR5_3__H_1_1_R());
@@ -677,12 +947,12 @@
return this.addRequirement(CodecInitLatencyRequirement.createR5_1__H_1_8());
}
- public CodecInitLatencyRequirement addR5_1__H_1_TBD1() {
- return this.addRequirement(CodecInitLatencyRequirement.createR5_1__H_1_TBD1());
+ public CodecInitLatencyRequirement addR5_1__H_1_12() {
+ return this.addRequirement(CodecInitLatencyRequirement.createR5_1__H_1_12());
}
- public CodecInitLatencyRequirement addR5_1__H_1_TBD2() {
- return this.addRequirement(CodecInitLatencyRequirement.createR5_1__H_1_TBD2());
+ public CodecInitLatencyRequirement addR5_1__H_1_13() {
+ return this.addRequirement(CodecInitLatencyRequirement.createR5_1__H_1_13());
}
public VideoCodecRequirement addR4k60HwEncoder() {
@@ -705,6 +975,72 @@
return this.addRequirement(SecureCodecRequirement.createRWidevineSupport());
}
+ public ConcurrentCodecRequirement addR5_1__H_1_1_720p(String mimeType1, String mimeType2,
+ int resolution) {
+ return this.addRequirement(
+ ConcurrentCodecRequirement.createR5_1__H_1_1_720p(mimeType1, mimeType2, resolution));
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_1_1080p() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_1_1080p());
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_2_720p(String mimeType1, String mimeType2,
+ int resolution) {
+ return this.addRequirement(
+ ConcurrentCodecRequirement.createR5_1__H_1_2_720p(mimeType1, mimeType2, resolution));
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_2_1080p() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_2_1080p());
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_3_720p(String mimeType1, String mimeType2,
+ int resolution) {
+ return this.addRequirement(
+ ConcurrentCodecRequirement.createR5_1__H_1_3_720p(mimeType1, mimeType2, resolution));
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_3_1080p() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_3_1080p());
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_4_720p() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_4_720p());
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_4_1080p() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_4_1080p());
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_5_720p(String mimeType1, String mimeType2,
+ int resolution) {
+ return this.addRequirement(
+ ConcurrentCodecRequirement.createR5_1__H_1_5_720p(mimeType1, mimeType2, resolution));
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_5_1080p() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_5_1080p());
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_6_720p(String mimeType1, String mimeType2,
+ int resolution) {
+ return this.addRequirement(
+ ConcurrentCodecRequirement.createR5_1__H_1_6_720p(mimeType1, mimeType2, resolution));
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_6_1080p() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_6_1080p());
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_9() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_9());
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_10() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_10());
+ }
+
public void submitAndCheck() {
boolean perfClassMet = true;
for (Requirement req: this.mRequirements) {
diff --git a/tests/mediapc/common/src/android/mediapc/cts/common/RequirementConstants.java b/tests/mediapc/common/src/android/mediapc/cts/common/RequirementConstants.java
index 0de6a99..5cffbda 100644
--- a/tests/mediapc/common/src/android/mediapc/cts/common/RequirementConstants.java
+++ b/tests/mediapc/common/src/android/mediapc/cts/common/RequirementConstants.java
@@ -35,9 +35,18 @@
public static final String R5_1__H_1_6 = "r5_1__h_1_6"; // 5.1/H-1-6
public static final String R5_1__H_1_7 = "r5_1__h_1_7"; // 5.1/H-1-7
public static final String R5_1__H_1_8 = "r5_1__h_1_8"; // 5.1/H-1-8
+ public static final String R5_1__H_1_9 = "r5_1__h_1_9"; // 5.1/H-1-9
+ public static final String R5_1__H_1_10 = "r5_1__h_1_10"; // 5.1/H-1-10
+ public static final String R5_1__H_1_11 = "r5_1__h_1_11"; // 5.1/H-1-11
+ public static final String R5_1__H_1_12 = "r5_1__h_1_12"; // 5.1/H-1-12
+ public static final String R5_1__H_1_13 = "r5_1__h_1_13"; // 5.1/H-1-13
+ public static final String R5_1__H_1_14 = "r5_1__h_1_14"; // 5.1/H-1-14
+ public static final String R5_1__H_1_15 = "r5_1__h_1_15"; // 5.1/H-1-15
+ public static final String R5_1__H_1_16 = "r5_1__h_1_16"; // 5.1/H-1-16
public static final String R5_3__H_1_1 = "r5_3__h_1_1"; // 5.3/H-1-1
public static final String R5_3__H_1_2 = "r5_3__h_1_2"; // 5.3/H-1-2
public static final String R5_6__H_1_1 = "r5_6__h_1_1"; // 5.6/H-1-1
+ public static final String R5_7__H_1_1 = "r5_7__h_1_1"; // 5.7/H-1-1
public static final String R7_5__H_1_1 = "r7_5__h_1_1"; // 7.5/H-1-1
public static final String R7_5__H_1_2 = "r7_5__h_1_2"; // 7.5/H-1-2
public static final String R7_5__H_1_3 = "r7_5__h_1_3"; // 7.5/H-1-3
@@ -63,7 +72,8 @@
public static final String R8_2__H_2_4 = "r8_2__h_2_4"; // 8.2/H-2-4
public static final String RTBD = "tbd"; // placeholder for requirements without a set id
- public static final String MAX_CONCURRENT_SESSIONS = "max_concurrent_sessions";
+ public static final String CONCURRENT_SESSIONS = "concurrent_sessions";
+ public static final String CONCURRENT_FPS = "concurrent_fps";
public static final String SUPPORTED_PERFORMANCE_POINTS = "supported_performance_points";
public static final String FRAMES_DROPPED = "frame_drops_per_30sec";
public static final String FRAME_RATE = "frame_rate";
@@ -92,6 +102,7 @@
public static final BiPredicate<Integer, Integer> INTEGER_LTE = RequirementConstants.lte();
public static final BiPredicate<Double, Double> DOUBLE_EQ = RequirementConstants.eq();
public static final BiPredicate<Boolean, Boolean> BOOLEAN_EQ = RequirementConstants.eq();
+ public static final BiPredicate<Double, Double> DOUBLE_GTE = RequirementConstants.gte();
/**
* Creates a >= predicate.
diff --git a/tests/mediapc/src/android/mediapc/cts/CodecInitializationLatencyTest.java b/tests/mediapc/src/android/mediapc/cts/CodecInitializationLatencyTest.java
index 0379090..5a4822d 100644
--- a/tests/mediapc/src/android/mediapc/cts/CodecInitializationLatencyTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/CodecInitializationLatencyTest.java
@@ -37,6 +37,7 @@
import android.media.MediaRecorder;
import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
+import android.os.SystemClock;
import android.util.Log;
import android.util.Pair;
import android.view.Surface;
@@ -269,7 +270,6 @@
}
}
- // TODO(b/218771970): Add cdd annotation
/**
* This test validates the initialization latency (time for codec create + configure) for
* audio and hw video codecs.
@@ -282,7 +282,9 @@
@Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
@CddTest(requirements = {
"2.2.7.1/5.1/H-1-7",
- "2.2.7.1/5.1/H-1-8",})
+ "2.2.7.1/5.1/H-1-8",
+ "2.2.7.1/5.1/H-1-12",
+ "2.2.7.1/5.1/H-1-13",})
public void testInitializationLatency() throws Exception {
MediaCodec codec = MediaCodec.createByCodecName(mCodecName);
boolean isEncoder = codec.getCodecInfo().isEncoder();
@@ -347,7 +349,7 @@
PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
PerformanceClassEvaluator.CodecInitLatencyRequirement r5_1__H_1_Latency =
isEncoder ? isAudio ? pce.addR5_1__H_1_8() : pce.addR5_1__H_1_7()
- : isAudio ? pce.addR5_1__H_1_TBD2() : pce.addR5_1__H_1_TBD1();
+ : isAudio ? pce.addR5_1__H_1_13() : pce.addR5_1__H_1_12();
r5_1__H_1_Latency.setCodecInitLatencyMs(initializationLatency);
@@ -435,14 +437,14 @@
MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo();
long enqueueTimeStamp = 0;
long dequeueTimeStamp = 0;
- long baseTimeStamp = System.nanoTime();
+ long baseTimeStamp = SystemClock.elapsedRealtimeNanos();
mCodec = MediaCodec.createByCodecName(mEncoderName);
resetContext(mIsAsync, false);
mAsyncHandle.setCallBack(mCodec, mIsAsync);
mCodec.configure(format, null, MediaCodec.CONFIGURE_FLAG_ENCODE, null);
- long configureTimeStamp = System.nanoTime();
+ long configureTimeStamp = SystemClock.elapsedRealtimeNanos();
mCodec.start();
- long startTimeStamp = System.nanoTime();
+ long startTimeStamp = SystemClock.elapsedRealtimeNanos();
if (mIsAsync) {
// We will keep on feeding the input to encoder until we see the first dequeued
// frame.
@@ -452,12 +454,12 @@
int bufferID = element.first;
MediaCodec.BufferInfo info = element.second;
if (info != null) {
- dequeueTimeStamp = System.nanoTime();
+ dequeueTimeStamp = SystemClock.elapsedRealtimeNanos();
dequeueOutput(bufferID, info);
break;
} else {
if (enqueueTimeStamp == 0) {
- enqueueTimeStamp = System.nanoTime();
+ enqueueTimeStamp = SystemClock.elapsedRealtimeNanos();
}
enqueueInput(bufferID);
}
@@ -469,14 +471,14 @@
int inputBufferId = mCodec.dequeueInputBuffer(Q_DEQ_TIMEOUT_US);
if (inputBufferId > 0) {
if (enqueueTimeStamp == 0) {
- enqueueTimeStamp = System.nanoTime();
+ enqueueTimeStamp = SystemClock.elapsedRealtimeNanos();
}
enqueueInput(inputBufferId);
}
}
int outputBufferId = mCodec.dequeueOutputBuffer(outInfo, Q_DEQ_TIMEOUT_US);
if (outputBufferId >= 0) {
- dequeueTimeStamp = System.nanoTime();
+ dequeueTimeStamp = SystemClock.elapsedRealtimeNanos();
dequeueOutput(outputBufferId, outInfo);
break;
}
@@ -531,14 +533,14 @@
MediaFormat format = setUpSource(mTestFile);
long enqueueTimeStamp = 0;
long dequeueTimeStamp = 0;
- long baseTimeStamp = System.nanoTime();
+ long baseTimeStamp = SystemClock.elapsedRealtimeNanos();
mCodec = MediaCodec.createByCodecName(mDecoderName);
resetContext(mIsAsync, false);
mAsyncHandle.setCallBack(mCodec, mIsAsync);
mCodec.configure(format, mSurface, 0, null);
- long configureTimeStamp = System.nanoTime();
+ long configureTimeStamp = SystemClock.elapsedRealtimeNanos();
mCodec.start();
- long startTimeStamp = System.nanoTime();
+ long startTimeStamp = SystemClock.elapsedRealtimeNanos();
if (mIsAsync) {
// We will keep on feeding the input to decoder until we see the first dequeued
// frame.
@@ -548,12 +550,12 @@
int bufferID = element.first;
MediaCodec.BufferInfo info = element.second;
if (info != null) {
- dequeueTimeStamp = System.nanoTime();
+ dequeueTimeStamp = SystemClock.elapsedRealtimeNanos();
dequeueOutput(bufferID, info);
break;
} else {
if (enqueueTimeStamp == 0) {
- enqueueTimeStamp = System.nanoTime();
+ enqueueTimeStamp = SystemClock.elapsedRealtimeNanos();
}
enqueueInput(bufferID);
}
@@ -565,14 +567,14 @@
int inputBufferId = mCodec.dequeueInputBuffer(Q_DEQ_TIMEOUT_US);
if (inputBufferId >= 0) {
if (enqueueTimeStamp == 0) {
- enqueueTimeStamp = System.nanoTime();
+ enqueueTimeStamp = SystemClock.elapsedRealtimeNanos();
}
enqueueInput(inputBufferId);
}
}
int outputBufferId = mCodec.dequeueOutputBuffer(outInfo, Q_DEQ_TIMEOUT_US);
if (outputBufferId >= 0) {
- dequeueTimeStamp = System.nanoTime();
+ dequeueTimeStamp = SystemClock.elapsedRealtimeNanos();
dequeueOutput(outputBufferId, outInfo);
break;
}
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiCodecPerfTestBase.java b/tests/mediapc/src/android/mediapc/cts/MultiCodecPerfTestBase.java
index 98c3347..ca7a17a 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiCodecPerfTestBase.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiCodecPerfTestBase.java
@@ -44,10 +44,7 @@
static final int REQUIRED_MIN_CONCURRENT_INSTANCES = 6;
static final int REQUIRED_MIN_CONCURRENT_INSTANCES_FOR_VP9 = 2;
static final int REQUIRED_MIN_CONCURRENT_SECURE_INSTANCES = 2;
- // allowed tolerance in measured fps vs expected fps in percentage, i.e. codecs achieving fps
- // that is greater than (FPS_TOLERANCE_FACTOR * expectedFps) will be considered as
- // passing the test
- static final double FPS_TOLERANCE_FACTOR = 0.95;
+
static ArrayList<String> mMimeList = new ArrayList<>();
static Map<String, String> mTestFiles = new HashMap<>();
static Map<String, String> m720pTestFiles = new HashMap<>();
@@ -89,8 +86,6 @@
String mTestFile;
final boolean mIsAsync;
- double mMaxFrameRate;
-
@Before
public void isPerformanceClassCandidate() {
Utils.assumeDeviceMeetsPerformanceClassPreconditions();
@@ -120,9 +115,11 @@
}
// Returns the max number of 30 fps instances that the given list of mimeCodecPairs
- // supports. It also checks that the each codec supports 180 fps PerformancePoint.
+ // supports. It also checks that the each codec supports a PerformancePoint that covers
+ // required number of 30 fps instances.
public int checkAndGetMaxSupportedInstancesForCodecCombinations(int height, int width,
- ArrayList<Pair<String, String>> mimeCodecPairs) throws IOException {
+ ArrayList<Pair<String, String>> mimeCodecPairs, int requiredMinInstances)
+ throws IOException {
int[] maxInstances = new int[mimeCodecPairs.size()];
int[] maxFrameRates = new int[mimeCodecPairs.size()];
int[] maxMacroBlockRates = new int[mimeCodecPairs.size()];
@@ -135,8 +132,7 @@
assertTrue(pps.size() > 0);
boolean hasVP9 = mimeCodecPair.first.equals(MediaFormat.MIMETYPE_VIDEO_VP9);
- int requiredFrameRate = getRequiredMinConcurrentInstances(hasVP9, mimeCodecPair.second,
- mimeCodecPair.first) * 30;
+ int requiredFrameRate = requiredMinInstances * 30;
maxInstances[loopCount] = cap.getMaxSupportedInstances();
PerformancePoint PPRes = new PerformancePoint(width, height, requiredFrameRate);
@@ -168,26 +164,13 @@
int minOfMaxFrameRates = maxFrameRates[0];
int minOfMaxMacroBlockRates = maxMacroBlockRates[0];
- // Allow a tolerance in expected frame rate
- mMaxFrameRate = minOfMaxFrameRates * FPS_TOLERANCE_FACTOR;
-
// Calculate how many 30fps max instances it can support from it's mMaxFrameRate
// amd maxMacroBlockRate. (assuming 16x16 macroblocks)
return Math.min(minOfMaxInstances, Math.min((int) (minOfMaxFrameRates / 30.0),
(int) (minOfMaxMacroBlockRates / ((width / 16) * (height / 16)) / 30.0)));
}
- public int getRequiredMinConcurrentInstances(boolean hasVP9) throws IOException {
- return getRequiredMinConcurrentInstances(hasVP9, null, null);
- }
-
- public int getRequiredMinConcurrentInstances(boolean hasVP9, String codecName, String mime)
- throws IOException {
- if (codecName != null && mime != null) {
- if (isSecureSupportedCodec(codecName, mime)) {
- return REQUIRED_MIN_CONCURRENT_SECURE_INSTANCES;
- }
- }
+ public int getRequiredMinConcurrentInstances720p(boolean hasVP9) throws IOException {
// Below T, VP9 requires 60 fps at 720p and minimum of 2 instances
if (!Utils.isTPerfClass() && hasVP9) {
return REQUIRED_MIN_CONCURRENT_INSTANCES_FOR_VP9;
@@ -203,21 +186,4 @@
codec.release();
return isSecureSupported;
}
-
- boolean codecSupportsPP(String codecName, String mime, PerformancePoint reqPP)
- throws IOException {
- MediaCodec codec = MediaCodec.createByCodecName(codecName);
- List<PerformancePoint> suppPPs =
- codec.getCodecInfo().getCapabilitiesForType(mime).getVideoCapabilities()
- .getSupportedPerformancePoints();
- assertTrue("Performance point not published by codec: " + codecName, suppPPs != null);
- boolean codecSupportsReqPP = false;
- for (PerformancePoint pp : suppPPs) {
- if (pp.covers(reqPP)) {
- codecSupportsReqPP = true;
- }
- }
- codec.release();
- return codecSupportsReqPP;
- }
}
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiDecoderPairPerfTest.java b/tests/mediapc/src/android/mediapc/cts/MultiDecoderPairPerfTest.java
index 6694a46..0c69346 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiDecoderPairPerfTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiDecoderPairPerfTest.java
@@ -20,8 +20,8 @@
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
+import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
-import android.os.Build;
import android.util.Pair;
import androidx.test.filters.LargeTest;
@@ -33,7 +33,9 @@
import com.android.compatibility.common.util.ResultUnit;
import org.junit.Assume;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -68,6 +70,9 @@
mSecondPair = secondPair;
}
+ @Rule
+ public final TestName mTestName = new TestName();
+
// Returns the list of params with two hardware (mime - decoder) pairs in both
// sync and async modes.
// Parameters {0}_{1}_{2} -- Pair(Mime DecoderName)_Pair(Mime DecoderName)_isAsync
@@ -110,7 +115,7 @@
boolean hasVP9 = mFirstPair.first.equals(MediaFormat.MIMETYPE_VIDEO_VP9) ||
mSecondPair.first.equals(MediaFormat.MIMETYPE_VIDEO_VP9);
- int requiredMinInstances = getRequiredMinConcurrentInstances(hasVP9);
+ int requiredMinInstances = getRequiredMinConcurrentInstances720p(hasVP9);
testCodec(m720pTestFiles, 720, 1280, requiredMinInstances);
}
@@ -122,99 +127,25 @@
*/
@LargeTest
@Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
- @CddTest(requirement = "2.2.7.1/5.1/H-1-1,H-1-2")
+ @CddTest(requirements = {
+ "2.2.7.1/5.1/H-1-1",
+ "2.2.7.1/5.1/H-1-2",
+ "2.2.7.1/5.1/H-1-9",
+ "2.2.7.1/5.1/H-1-10",})
public void test1080p() throws Exception {
Assume.assumeTrue(Utils.isTPerfClass() || !Utils.isPerfClass());
- Assume.assumeFalse("Skipping regular performance tests for secure codecs",
- isSecureSupportedCodec(mFirstPair.second, mFirstPair.first) ||
- isSecureSupportedCodec(mSecondPair.second, mSecondPair.first));
- testCodec(m1080pTestFiles, 1080, 1920, REQUIRED_MIN_CONCURRENT_INSTANCES);
- }
+ boolean isFirstSecure = isSecureSupportedCodec(mFirstPair.second, mFirstPair.first);
+ boolean isSecondSecure = isSecureSupportedCodec(mSecondPair.second, mSecondPair.first);
+ boolean onlyOneSecure = isFirstSecure ^ isSecondSecure;
+ boolean bothSecure = isFirstSecure & isSecondSecure;
- /**
- * Validates if hardware decoder pairs where one supports secure decode and required
- * perf are present and tests with concurrent unsecure decoders
- */
- @LargeTest
- @Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
- // TODO(b/218771970) Add @CddTest annotation
- public void testReqSecureWithUnsecureDecodeSupport() throws Exception {
- Assume.assumeTrue(Utils.isTPerfClass() || !Utils.isPerfClass());
- Assume.assumeTrue("Testing if only one of the pair is secure",
- isSecureSupportedCodec(mFirstPair.second, mFirstPair.first) ^
- isSecureSupportedCodec(mSecondPair.second, mSecondPair.first));
-
- MediaCodecInfo.VideoCapabilities.PerformancePoint reqSecurePP =
- new MediaCodecInfo.VideoCapabilities.PerformancePoint(1920, 1080, 30);
-
- MediaCodecInfo.VideoCapabilities.PerformancePoint reqNonSecurePP =
- new MediaCodecInfo.VideoCapabilities.PerformancePoint(1920, 1080,
- 30 * REQUIRED_CONCURRENT_NON_SECURE_INSTANCES_WITH_SECURE);
-
- boolean codecSupportsReqPP = codecSupportsPP(mFirstPair.second, mFirstPair.first,
- isSecureSupportedCodec(mFirstPair.second, mFirstPair.first) ? reqSecurePP :
- reqNonSecurePP);
-
- codecSupportsReqPP &= codecSupportsPP(mSecondPair.second, mSecondPair.first,
- isSecureSupportedCodec(mSecondPair.second, mSecondPair.first) ? reqSecurePP :
- reqNonSecurePP);
-
- testCodec(m1080pTestFiles, 1080, 1920,
- REQUIRED_CONCURRENT_NON_SECURE_INSTANCES_WITH_SECURE + 1, true);
-
- if (Utils.isTPerfClass()) {
- assertTrue(
- "Required Secure Decode Support required for MPC >= Android T, unsupported " +
- "codec pair: " + mFirstPair.second + "," + mSecondPair.second,
- codecSupportsReqPP);
+ if (bothSecure) {
+ testCodec(null, 1080, 1920, REQUIRED_MIN_CONCURRENT_SECURE_INSTANCES);
+ } else if (onlyOneSecure) {
+ testCodec(m1080pTestFiles, 1080, 1920,
+ REQUIRED_CONCURRENT_NON_SECURE_INSTANCES_WITH_SECURE + 1, true);
} else {
- DeviceReportLog log =
- new DeviceReportLog("MediaPerformanceClassLogs", "SecureDecodeSupport");
- log.addValue("Req Secure Decode Support pair: " + mFirstPair.second + "," +
- mSecondPair.second, codecSupportsReqPP, ResultType.NEUTRAL, ResultUnit.NONE);
- // TODO(b/218771970) Log CDD sections
- log.setSummary("MPC 13: Secure Decode requirements", 0, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.submit(InstrumentationRegistry.getInstrumentation());
- }
- }
-
- /**
- * Validates if hardware decoder pairs where both supports secure decode and required
- * perf is present
- */
- @LargeTest
- @Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
- // TODO(b/218771970) Add @CddTest annotation
- public void testReqMultiSecureDecodeSupport() throws Exception {
- Assume.assumeTrue(Utils.isTPerfClass() || !Utils.isPerfClass());
- Assume.assumeTrue("Run test if both are secure codecs",
- isSecureSupportedCodec(mFirstPair.second, mFirstPair.first) &&
- isSecureSupportedCodec(mSecondPair.second, mSecondPair.first));
-
- MediaCodecInfo.VideoCapabilities.PerformancePoint reqSecurePP =
- new MediaCodecInfo.VideoCapabilities.PerformancePoint(1920, 1080, 30);
-
- boolean codecSupportsReqPP =
- codecSupportsPP(mFirstPair.second, mFirstPair.first, reqSecurePP);
- codecSupportsReqPP &= codecSupportsPP(mSecondPair.second, mSecondPair.first, reqSecurePP);
-
- testCodec(null, 1080, 1920, REQUIRED_MIN_CONCURRENT_SECURE_INSTANCES);
-
- if (Utils.isTPerfClass()) {
- assertTrue(
- "Required Secure Decode Support required for MPC >= Android T, unsupported " +
- "codec pair: " + mFirstPair.second + "," + mSecondPair.second,
- codecSupportsReqPP);
- } else {
- DeviceReportLog log =
- new DeviceReportLog("MediaPerformanceClassLogs", "SecureDecodeSupport");
- log.addValue("Req Secure Decode Support pair: " + mFirstPair.second + "," +
- mSecondPair.second, codecSupportsReqPP, ResultType.NEUTRAL, ResultUnit.NONE);
- // TODO(b/218771970) Log CDD sections
- log.setSummary("MPC 13: Secure Decode requirements", 0, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.submit(InstrumentationRegistry.getInstrumentation());
+ testCodec(m1080pTestFiles, 1080, 1920, REQUIRED_MIN_CONCURRENT_INSTANCES);
}
}
@@ -229,8 +160,9 @@
ArrayList<Pair<String, String>> mimeDecoderPairs = new ArrayList<>();
mimeDecoderPairs.add(mFirstPair);
mimeDecoderPairs.add(mSecondPair);
+ boolean bothSecure = true;
int maxInstances = checkAndGetMaxSupportedInstancesForCodecCombinations(height, width,
- mimeDecoderPairs);
+ mimeDecoderPairs, requiredMinInstances);
double achievedFrameRate = 0.0;
// secure test should not reach this point if secure codec doesn't support PP
if (maxInstances >= requiredMinInstances || secureWithUnsecure) {
@@ -245,6 +177,7 @@
List<Decode> testList = new ArrayList<>();
for (int i = 0; i < firstPairInstances; i++) {
boolean isSecure = isSecureSupportedCodec(mFirstPair.second, mFirstPair.first);
+ bothSecure &= isSecure;
String testFile = isSecure ? m1080pWidevineTestFiles.get(mFirstPair.first) :
mTestFiles.get(mFirstPair.first);
Assume.assumeTrue("Add " + (isSecure ? "secure" : "") + " test vector for mime: " +
@@ -254,6 +187,7 @@
}
for (int i = 0; i < secondPairInstances; i++) {
boolean isSecure = isSecureSupportedCodec(mSecondPair.second, mSecondPair.first);
+ bothSecure &= isSecure;
String testFile = isSecure ? m1080pWidevineTestFiles.get(mSecondPair.first) :
mTestFiles.get(mSecondPair.first);
Assume.assumeTrue("Add " + (isSecure ? "secure" : "") + " test vector for mime: " +
@@ -267,29 +201,30 @@
achievedFrameRate += result.get();
}
}
- if (Utils.isPerfClass()) {
- assertTrue("Decoder pair " + mFirstPair.second + " and " + mSecondPair.second
- + " unable to support minimum concurrent " +
- "instances. act/exp: " + maxInstances + "/" + requiredMinInstances,
- maxInstances >= requiredMinInstances);
- assertTrue("Unable to achieve the maxFrameRate supported. act/exp: " + achievedFrameRate
- + "/" + mMaxFrameRate + " for " + maxInstances + " instances.",
- achievedFrameRate >= mMaxFrameRate);
+ PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
+ if (secureWithUnsecure) {
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_10 =
+ pce.addR5_1__H_1_10();
+ r5_1__H_1_10.setConcurrentFps(achievedFrameRate);
+ } else if (bothSecure) {
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_9 = pce.addR5_1__H_1_9();
+ r5_1__H_1_9.setConcurrentFps(achievedFrameRate);
} else {
- int pc = maxInstances >= requiredMinInstances && achievedFrameRate >= mMaxFrameRate
- ? Build.VERSION_CODES.R : 0;
- DeviceReportLog log = new DeviceReportLog("MediaPerformanceClassLogs",
- "MultiDecoderPairPerf_" + mFirstPair.second);
- log.addValue("decoders",
- mFirstPair.first + "_" + mFirstPair.second + "_" + mSecondPair.first + "_"
- + mSecondPair.second, ResultType.NEUTRAL, ResultUnit.NONE);
- log.addValue("achieved_framerate", achievedFrameRate, ResultType.HIGHER_BETTER,
- ResultUnit.NONE);
- log.addValue("expected_framerate", mMaxFrameRate, ResultType.NEUTRAL, ResultUnit.NONE);
- log.setSummary("CDD 2.2.7.1/5.1/H-1-1,H-1-2 performance_class", pc, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.submit(InstrumentationRegistry.getInstrumentation());
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_1;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_2;
+ if (height >= 1080) {
+ r5_1__H_1_1 = pce.addR5_1__H_1_1_1080p();
+ r5_1__H_1_2 = pce.addR5_1__H_1_2_1080p();
+ r5_1__H_1_1.setConcurrentInstances(maxInstances);
+ r5_1__H_1_2.setConcurrentFps(achievedFrameRate);
+ } else {
+ r5_1__H_1_1 = pce.addR5_1__H_1_1_720p(mMime, mMime, height);
+ r5_1__H_1_2 = pce.addR5_1__H_1_2_720p(mMime, mMime, height);
+ r5_1__H_1_1.setConcurrentInstances(maxInstances);
+ r5_1__H_1_2.setConcurrentFps(achievedFrameRate);
+ }
}
+ pce.submitAndCheck();
}
}
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiDecoderPerfTest.java b/tests/mediapc/src/android/mediapc/cts/MultiDecoderPerfTest.java
index 62c7f83..13203cc 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiDecoderPerfTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiDecoderPerfTest.java
@@ -18,10 +18,9 @@
import static org.junit.Assert.assertTrue;
-import android.media.MediaCodecInfo.VideoCapabilities.PerformancePoint;
import android.media.MediaFormat;
+import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
-import android.os.Build;
import android.util.Pair;
import androidx.test.filters.LargeTest;
@@ -33,7 +32,9 @@
import com.android.compatibility.common.util.ResultUnit;
import org.junit.Assume;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -62,6 +63,9 @@
mDecoderName = decoderName;
}
+ @Rule
+ public final TestName mTestName = new TestName();
+
// Returns the params list with the mime and corresponding hardware decoders in
// both sync and async modes.
// Parameters {0}_{1}_{2} -- Mime_DecoderName_isAsync
@@ -92,56 +96,28 @@
Assume.assumeFalse("Skipping regular performance tests for secure codecs",
isSecureSupportedCodec(mDecoderName, mMime));
boolean hasVP9 = mMime.equals(MediaFormat.MIMETYPE_VIDEO_VP9);
- int requiredMinInstances = getRequiredMinConcurrentInstances(hasVP9);
+ int requiredMinInstances = getRequiredMinConcurrentInstances720p(hasVP9);
testCodec(m720pTestFiles, 720, 1280, requiredMinInstances);
}
/**
- * This test validates that the decoder can support at least 6 concurrent 1080p 30fps
- * decoder instances. Also ensures that all the concurrent sessions succeed in decoding
- * with meeting the expected frame rate.
+ * This test validates that the decoder can support at least 6 non-secure/2 secure concurrent
+ * 1080p 30fps decoder instances. Also ensures that all the concurrent sessions succeed in
+ * decoding with meeting the expected frame rate.
*/
@LargeTest
@Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
- @CddTest(requirement = "2.2.7.1/5.1/H-1-1,H-1-2")
+ @CddTest(requirements = {
+ "2.2.7.1/5.1/H-1-1",
+ "2.2.7.1/5.1/H-1-2",
+ "2.2.7.1/5.1/H-1-9",})
public void test1080p() throws Exception {
Assume.assumeTrue(Utils.isTPerfClass() || !Utils.isPerfClass());
- Assume.assumeFalse("Skipping regular performance tests for secure codecs",
- isSecureSupportedCodec(mDecoderName, mMime));
- testCodec(m1080pTestFiles, 1080, 1920, REQUIRED_MIN_CONCURRENT_INSTANCES);
- }
-
- /**
- * Validates if hardware decoder that supports required secure decode perf is present
- */
- @LargeTest
- @Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
- // TODO(b/218771970) Add @CddTest annotation
- public void testReqSecureDecodeSupport() throws Exception {
- Assume.assumeTrue(Utils.isTPerfClass() || !Utils.isPerfClass());
- Assume.assumeTrue("Skipping secure decode support tests for non-secure codecs",
- isSecureSupportedCodec(mDecoderName, mMime));
-
- PerformancePoint reqPP =
- new PerformancePoint(1920, 1080, 30 * REQUIRED_MIN_CONCURRENT_SECURE_INSTANCES);
-
- boolean codecSupportsReqPP = codecSupportsPP(mDecoderName, mMime, reqPP);
-
- testCodec(m1080pWidevineTestFiles, 1080, 1920, REQUIRED_MIN_CONCURRENT_SECURE_INSTANCES);
-
- if (Utils.isTPerfClass()) {
- assertTrue(
- "Required Secure Decode Support required for MPC >= Android T, unsupported " +
- "codec: " + mDecoderName, codecSupportsReqPP);
+ if (isSecureSupportedCodec(mDecoderName, mMime)) {
+ testCodec(m1080pWidevineTestFiles, 1080, 1920,
+ REQUIRED_MIN_CONCURRENT_SECURE_INSTANCES);
} else {
- DeviceReportLog log =
- new DeviceReportLog("MediaPerformanceClassLogs", "SecureDecodeSupport");
- log.addValue("Req Secure Decode Support: " + mDecoderName, codecSupportsReqPP,
- ResultType.NEUTRAL, ResultUnit.NONE);
- // TODO(b/218771970) Log CDD sections
- log.setSummary("MPC 13: Secure Decode requirements", 0, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.submit(InstrumentationRegistry.getInstrumentation());
+ testCodec(m1080pTestFiles, 1080, 1920, REQUIRED_MIN_CONCURRENT_INSTANCES);
}
}
@@ -151,15 +127,15 @@
Assume.assumeTrue("Add test vector for mime: " + mMime, mTestFile != null);
ArrayList<Pair<String, String>> mimeDecoderPairs = new ArrayList<>();
mimeDecoderPairs.add(Pair.create(mMime, mDecoderName));
+ boolean isSecure = isSecureSupportedCodec(mDecoderName, mMime);
int maxInstances =
checkAndGetMaxSupportedInstancesForCodecCombinations(height, width,
- mimeDecoderPairs);
+ mimeDecoderPairs, requiredMinInstances);
double achievedFrameRate = 0.0;
if (maxInstances >= requiredMinInstances) {
ExecutorService pool = Executors.newFixedThreadPool(maxInstances);
List<Decode> testList = new ArrayList<>();
for (int i = 0; i < maxInstances; i++) {
- boolean isSecure = isSecureSupportedCodec(mDecoderName, mMime);
testList.add(new Decode(mMime, mTestFile, mDecoderName, mIsAsync, isSecure));
}
List<Future<Double>> resultList = pool.invokeAll(testList);
@@ -167,26 +143,26 @@
achievedFrameRate += result.get();
}
}
- if (Utils.isPerfClass()) {
- assertTrue("Decoder " + mDecoderName + " unable to support minimum concurrent " +
- "instances. act/exp: " + maxInstances + "/" + requiredMinInstances,
- maxInstances >= requiredMinInstances);
- assertTrue("Unable to achieve the maxFrameRate supported. act/exp: " + achievedFrameRate
- + "/" + mMaxFrameRate + " for " + maxInstances + " instances.",
- achievedFrameRate >= mMaxFrameRate);
+
+ PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
+ if (isSecure) {
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_9 = pce.addR5_1__H_1_9();
+ r5_1__H_1_9.setConcurrentFps(achievedFrameRate);
} else {
- int pc = maxInstances >= requiredMinInstances && achievedFrameRate >= mMaxFrameRate
- ? Build.VERSION_CODES.R : 0;
- DeviceReportLog log = new DeviceReportLog("MediaPerformanceClassLogs",
- "MultiDecoderPerf_" + mDecoderName);
- log.addValue("decoders", mMime + "_" + mDecoderName, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.addValue("achieved_framerate", achievedFrameRate, ResultType.HIGHER_BETTER,
- ResultUnit.NONE);
- log.addValue("expected_framerate", mMaxFrameRate, ResultType.NEUTRAL, ResultUnit.NONE);
- log.setSummary("CDD 2.2.7.1/5.1/H-1-1,H-1-2 performance_class", pc, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.submit(InstrumentationRegistry.getInstrumentation());
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_1;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_2;
+ if (height >= 1080) {
+ r5_1__H_1_1 = pce.addR5_1__H_1_1_1080p();
+ r5_1__H_1_2 = pce.addR5_1__H_1_2_1080p();
+ r5_1__H_1_1.setConcurrentInstances(maxInstances);
+ r5_1__H_1_2.setConcurrentFps(achievedFrameRate);
+ } else {
+ r5_1__H_1_1 = pce.addR5_1__H_1_1_720p(mMime, mMime, height);
+ r5_1__H_1_2 = pce.addR5_1__H_1_2_720p(mMime, mMime, height);
+ r5_1__H_1_1.setConcurrentInstances(maxInstances);
+ r5_1__H_1_2.setConcurrentFps(achievedFrameRate);
+ }
}
+ pce.submitAndCheck();
}
}
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiEncoderPairPerfTest.java b/tests/mediapc/src/android/mediapc/cts/MultiEncoderPairPerfTest.java
index 01e68d3..1997c6b 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiEncoderPairPerfTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiEncoderPairPerfTest.java
@@ -16,24 +16,19 @@
package android.mediapc.cts;
-import static org.junit.Assert.assertTrue;
-
import android.media.MediaFormat;
+import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
-import android.os.Build;
-import android.util.Log;
import android.util.Pair;
import androidx.test.filters.LargeTest;
-import androidx.test.platform.app.InstrumentationRegistry;
import com.android.compatibility.common.util.CddTest;
-import com.android.compatibility.common.util.DeviceReportLog;
-import com.android.compatibility.common.util.ResultType;
-import com.android.compatibility.common.util.ResultUnit;
import org.junit.Assume;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -66,6 +61,9 @@
mSecondPair = secondPair;
}
+ @Rule
+ public final TestName mTestName = new TestName();
+
// Returns the list of params with two hardware (mime - encoder) pairs in both
// sync and async modes.
// Parameters {0}_{1}_{2} -- Pair(Mime EncoderName)_Pair(Mime EncoderName)_isAsync
@@ -105,7 +103,7 @@
boolean hasVP9 = mFirstPair.first.equals(MediaFormat.MIMETYPE_VIDEO_VP9) ||
mSecondPair.first.equals(MediaFormat.MIMETYPE_VIDEO_VP9);
- int requiredMinInstances = getRequiredMinConcurrentInstances(hasVP9);
+ int requiredMinInstances = getRequiredMinConcurrentInstances720p(hasVP9);
testCodec(720, 1280, 4000000, requiredMinInstances);
}
@@ -129,7 +127,7 @@
mimeEncoderPairs.add(mFirstPair);
mimeEncoderPairs.add(mSecondPair);
int maxInstances = checkAndGetMaxSupportedInstancesForCodecCombinations(height, width,
- mimeEncoderPairs);
+ mimeEncoderPairs, requiredMinInstances);
double achievedFrameRate = 0.0;
if (maxInstances >= requiredMinInstances) {
int secondPairInstances = maxInstances / 2;
@@ -151,22 +149,22 @@
achievedFrameRate += result.get();
}
}
- if (Utils.isPerfClass()) {
- assertTrue("Encoder pair " + mFirstPair.second + " and " + mSecondPair.second
- + " unable to support minimum concurrent instances. act/exp: " + maxInstances
- + "/" + requiredMinInstances, maxInstances >= requiredMinInstances);
- Log.v(LOG_TAG, "Achieved fps: " + achievedFrameRate +
- "\nAchieved frame rate is not compared as this test runs in byte buffer mode");
+ PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_3;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_4;
+ // Achieved frame rate is not compared as this test runs in byte buffer mode.
+ if (height >= 1080) {
+ r5_1__H_1_3 = pce.addR5_1__H_1_3_1080p();
+ r5_1__H_1_4 = pce.addR5_1__H_1_4_1080p();
+ r5_1__H_1_3.setConcurrentInstances(maxInstances);
+ r5_1__H_1_4.setConcurrentFps(achievedFrameRate);
} else {
- int pc = maxInstances >= requiredMinInstances ? Build.VERSION_CODES.R : 0;
- DeviceReportLog log = new DeviceReportLog("MediaPerformanceClassLogs",
- "MultiEncoderPairPerf_" + mFirstPair.second);
- log.addValue("encoders",
- mFirstPair.first + "_" + mFirstPair.second + "_" + mSecondPair.first + "_"
- + mSecondPair.second, ResultType.NEUTRAL, ResultUnit.NONE);
- log.setSummary("CDD 2.2.7.1/5.1/H-1-3,H-1-4 performance_class", pc, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.submit(InstrumentationRegistry.getInstrumentation());
+ r5_1__H_1_3 = pce.addR5_1__H_1_3_720p(mMime, mMime, height);
+ r5_1__H_1_4 = pce.addR5_1__H_1_4_720p();
+ r5_1__H_1_3.setConcurrentInstances(maxInstances);
+ r5_1__H_1_4.setConcurrentFps(achievedFrameRate);
}
+
+ pce.submitAndCheck();
}
}
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiEncoderPerfTest.java b/tests/mediapc/src/android/mediapc/cts/MultiEncoderPerfTest.java
index 28c7fbd..66afa89 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiEncoderPerfTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiEncoderPerfTest.java
@@ -16,24 +16,19 @@
package android.mediapc.cts;
-import static org.junit.Assert.assertTrue;
-
import android.media.MediaFormat;
+import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
-import android.os.Build;
-import android.util.Log;
import android.util.Pair;
import androidx.test.filters.LargeTest;
-import androidx.test.platform.app.InstrumentationRegistry;
import com.android.compatibility.common.util.CddTest;
-import com.android.compatibility.common.util.DeviceReportLog;
-import com.android.compatibility.common.util.ResultType;
-import com.android.compatibility.common.util.ResultUnit;
import org.junit.Assume;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -62,6 +57,9 @@
mEncoderName = encoderName;
}
+ @Rule
+ public final TestName mTestName = new TestName();
+
// Returns the params list with the mime and their hardware encoders in
// both sync and async modes.
// Parameters {0}_{2}_{3} -- Mime_EncoderName_isAsync
@@ -90,7 +88,7 @@
Assume.assumeTrue(Utils.isSPerfClass() || Utils.isRPerfClass() || !Utils.isPerfClass());
boolean hasVP9 = mMime.equals(MediaFormat.MIMETYPE_VIDEO_VP9);
- int requiredMinInstances = getRequiredMinConcurrentInstances(hasVP9);
+ int requiredMinInstances = getRequiredMinConcurrentInstances720p(hasVP9);
testCodec(720, 1280, 4000000, requiredMinInstances);
}
@@ -111,7 +109,7 @@
ArrayList<Pair<String, String>> mimeEncoderPairs = new ArrayList<>();
mimeEncoderPairs.add(Pair.create(mMime, mEncoderName));
int maxInstances = checkAndGetMaxSupportedInstancesForCodecCombinations(height, width,
- mimeEncoderPairs);
+ mimeEncoderPairs, requiredMinInstances);
double achievedFrameRate = 0.0;
if (maxInstances >= requiredMinInstances) {
ExecutorService pool = Executors.newFixedThreadPool(maxInstances);
@@ -124,22 +122,22 @@
achievedFrameRate += result.get();
}
}
+ PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_3;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_4;
// Achieved frame rate is not compared as this test runs in byte buffer mode.
- if (Utils.isPerfClass()) {
- assertTrue("Encoder " + mEncoderName + " unable to support minimum concurrent " +
- "instances. act/exp: " + maxInstances + "/" + requiredMinInstances,
- maxInstances >= requiredMinInstances);
- Log.v(LOG_TAG, "Achieved fps: " + achievedFrameRate +
- "\nAchieved frame rate is not compared as this test runs in byte buffer mode");
+ if (height >= 1080) {
+ r5_1__H_1_3 = pce.addR5_1__H_1_3_1080p();
+ r5_1__H_1_4 = pce.addR5_1__H_1_4_1080p();
+ r5_1__H_1_3.setConcurrentInstances(maxInstances);
+ r5_1__H_1_4.setConcurrentFps(achievedFrameRate);
} else {
- int pc = maxInstances >= requiredMinInstances ? Build.VERSION_CODES.R : 0;
- DeviceReportLog log = new DeviceReportLog("MediaPerformanceClassLogs",
- "MultiEncoderPerf_" + mEncoderName);
- log.addValue("encoder", mMime + "_" + mEncoderName, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.setSummary("CDD 2.2.7.1/5.1/H-1-3,H-1-4 performance_class", pc, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.submit(InstrumentationRegistry.getInstrumentation());
+ r5_1__H_1_3 = pce.addR5_1__H_1_3_720p(mMime, mMime, height);
+ r5_1__H_1_4 = pce.addR5_1__H_1_4_720p();
+ r5_1__H_1_3.setConcurrentInstances(maxInstances);
+ r5_1__H_1_4.setConcurrentFps(achievedFrameRate);
}
+
+ pce.submitAndCheck();
}
}
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiTranscoderPerfTest.java b/tests/mediapc/src/android/mediapc/cts/MultiTranscoderPerfTest.java
index 9312637..34ffc9b 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiTranscoderPerfTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiTranscoderPerfTest.java
@@ -19,23 +19,20 @@
import static org.junit.Assert.assertTrue;
import android.media.MediaFormat;
+import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
-import android.os.Build;
import android.util.Pair;
import android.view.Surface;
import androidx.test.filters.LargeTest;
-import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.rule.ActivityTestRule;
import com.android.compatibility.common.util.CddTest;
-import com.android.compatibility.common.util.DeviceReportLog;
-import com.android.compatibility.common.util.ResultType;
-import com.android.compatibility.common.util.ResultUnit;
import org.junit.Assume;
import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -73,6 +70,9 @@
mEncoderPair = encoderPair;
}
+ @Rule
+ public final TestName mTestName = new TestName();
+
// Parameters {0}_{1}_{2} -- Pair(Mime DecoderName)_Pair(Mime EncoderName)_isAsync
@Parameterized.Parameters(name = "{index}({0}_{1}_{2})")
public static Collection<Object[]> inputParams() {
@@ -115,7 +115,7 @@
boolean hasVP9 = mDecoderPair.first.equals(MediaFormat.MIMETYPE_VIDEO_VP9)
|| mEncoderPair.first.equals(MediaFormat.MIMETYPE_VIDEO_VP9);
- int requiredMinInstances = getRequiredMinConcurrentInstances(hasVP9) / 2;
+ int requiredMinInstances = getRequiredMinConcurrentInstances720p(hasVP9);
testCodec(m720pTestFiles, 720, 1280, requiredMinInstances);
}
@@ -131,7 +131,7 @@
@CddTest(requirement = "2.2.7.1/5.1/H-1-5,H-1-6")
public void test1080p() throws Exception {
Assume.assumeTrue(Utils.isTPerfClass() || !Utils.isPerfClass());
- testCodec(m1080pTestFiles, 1080, 1920, REQUIRED_MIN_CONCURRENT_INSTANCES / 2);
+ testCodec(m1080pTestFiles, 1080, 1920, REQUIRED_MIN_CONCURRENT_INSTANCES);
}
private void testCodec(Map<String, String> testFiles, int height, int width,
@@ -141,7 +141,8 @@
mimeCodecPairs.add(mDecoderPair);
mimeCodecPairs.add(mEncoderPair);
int maxInstances =
- checkAndGetMaxSupportedInstancesForCodecCombinations(height, width, mimeCodecPairs);
+ checkAndGetMaxSupportedInstancesForCodecCombinations(height, width, mimeCodecPairs,
+ requiredMinInstances);
double achievedFrameRate = 0.0;
if (maxInstances >= requiredMinInstances) {
ExecutorService pool =
@@ -175,29 +176,22 @@
}
}
}
- if (Utils.isPerfClass()) {
- assertTrue("DecodeMime: " + mDecoderPair.first + ", Decoder " + mDecoderPair.second +
- ", EncodeMime: " + mEncoderPair.first + ", Encoder: " + mEncoderPair.second +
- ", unable to support minimum concurrent instances. act/exp: " + maxInstances +
- "/" + requiredMinInstances, maxInstances >= requiredMinInstances);
- assertTrue("Unable to achieve the maxFrameRate supported. act/exp: " + achievedFrameRate
- + "/" + mMaxFrameRate / 2 + " for " + maxInstances + " instances.",
- achievedFrameRate >= mMaxFrameRate / 2);
+ PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_5;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_6;
+ if (height >= 1080) {
+ r5_1__H_1_5 = pce.addR5_1__H_1_5_1080p();
+ r5_1__H_1_6 = pce.addR5_1__H_1_6_1080p();
+ r5_1__H_1_5.setConcurrentInstances(maxInstances);
+ r5_1__H_1_6.setConcurrentFps(achievedFrameRate);
} else {
- int pc = maxInstances >= requiredMinInstances && achievedFrameRate >= mMaxFrameRate / 2
- ? Build.VERSION_CODES.R : 0;
- DeviceReportLog log = new DeviceReportLog("MediaPerformanceClassLogs",
- "MultiTranscoderPairPerf_" + mDecoderPair.second);
- log.addValue("decoders", mDecoderPair.first + "_" + mDecoderPair.second + "_"
- + mEncoderPair.first + "_" + mEncoderPair.second, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.addValue("achieved_framerate", achievedFrameRate, ResultType.HIGHER_BETTER,
- ResultUnit.NONE);
- log.addValue("expected_framerate", mMaxFrameRate, ResultType.NEUTRAL, ResultUnit.NONE);
- log.setSummary("CDD 2.2.7.1/5.1/H-1-5,H-1-6 performance_class", pc, ResultType.NEUTRAL,
- ResultUnit.NONE);
- log.submit(InstrumentationRegistry.getInstrumentation());
+ r5_1__H_1_5 = pce.addR5_1__H_1_5_720p(mDecoderPair.first, mEncoderPair.first, height);
+ r5_1__H_1_6 = pce.addR5_1__H_1_6_720p(mDecoderPair.first, mEncoderPair.first, height);
+ r5_1__H_1_5.setConcurrentInstances(maxInstances);
+ r5_1__H_1_6.setConcurrentFps(achievedFrameRate);
}
+
+ pce.submitAndCheck();
}
}
diff --git a/tests/mediapc/src/android/mediapc/cts/PerformanceClassTest.java b/tests/mediapc/src/android/mediapc/cts/PerformanceClassTest.java
index 24b22ea..402a956 100644
--- a/tests/mediapc/src/android/mediapc/cts/PerformanceClassTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/PerformanceClassTest.java
@@ -190,19 +190,15 @@
PerformanceClassEvaluator.DensityRequirement r7_1_1_3__h_1_1 = pce.addR7_1_1_3__H_1_1();
PerformanceClassEvaluator.ResolutionRequirement r7_1_1_1__h_2_1 = pce.addR7_1_1_1__H_2_1();
PerformanceClassEvaluator.DensityRequirement r7_1_1_3__h_2_1 = pce.addR7_1_1_3__H_2_1();
- PerformanceClassEvaluator.ResolutionRequirement r7_1_1_1__tbd1 = pce.addR7_1_1_1__TBD1();
- PerformanceClassEvaluator.DensityRequirement r7_1_1_3__tbd2 = pce.addR7_1_1_3__TBD2();
r7_1_1_1__h_1_1.setLongResolution(longPix);
r7_1_1_1__h_2_1.setLongResolution(longPix);
- r7_1_1_1__tbd1.setLongResolution(longPix);
+
r7_1_1_1__h_1_1.setShortResolution(shortPix);
r7_1_1_1__h_2_1.setShortResolution(shortPix);
- r7_1_1_1__tbd1.setShortResolution(shortPix);
r7_1_1_3__h_1_1.setDisplayDensity(density);
r7_1_1_3__h_2_1.setDisplayDensity(density);
- r7_1_1_3__tbd2.setDisplayDensity(density);
pce.submitAndCheck();
}
@@ -210,8 +206,7 @@
@Test
@CddTest(requirements={
"2.2.7.3/7.6.1/H-1-1",
- "2.2.7.3/7.6.1/H-2-1",
- "2.2.7.3/7.6.1/H-3-1"})
+ "2.2.7.3/7.6.1/H-2-1",})
public void testMinimumMemory() {
Context context = InstrumentationRegistry.getInstrumentation().getContext();
@@ -224,11 +219,9 @@
PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
PerformanceClassEvaluator.MemoryRequirement r7_6_1_h_1_1 = pce.addR7_6_1__H_1_1();
PerformanceClassEvaluator.MemoryRequirement r7_6_1_h_2_1 = pce.addR7_6_1__H_2_1();
- PerformanceClassEvaluator.MemoryRequirement r7_6_1_h_3_1 = pce.addR7_6_1__H_3_1();
r7_6_1_h_1_1.setPhysicalMemory(totalMemoryMb);
r7_6_1_h_2_1.setPhysicalMemory(totalMemoryMb);
- r7_6_1_h_3_1.setPhysicalMemory(totalMemoryMb);
pce.submitAndCheck();
}
diff --git a/tests/mediapc/src/android/mediapc/cts/VideoCodecRequirementsTest.java b/tests/mediapc/src/android/mediapc/cts/VideoCodecRequirementsTest.java
index 4a9d39f..bbe26dc 100644
--- a/tests/mediapc/src/android/mediapc/cts/VideoCodecRequirementsTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/VideoCodecRequirementsTest.java
@@ -29,6 +29,7 @@
import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.util.Log;
import androidx.test.filters.LargeTest;
+import com.android.compatibility.common.util.CddTest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
@@ -78,7 +79,7 @@
*/
@LargeTest
@Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
- // TODO(b/218771970) Add @CddTest annotation
+ @CddTest(requirement = "2.2.7.1/5.1/H-1-14")
public void testAV1HwDecoderRequirements() throws Exception {
MediaFormat format = MediaFormat.createVideoFormat(MIMETYPE_VIDEO_AV1, 1920, 1080);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 60);
@@ -107,7 +108,7 @@
*/
@LargeTest
@Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
- // TODO(b/218771970) Add @CddTest annotation
+ @CddTest(requirement = "2.2.7.1/5.1/H-1-15")
public void test4k60Decoder() throws IOException {
Set<String> decoderSet = get4k60HwCodecSet(false);
@@ -123,7 +124,7 @@
*/
@LargeTest
@Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
- // TODO(b/218771970) Add @CddTest annotation
+ @CddTest(requirement = "2.2.7.1/5.1/H-1-16")
public void test4k60Encoder() throws IOException {
Set<String> encoderSet = get4k60HwCodecSet(true);
diff --git a/tests/tests/content/src/android/content/pm/cts/ChecksumsTest.java b/tests/tests/content/src/android/content/pm/cts/ChecksumsTest.java
index 5cb59c2..4359d7d 100644
--- a/tests/tests/content/src/android/content/pm/cts/ChecksumsTest.java
+++ b/tests/tests/content/src/android/content/pm/cts/ChecksumsTest.java
@@ -52,10 +52,12 @@
import android.content.pm.PackageManager;
import android.content.pm.Signature;
import android.content.pm.cts.util.AbandonAllPackageSessionsRule;
+import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.IBinder;
import android.os.ParcelFileDescriptor;
+import android.os.SystemProperties;
import android.platform.test.annotations.AppModeFull;
import android.util.ExceptionUtils;
@@ -71,6 +73,7 @@
import org.junit.After;
import org.junit.Assert;
+import org.junit.Assume;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -152,6 +155,11 @@
new Checksum(TYPE_WHOLE_SHA256, hexStringToBytes(TEST_FIXED_APK_SHA256)),
new Checksum(TYPE_WHOLE_MD5, hexStringToBytes(TEST_FIXED_APK_MD5))};
+ /** Default is to not use fs-verity since it depends on kernel support. */
+ private static final int FSVERITY_DISABLED = 0;
+
+ /** Standard fs-verity. */
+ private static final int FSVERITY_ENABLED = 2;
private static final byte[] NO_SIGNATURE = null;
@@ -315,6 +323,7 @@
@LargeTest
@Test
public void testFixedFSVerityDefaultChecksums() throws Exception {
+ Assume.assumeTrue(isApkVerityEnabled());
installApkWithFSVerity(TEST_FIXED_APK_FSVERITY, TEST_FIXED_APK_FSVERITY_FSVSIG);
assertTrue(isAppInstalled(FIXED_FSVERITY_PACKAGE_NAME));
@@ -1513,6 +1522,13 @@
return getPackageManager().hasSystemFeature(PackageManager.FEATURE_INCREMENTAL_DELIVERY);
}
+ // From PackageManagerServiceUtils.
+ private static boolean isApkVerityEnabled() {
+ return Build.VERSION.DEVICE_INITIAL_SDK_INT >= Build.VERSION_CODES.R
+ || SystemProperties.getInt("ro.apk_verity.mode", FSVERITY_DISABLED)
+ == FSVERITY_ENABLED;
+ }
+
private byte[] readSignature() throws IOException {
return readSignature(TEST_FIXED_APK_DIGESTS_SIGNATURE);
}