Merge "Increase testMultiAtlasGlyphsWithColorSpace (CTS) timeout to 60s" into main
diff --git a/common/device-side/util-axt/src/com/android/compatibility/common/util/MediaUtils.java b/common/device-side/util-axt/src/com/android/compatibility/common/util/MediaUtils.java
index e832885..1479634 100644
--- a/common/device-side/util-axt/src/com/android/compatibility/common/util/MediaUtils.java
+++ b/common/device-side/util-axt/src/com/android/compatibility/common/util/MediaUtils.java
@@ -42,6 +42,7 @@
import android.util.DisplayMetrics;
import android.util.Log;
import android.util.Range;
+import android.util.Size;
import android.view.WindowManager;
import androidx.test.platform.app.InstrumentationRegistry;
@@ -415,6 +416,13 @@
return false;
}
+ /**
+ * Returns true if the given codec component supports the given mediaType at the given resolution.
+ */
+ public static boolean supports(String codecName, String mediaType, Size size) {
+ return supports(codecName, mediaType, size.getWidth(), size.getHeight());
+ }
+
public static boolean supports(String codecName, MediaFormat format) {
MediaCodec codec;
try {
diff --git a/hostsidetests/hdmicec/src/android/hdmicec/cts/RemoteControlPassthrough.java b/hostsidetests/hdmicec/src/android/hdmicec/cts/RemoteControlPassthrough.java
index 296284b..eb25c1b 100644
--- a/hostsidetests/hdmicec/src/android/hdmicec/cts/RemoteControlPassthrough.java
+++ b/hostsidetests/hdmicec/src/android/hdmicec/cts/RemoteControlPassthrough.java
@@ -342,6 +342,19 @@
hdmiCecClient.sendUserControlPressAndRelease(
sourceDevice, dutLogicalAddress, cecKeycode, false);
+ // KEYCODE_SETUP_MENU might open the notification panel, which quits the activity
+ // HdmiCecKeyEventCapture.
+ if (cecKeycode == HdmiCecConstants.CEC_KEYCODE_SETUP_MENU) {
+ try {
+ LogHelper.waitForLog(device, "ActivityTaskManager", 5,
+ "TOGGLE_NOTIFICATION_HANDLER_PANEL");
+ return;
+ } catch (Exception e) {
+ // We have to send the key again since logcat was cleared.
+ hdmiCecClient.sendUserControlPressAndRelease(
+ sourceDevice, dutLogicalAddress, cecKeycode, false);
+ }
+ }
LogHelper.assertLog(device, CLASS, "Short press KEYCODE_" + androidKeycode);
}
}
diff --git a/hostsidetests/os/OWNERS b/hostsidetests/os/OWNERS
index 45bbbc4..867f59d 100644
--- a/hostsidetests/os/OWNERS
+++ b/hostsidetests/os/OWNERS
@@ -1,7 +1,6 @@
# Bug component: 25692
michaelwr@google.com
santoscordon@google.com
-per-file InattentiveSleepTests.java=robhor@google.com
-per-file QuiescentBootTests.java=robhor@google.com
+per-file InattentiveSleepTests.java=qingxun@google.com
+per-file QuiescentBootTests.java=qingxun@google.com
per-file StaticSharedLibsHostTests.java=patb@google.com
-
diff --git a/hostsidetests/packagemanager/dynamicmime/src/android/dynamicmime/cts/PreferredActivitiesTestCases.java b/hostsidetests/packagemanager/dynamicmime/src/android/dynamicmime/cts/PreferredActivitiesTestCases.java
index 0951be3..d11b6bc 100644
--- a/hostsidetests/packagemanager/dynamicmime/src/android/dynamicmime/cts/PreferredActivitiesTestCases.java
+++ b/hostsidetests/packagemanager/dynamicmime/src/android/dynamicmime/cts/PreferredActivitiesTestCases.java
@@ -16,7 +16,10 @@
package android.dynamicmime.cts;
+import static org.junit.Assume.assumeTrue;
+
import com.android.compatibility.common.util.ApiTest;
+import com.android.compatibility.common.util.PropertyUtil;
import com.android.tradefed.device.DeviceNotAvailableException;
import com.android.tradefed.testtype.DeviceJUnit4ClassRunner;
import com.android.tradefed.testtype.junit4.BaseHostJUnit4Test;
@@ -39,8 +42,17 @@
public class PreferredActivitiesTestCases extends BaseHostJUnit4Test {
private static final String PACKAGE_TEST_APP = "android.dynamicmime.testapp";
+ private boolean isShippedAtLeastS() {
+ try {
+ return PropertyUtil.getFirstApiLevel(getDevice()) > 30 /* Build.VERSION_CODES.R */;
+ } catch (Exception e) {
+ return false;
+ }
+ }
+
@Before
public void setUp() throws DeviceNotAvailableException {
+ assumeTrue("Device must have shipped with Android S or later", isShippedAtLeastS());
// wake up and unlock device
getDevice().executeShellCommand("input keyevent KEYCODE_WAKEUP");
getDevice().disableKeyguard();
diff --git a/hostsidetests/scopedstorage/host/src/android/scopedstorage/cts/host/LegacyStorageHostTest.java b/hostsidetests/scopedstorage/host/src/android/scopedstorage/cts/host/LegacyStorageHostTest.java
index e0dc946..4e2f1bd 100644
--- a/hostsidetests/scopedstorage/host/src/android/scopedstorage/cts/host/LegacyStorageHostTest.java
+++ b/hostsidetests/scopedstorage/host/src/android/scopedstorage/cts/host/LegacyStorageHostTest.java
@@ -18,6 +18,8 @@
import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assume.assumeFalse;
+
import android.platform.test.annotations.AppModeFull;
import com.android.tradefed.device.contentprovider.ContentProviderHandler;
@@ -86,6 +88,9 @@
@Before
public void setup() throws Exception {
+ // Ignore tests on automotive devices b/319785789
+ assumeFalse(hasDeviceFeature("android.hardware.type.automotive"));
+
mContentProviderHandler = new ContentProviderHandler(getDevice());
mContentProviderHandler.setUp();
setupExternalStorage();
diff --git a/hostsidetests/scopedstorage/host/src/android/scopedstorage/cts/host/ScopedStorageHostTest.java b/hostsidetests/scopedstorage/host/src/android/scopedstorage/cts/host/ScopedStorageHostTest.java
index 37f8ee7..6c2b836 100644
--- a/hostsidetests/scopedstorage/host/src/android/scopedstorage/cts/host/ScopedStorageHostTest.java
+++ b/hostsidetests/scopedstorage/host/src/android/scopedstorage/cts/host/ScopedStorageHostTest.java
@@ -18,6 +18,8 @@
import static com.google.common.truth.Truth.assertThat;
+import static org.junit.Assume.assumeFalse;
+
import android.platform.test.annotations.AppModeFull;
import com.android.modules.utils.build.testing.DeviceSdkLevel;
@@ -71,6 +73,9 @@
@Before
public void setup() throws Exception {
+ // Ignore tests on automotive devices b/319785789
+ assumeFalse(hasDeviceFeature("android.hardware.type.automotive"));
+
setupExternalStorage();
executeShellCommand("mkdir /sdcard/Android/data/com.android.shell -m 2770");
executeShellCommand("mkdir /sdcard/Android/data/com.android.shell/files -m 2770");
diff --git a/hostsidetests/security/src/android/security/cts/SELinuxHostTest.java b/hostsidetests/security/src/android/security/cts/SELinuxHostTest.java
index 8713d56..341024c 100644
--- a/hostsidetests/security/src/android/security/cts/SELinuxHostTest.java
+++ b/hostsidetests/security/src/android/security/cts/SELinuxHostTest.java
@@ -231,7 +231,6 @@
return builtPolicyFile;
}
-
builtPolicyFile = createTempFile(tmpFileName, ".tmp");
File secilc = copyResourceToTempFile("/secilc");
@@ -239,14 +238,31 @@
File systemSepolicyCilFile = createTempFile("plat_sepolicy", ".cil");
File fileContextsFile = createTempFile("file_contexts", ".txt");
-
assertTrue(device.pullFile("/system/etc/selinux/plat_sepolicy.cil", systemSepolicyCilFile));
- String errorString = tryRunCommand(secilc.getAbsolutePath(),
- "-m", "-M", "true", "-c", "30",
- "-o", builtPolicyFile.getAbsolutePath(),
- "-f", fileContextsFile.getAbsolutePath(),
- systemSepolicyCilFile.getAbsolutePath());
+ List<String> command = new ArrayList<>(Arrays.asList(
+ secilc.getAbsolutePath(),
+ "-m",
+ "-M",
+ "true",
+ "-c",
+ "30",
+ "-o",
+ builtPolicyFile.getAbsolutePath(),
+ "-f",
+ fileContextsFile.getAbsolutePath(),
+ systemSepolicyCilFile.getAbsolutePath()));
+
+ File systemExtCilFile = createTempFile("system_ext_sepolicy", ".cil");
+ File productCilFile = createTempFile("product_sepolicy", ".cil");
+ if (device.pullFile("/system_ext/etc/selinux/system_ext_sepolicy.cil", systemExtCilFile)) {
+ command.add(systemExtCilFile.getAbsolutePath());
+ }
+ if (device.pullFile("/product/etc/selinux/product_sepolicy.cil", productCilFile)) {
+ command.add(productCilFile.getAbsolutePath());
+ }
+
+ String errorString = tryRunCommand(command.toArray(new String[0]));
assertTrue(errorString, errorString.length() == 0);
synchronized (cache) {
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2015-1805/poc.c b/hostsidetests/securitybulletin/securityPatch/CVE-2015-1805/poc.c
index c80b5ed..5d418ed 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2015-1805/poc.c
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2015-1805/poc.c
@@ -33,8 +33,9 @@
struct iovec *iovs = NULL;
void *func_evil(void *data) {
- munmap((void *)(0x45678000), PAGE_SIZE);
- mmap((void *)(0x45678000), PAGE_SIZE, PROT_READ | PROT_WRITE,
+ const size_t page_size = getpagesize();
+ munmap((void *)(0x45678000), page_size);
+ mmap((void *)(0x45678000), page_size, PROT_READ | PROT_WRITE,
MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, -1, 0);
return data;
}
@@ -45,6 +46,7 @@
}
int main() {
+ const size_t page_size = getpagesize();
int ret = -1, i;
void *bufs[BUFS];
time_t test_started = start_timer();
@@ -62,7 +64,7 @@
goto __close_pipe;
}
- bufs[MAGIC] = mmap((void *)(0x45678000), PAGE_SIZE, PROT_READ | PROT_WRITE,
+ bufs[MAGIC] = mmap((void *)(0x45678000), page_size, PROT_READ | PROT_WRITE,
MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, -1, 0);
if (bufs[MAGIC] == MAP_FAILED) {
goto __close_pipe;
@@ -70,7 +72,7 @@
for (size_t i = 0; i < sizeof(bufs) / sizeof(bufs[0]); i++) {
if (i == MAGIC) continue;
- bufs[i] = mmap(NULL, PAGE_SIZE, PROT_READ | PROT_WRITE,
+ bufs[i] = mmap(NULL, page_size, PROT_READ | PROT_WRITE,
MAP_SHARED | MAP_ANONYMOUS, -1, 0);
if (bufs[i] == MAP_FAILED) {
goto __free_bufs;
@@ -87,7 +89,7 @@
i = 0;
while (timer_active(test_started)) {
- write(fd[1], bufs[0], PAGE_SIZE);
+ write(fd[1], bufs[0], page_size);
pthread_create(&thr_evil, NULL, func_evil, NULL);
pthread_create(&thr_readv, NULL, func_readv, NULL);
@@ -98,7 +100,7 @@
__free_bufs:
for (size_t i = 0; i < sizeof(bufs) / sizeof(bufs[0]); i++) {
- if (bufs[i]) munmap(bufs[i], PAGE_SIZE);
+ if (bufs[i]) munmap(bufs[i], page_size);
}
__close_pipe:
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2016-8431/poc.c b/hostsidetests/securitybulletin/securityPatch/CVE-2016-8431/poc.c
index 1cc0f29..75f55d8 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2016-8431/poc.c
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2016-8431/poc.c
@@ -47,7 +47,7 @@
submit_c.cmdbufs = (__u64)cmdbufs;
submit_c.num_relocs = CMD_NUM;
submit_c.relocs = (__u64)relocs;
- gem_create.size = PAGE_SIZE;
+ gem_create.size = getpagesize();
return 0;
}
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2016-8432/poc.c b/hostsidetests/securitybulletin/securityPatch/CVE-2016-8432/poc.c
index 52b48f2..0a0e80e 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2016-8432/poc.c
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2016-8432/poc.c
@@ -82,7 +82,7 @@
submit_c.num_relocs = CMD_NUM;
submit_c.relocs = (__u64)relocs;
- gem_create.size = PAGE_SIZE;
+ gem_create.size = getpagesize();
return 0;
}
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2016-8482/poc.c b/hostsidetests/securitybulletin/securityPatch/CVE-2016-8482/poc.c
index 41862a5..84c8325 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2016-8482/poc.c
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2016-8482/poc.c
@@ -135,7 +135,7 @@
int i, j, ret;
int dma1, dma2;
struct nvmap_create_handle args = {
- .size = PAGE_SIZE
+ .size = getpagesize()
};
struct nvmap_alloc_handle alloc = {
.heap_mask = 0xFFFFFFFF
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2019-2013/poc.cpp b/hostsidetests/securitybulletin/securityPatch/CVE-2019-2013/poc.cpp
index 9f41b93..bb418dc 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2019-2013/poc.cpp
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2019-2013/poc.cpp
@@ -31,10 +31,11 @@
struct sigaction new_action, old_action;
void sigsegv_handler(int signum, siginfo_t *info, void* context) {
if (testInProgress && info->si_signo == SIGSEGV) {
- size_t pageSize = getpagesize();
- if (pageSize) {
- char *vulnPtrGuardPage = (char *) ((size_t) vulnPtr & PAGE_MASK) + pageSize;
- char *faultPage = (char *) ((size_t) info->si_addr & PAGE_MASK);
+ const size_t page_size = getpagesize();
+ const size_t page_mask = (~(page_size - 1));
+ if (page_size) {
+ char *vulnPtrGuardPage = (char *) ((size_t) vulnPtr & page_mask) + page_size;
+ char *faultPage = (char *) ((size_t) info->si_addr & page_mask);
if (faultPage == vulnPtrGuardPage) {
(*old_action.sa_sigaction)(signum, info, context);
return;
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2019-2021/poc.cpp b/hostsidetests/securitybulletin/securityPatch/CVE-2019-2021/poc.cpp
index 5205d05..a74a362 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2019-2021/poc.cpp
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2019-2021/poc.cpp
@@ -28,10 +28,11 @@
struct sigaction new_action, old_action;
void sigsegv_handler(int signum, siginfo_t *info, void* context) {
if (testInProgress && info->si_signo == SIGSEGV) {
- size_t pageSize = getpagesize();
- if (pageSize) {
- char *vulnPtrGuardPage = (char *) ((size_t) vulnPtr & PAGE_MASK) + pageSize;
- char *faultPage = (char *) ((size_t) info->si_addr & PAGE_MASK);
+ const size_t page_size = getpagesize();
+ const size_t page_mask = (~(page_size - 1));
+ if (page_size) {
+ char *vulnPtrGuardPage = (char *) ((size_t) vulnPtr & page_mask) + page_size;
+ char *faultPage = (char *) ((size_t) info->si_addr & page_mask);
if (faultPage == vulnPtrGuardPage) {
(*old_action.sa_sigaction)(signum, info, context);
return;
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2019-2022/poc.cpp b/hostsidetests/securitybulletin/securityPatch/CVE-2019-2022/poc.cpp
index b9252c5..fc2bcd1 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2019-2022/poc.cpp
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2019-2022/poc.cpp
@@ -31,10 +31,11 @@
struct sigaction new_action, old_action;
void sigsegv_handler(int signum, siginfo_t *info, void* context) {
if (testInProgress && info->si_signo == SIGSEGV) {
- size_t pageSize = getpagesize();
- if (pageSize) {
- char *vulnPtrGuardPage = (char *) ((size_t) vulnPtr & PAGE_MASK) + pageSize;
- char *faultPage = (char *) ((size_t) info->si_addr & PAGE_MASK);
+ const size_t page_size = getpagesize();
+ const size_t page_mask = (~(page_size - 1));
+ if (page_size) {
+ char *vulnPtrGuardPage = (char *) ((size_t) vulnPtr & page_mask) + page_size;
+ char *faultPage = (char *) ((size_t) info->si_addr & page_mask);
if (faultPage == vulnPtrGuardPage) {
(*old_action.sa_sigaction)(signum, info, context);
return;
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2020-0213/poc.cpp b/hostsidetests/securitybulletin/securityPatch/CVE-2020-0213/poc.cpp
index fd10060..c766dd9 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2020-0213/poc.cpp
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2020-0213/poc.cpp
@@ -351,7 +351,7 @@
}
work->input.buffers.clear();
- auto alignedSize = ALIGN(size, PAGE_SIZE);
+ auto alignedSize = ALIGN(size, getpagesize());
if (size) {
std::shared_ptr<C2LinearBlock> block;
if (linearPool->fetchLinearBlock(alignedSize,
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2020-11173/poc.c b/hostsidetests/securitybulletin/securityPatch/CVE-2020-11173/poc.c
index 726584ae..1f64eee 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2020-11173/poc.c
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2020-11173/poc.c
@@ -73,7 +73,7 @@
struct fastrpc_ioctl_init init = {0};
init.filefd = dma_fd;
- init.filelen = PAGE_SIZE;
+ init.filelen = getpagesize();
init.mem = (void *)0xdeadbeef;
init.flags = FASTRPC_INIT_CREATE;
@@ -92,7 +92,7 @@
unmap.va = 0;
unmap.fd = dma_fd;
- unmap.len = PAGE_SIZE;
+ unmap.len = getpagesize();
set_affinity(cpu);
@@ -133,7 +133,7 @@
goto out_dev;
}
- dma_fd = ion_alloc(PAGE_SIZE);
+ dma_fd = ion_alloc(getpagesize());
if (dma_fd < 0) {
ret = -1;
goto out_dev;
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2021-0925/poc.cpp b/hostsidetests/securitybulletin/securityPatch/CVE-2021-0925/poc.cpp
index 085f6e8..a9d3dff 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2021-0925/poc.cpp
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2021-0925/poc.cpp
@@ -31,10 +31,11 @@
struct sigaction new_action, old_action;
void sigsegv_handler(int signum, siginfo_t *info, void *context) {
if (testInProgress && info->si_signo == SIGSEGV) {
- size_t pageSize = getpagesize();
- if (pageSize) {
- char *vulnPtrGuardPage = (char *)((size_t)vulnPtr & PAGE_MASK) + pageSize;
- char *faultPage = (char *)((size_t)info->si_addr & PAGE_MASK);
+ const size_t page_size = getpagesize();
+ const size_t page_mask = (~(page_size - 1));
+ if (page_size) {
+ char *vulnPtrGuardPage = (char *)((size_t)vulnPtr & page_mask) + page_size;
+ char *faultPage = (char *)((size_t)info->si_addr & page_mask);
if (faultPage == vulnPtrGuardPage) {
(*old_action.sa_sigaction)(signum, info, context);
return;
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2021-1906/poc.c b/hostsidetests/securitybulletin/securityPatch/CVE-2021-1906/poc.c
index f8eaee4..56cfe6d 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2021-1906/poc.c
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2021-1906/poc.c
@@ -141,29 +141,31 @@
}
void trigger(int fd, uintptr_t start, uintptr_t end) {
- void *hostptr = mmap((void *)start, 2 * PAGE_SIZE, PROT_READ | PROT_WRITE,
+ const size_t page_size = getpagesize();
+ void *hostptr = mmap((void *)start, 2 * page_size, PROT_READ | PROT_WRITE,
MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, -1, 0);
- mprotect((void *)((uintptr_t)hostptr + PAGE_SIZE), PAGE_SIZE, PROT_NONE);
+ mprotect((void *)((uintptr_t)hostptr + page_size), page_size, PROT_NONE);
gpu_map_user_mem(fd, (uintptr_t)hostptr, end - start, 0,
KGSL_MEMFLAGS_USE_CPU_MAP, NULL);
- munmap(hostptr, 2 * PAGE_SIZE);
+ munmap(hostptr, 2 * page_size);
}
int main(void) {
+ const size_t page_size = getpagesize();
int kgsl_fd = kgsl_init();
unsigned long gpu_addr = 0;
unsigned long next_gpu_addr = 0;
FAIL_CHECK(!(kgsl_fd < 0));
- if (create_code_page(kgsl_fd, 4 * PAGE_SIZE, &code_page_cpu_addr,
+ if (create_code_page(kgsl_fd, 4 * page_size, &code_page_cpu_addr,
&code_page_gpu_addr) < 0) {
close(kgsl_fd);
return EXIT_FAILURE;
}
- next_gpu_addr = gpu_mem_alloc(kgsl_fd, PAGE_SIZE, 0);
+ next_gpu_addr = gpu_mem_alloc(kgsl_fd, page_size, 0);
gpu_sharedmem_free(kgsl_fd, next_gpu_addr);
trigger(kgsl_fd, next_gpu_addr, EXPLOIT_VULN_ADDR);
gpu_addr = gpu_mem_alloc(kgsl_fd, 0x600000, 0);
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2021-39623/poc.cpp b/hostsidetests/securitybulletin/securityPatch/CVE-2021-39623/poc.cpp
index d9e38ba..280253d 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2021-39623/poc.cpp
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2021-39623/poc.cpp
@@ -43,14 +43,15 @@
void *mmap(void *addr, size_t length, int prot, int flags, int fd,
off_t offset) {
+ const size_t page_size = getpagesize();
real_mmap = (mmap_t)dlsym(RTLD_NEXT, "mmap");
if (!real_mmap) {
exit(EXIT_FAILURE);
}
if (length == kTargetBufferSize) {
- char *tmp_ptr = (char *)real_mmap(addr, length + PAGE_SIZE, prot,
+ char *tmp_ptr = (char *)real_mmap(addr, length + page_size, prot,
flags | MAP_ANONYMOUS, -1, offset);
- mprotect(tmp_ptr + length, PAGE_SIZE, PROT_NONE);
+ mprotect(tmp_ptr + length, page_size, PROT_NONE);
return tmp_ptr;
}
return real_mmap(addr, length, prot, flags, fd, offset);
diff --git a/hostsidetests/securitybulletin/securityPatch/CVE-2023-4272/poc.c b/hostsidetests/securitybulletin/securityPatch/CVE-2023-4272/poc.c
index 2d07bd8..7c32888 100644
--- a/hostsidetests/securitybulletin/securityPatch/CVE-2023-4272/poc.c
+++ b/hostsidetests/securitybulletin/securityPatch/CVE-2023-4272/poc.c
@@ -40,6 +40,7 @@
#define KBASE_IOCTL_MEM_IMPORT _IOWR(KBASE_IOCTL_TYPE, 22, union kbase_ioctl_mem_import)
int main(void) {
+ const size_t page_size = getpagesize();
int mali_fd = open("/dev/mali0", O_RDWR);
if (mali_fd < 0) {
printf("Failed to open /dev/mali0!");
@@ -59,7 +60,7 @@
// make a single-page anonymous mapping
void *anon_mapping =
- mmap(NULL, PAGE_SIZE, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
+ mmap(NULL, page_size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
if (anon_mapping == MAP_FAILED) {
printf("mmap failed!");
teardown(&gpu_info);
@@ -80,7 +81,7 @@
int result = ioctl(mali_fd, KBASE_IOCTL_MEM_IMPORT, &mi);
// free the allocated memory
- munmap(anon_mapping, PAGE_SIZE);
+ munmap(anon_mapping, page_size);
teardown(&gpu_info);
if (result == 0) {
diff --git a/hostsidetests/securitybulletin/utils/mali_gpu_utils/src/mali_gpu_utils.c b/hostsidetests/securitybulletin/utils/mali_gpu_utils/src/mali_gpu_utils.c
index cf47d27..e44a167 100644
--- a/hostsidetests/securitybulletin/utils/mali_gpu_utils/src/mali_gpu_utils.c
+++ b/hostsidetests/securitybulletin/utils/mali_gpu_utils/src/mali_gpu_utils.c
@@ -44,8 +44,8 @@
}
// Map tracking page
- gpu_info->tracking_page =
- mmap(NULL, PAGE_SIZE, PROT_NONE, MAP_SHARED, mali_fd, BASE_MEM_MAP_TRACKING_HANDLE);
+ gpu_info->tracking_page = mmap(NULL, getpagesize(), PROT_NONE, MAP_SHARED,
+ mali_fd, BASE_MEM_MAP_TRACKING_HANDLE);
if (gpu_info->tracking_page == MAP_FAILED) {
printf("Failed to map tracking page!");
return EXIT_FAILURE;
@@ -89,7 +89,7 @@
void teardown(struct mali_gpu_info* gpu_info) {
if (!(gpu_info->tracking_page)) {
- munmap(gpu_info->tracking_page, PAGE_SIZE);
+ munmap(gpu_info->tracking_page, getpagesize());
gpu_info->tracking_page = NULL;
}
}
diff --git a/hostsidetests/statsdatom/src/android/cts/statsdatom/wifi/OWNERS b/hostsidetests/statsdatom/src/android/cts/statsdatom/wifi/OWNERS
index 6601b06..1e47a8e 100644
--- a/hostsidetests/statsdatom/src/android/cts/statsdatom/wifi/OWNERS
+++ b/hostsidetests/statsdatom/src/android/cts/statsdatom/wifi/OWNERS
@@ -1,5 +1,2 @@
-narcisaam@google.com
-dorindrimus@google.com
-vtrifonov@google.com
-afaraone@google.com
-leoneljeronimo@google.com
+xincheny@google.com
+arjundhaliwal@google.com
\ No newline at end of file
diff --git a/hostsidetests/theme/OWNERS b/hostsidetests/theme/OWNERS
index aaea84f..32162ef 100644
--- a/hostsidetests/theme/OWNERS
+++ b/hostsidetests/theme/OWNERS
@@ -1,2 +1 @@
# Bug component: 25700
-aelias@google.com
\ No newline at end of file
diff --git a/hostsidetests/videoencodingminimum/Android.bp b/hostsidetests/videoencodingminimum/Android.bp
index d6ca539..543f3b1 100644
--- a/hostsidetests/videoencodingminimum/Android.bp
+++ b/hostsidetests/videoencodingminimum/Android.bp
@@ -20,16 +20,20 @@
name: "CtsVideoQualityFloorHostTestCases",
defaults: ["cts_defaults"],
srcs: ["src/**/*.java"],
+ static_libs: [
+ "cts-host-utils",
+ ],
libs: [
"cts-tradefed",
"cts-shim-host-lib",
"tradefed",
],
test_suites: [
- // need to straighten out host tool dependencies before it goes back into CTS
- // (ffmpeg and mediainfo used in scripts)
- // "cts",
+ "cts",
"cts-shim-host-lib",
"tradefed",
],
+ data: [
+ ":VideoEncodingMinApp",
+ ],
}
diff --git a/hostsidetests/videoencodingminimum/AndroidTest.xml b/hostsidetests/videoencodingminimum/AndroidTest.xml
index c0533e6..fb771d8 100644
--- a/hostsidetests/videoencodingminimum/AndroidTest.xml
+++ b/hostsidetests/videoencodingminimum/AndroidTest.xml
@@ -20,6 +20,10 @@
<option name="config-descriptor:metadata" key="parameter" value="not_instant_app" />
<option name="config-descriptor:metadata" key="parameter" value="not_multi_abi" />
<option name="config-descriptor:metadata" key="parameter" value="secondary_user" />
+ <target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
+ <option name="cleanup-apks" value="true" />
+ <option name="test-file-name" value="VideoEncodingMinApp.apk" />
+ </target_preparer>
<test class="com.android.compatibility.common.tradefed.testtype.JarHostTest" >
<option name="jar" value="CtsVideoQualityFloorHostTestCases.jar" />
</test>
diff --git a/hostsidetests/videoencodingminimum/OWNERS b/hostsidetests/videoencodingminimum/OWNERS
index 75c3cdb..a267ef9 100644
--- a/hostsidetests/videoencodingminimum/OWNERS
+++ b/hostsidetests/videoencodingminimum/OWNERS
@@ -1,5 +1,4 @@
# Bug component: 1344
+include platform/frameworks/av:/media/janitors/avic_OWNERS
include platform/frameworks/av:/media/OWNERS
essick@google.com
-dichenzhang@google.com
-
diff --git a/hostsidetests/videoencodingminimum/app/Android.bp b/hostsidetests/videoencodingminimum/app/Android.bp
new file mode 100644
index 0000000..32144b4
--- /dev/null
+++ b/hostsidetests/videoencodingminimum/app/Android.bp
@@ -0,0 +1,36 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+android_test {
+ name: "VideoEncodingMinApp",
+ defaults: ["cts_defaults"],
+ compile_multilib: "both",
+ static_libs: [
+ "compatibility-device-util-axt",
+ "ctsmediav2common",
+ "ctstestrunner-axt",
+ ],
+ platform_apis: true,
+ srcs: ["src/**/*.java"],
+ // Tag this module as a cts test artifact
+ test_suites: [
+ "cts",
+ "general-tests",
+ ],
+ min_sdk_version: "29",
+}
diff --git a/hostsidetests/videoencodingminimum/app/AndroidManifest.xml b/hostsidetests/videoencodingminimum/app/AndroidManifest.xml
new file mode 100644
index 0000000..3d25235
--- /dev/null
+++ b/hostsidetests/videoencodingminimum/app/AndroidManifest.xml
@@ -0,0 +1,44 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!--
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ -->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ package="android.videoencodingmin.app">
+
+ <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
+ <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
+ <uses-permission android:name="android.permission.ACCESS_MEDIA_LOCATION" />
+ <uses-permission android:name="android.permission.READ_MEDIA_AUDIO" />
+ <uses-permission android:name="android.permission.READ_MEDIA_VIDEO" />
+ <uses-permission android:name="android.permission.READ_MEDIA_IMAGES" />
+ <uses-permission android:name="android.permission.READ_MEDIA_VISUAL_USER_SELECTED" />
+
+ <application
+ android:debuggable="true"
+ android:allowClearUserData="true"
+ android:allowBackup="true"
+ android:requestLegacyExternalStorage="true"
+ android:usesCleartextTraffic="true">
+ </application>
+ <uses-sdk android:minSdkVersion="29" android:targetSdkVersion="31" />
+ <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+ android:targetPackage="android.videoencodingmin.app"
+ android:label="Video encoding app for android.media" >
+ <meta-data
+ android:name="listener"
+ android:value="com.android.cts.runner.CtsTestRunListener" />
+ </instrumentation>
+</manifest>
diff --git a/hostsidetests/videoencodingminimum/app/README.md b/hostsidetests/videoencodingminimum/app/README.md
new file mode 100644
index 0000000..5fb5877
--- /dev/null
+++ b/hostsidetests/videoencodingminimum/app/README.md
@@ -0,0 +1,38 @@
+## Video Quality Floor CTS Test APK
+These tests are not run directly; they run as part of the host-side test CtsVideoQualityFloorHostTestCases. The host-side test sends an input clip and encoding configuration parameters to this APK via a JSON file. The APK parses that information, performs the encoding, and stores the encoded output to disk. The host-side test then pulls the output for further analysis. The expected JSON layout is sketched below.
+
+### Commands
+```sh
+$ atest CtsVideoQualityFloorHostTestCases -- --module-arg CtsVideoQualityFloorHostTestCases:instrumentation-arg:conf-json:=test.json
+```
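+
+### Input configuration
+The JSON files follow the structure parsed by VideoTranscoderTest: a top-level array whose first
+object carries RefFileName, RefMediaType, TestMediaType, Width, Height, EncoderType and a
+CodecConfigs array. A minimal sketch of one such file; the values below are purely illustrative
+(clip, media types, bitrate, profile, level and output name are examples, not the shipped configuration):
+```json
+[
+  {
+    "RefFileName": "Fireworks.mp4",
+    "RefMediaType": "video/avc",
+    "TestMediaType": "video/avc",
+    "Width": 1920,
+    "Height": 1080,
+    "EncoderType": "hw",
+    "CodecConfigs": [
+      {
+        "KeyFrameInterval": 1,
+        "MaxBFrames": 0,
+        "BitRate": 4000000,
+        "Profile": 1,
+        "Level": 256,
+        "EncodedFileName": "Fireworks_1080p_avcBaseline3.mp4"
+      }
+    ]
+  }
+]
+```
+The host-side test enumerates configurations named <resolution>-<clip>-<codecInfo>.json (for
+example, 1080p-Fireworks-avcBaseline3.json), so the value passed via conf-json should match one of
+the JSON files pushed to the device.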
diff --git a/hostsidetests/videoencodingminimum/app/src/android/videoencodingmin/app/VideoTranscoderTest.java b/hostsidetests/videoencodingminimum/app/src/android/videoencodingmin/app/VideoTranscoderTest.java
new file mode 100644
index 0000000..b2b8611
--- /dev/null
+++ b/hostsidetests/videoencodingminimum/app/src/android/videoencodingmin/app/VideoTranscoderTest.java
@@ -0,0 +1,202 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.videoencodingmin.app;
+
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
+import static android.mediav2.common.cts.CodecTestBase.CONTEXT;
+import static android.mediav2.common.cts.CodecTestBase.MEDIA_CODEC_LIST_REGULAR;
+import static android.mediav2.common.cts.CodecTestBase.selectCodecs;
+import static android.mediav2.common.cts.DecodeStreamToYuv.getFormatInStream;
+import static android.os.Environment.buildPath;
+
+import android.Manifest;
+import android.content.pm.PackageManager;
+import android.media.MediaFormat;
+import android.mediav2.common.cts.CodecEncoderSurfaceTestBase;
+import android.mediav2.common.cts.CodecTestBase;
+import android.mediav2.common.cts.EncoderConfigParams;
+import android.mediav2.common.cts.OutputManager;
+
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import androidx.test.filters.LargeTest;
+import androidx.test.platform.app.InstrumentationRegistry;
+
+import com.android.compatibility.common.util.Preconditions;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.junit.Assert;
+import org.junit.Assume;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+
+/**
+ * Test transcoding using the MediaCodec API.
+ * <p>
+ * The test decodes an input clip to a surface. This decoded output is fed as input to the
+ * encoder. Assuming no frame drops, the test expects:
+ * <ul>
+ *     <li>The number of encoded frames to be identical to the number of frames in the input
+ *     clip.</li>
+ *     <li>The encoder output timestamp list to be identical to the decoder input timestamp
+ *     list.</li>
+ * </ul>
+ * <p>
+ * The test has a provision to validate the encoded output by computing PSNR against the input.
+ * This is, however, disabled because VMAF is chosen for analysis, which is done on the host side.
+ */
+@RunWith(AndroidJUnit4.class)
+public class VideoTranscoderTest {
+ private static final String MEDIA_DIR = "/sdcard/vqf/input/";
+ public static final String ENC_CONFIG_JSON = "conf-json";
+ private static final String ENC_CONFIG_FILE;
+ private static String PATH_PREFIX;
+ public static final int DEFAULT_TEST_TIMEOUT_MS = 360000;
+
+ private String mEncoderName;
+ private String mEncMediaType;
+ private String mDecoderName;
+ private String mTestFileMediaType;
+ private String mTestFile;
+ private EncoderConfigParams[] mEncCfgParams;
+ private String[] mOutputFileNames;
+ private int mDecColorFormat;
+
+ static {
+ android.os.Bundle args = InstrumentationRegistry.getArguments();
+ ENC_CONFIG_FILE = args.getString(ENC_CONFIG_JSON);
+ }
+
+ static class TestTranscode extends CodecEncoderSurfaceTestBase {
+ private final String mOutputFileName;
+
+ TestTranscode(String encoder, String mediaType, String decoder, String testFileMediaType,
+ String testFile, EncoderConfigParams encCfgParams, String outputFileName,
+ int decColorFormat, boolean isOutputToneMapped, boolean usePersistentSurface,
+ String allTestParams) {
+ super(encoder, mediaType, decoder, testFileMediaType, testFile, encCfgParams,
+ decColorFormat, isOutputToneMapped, usePersistentSurface, allTestParams);
+ mOutputFileName = outputFileName;
+ }
+
+ @Override
+ public void setUpCodecEncoderSurfaceTestBase()
+ throws IOException, CloneNotSupportedException {
+ super.setUpCodecEncoderSurfaceTestBase();
+ mEncoderFormat = mEncCfgParams.getFormat();
+ }
+
+ private String getTempFilePath(String infix) throws IOException {
+ String totalPath = PATH_PREFIX + infix + ".mp4";
+ new FileOutputStream(totalPath).close();
+ return totalPath;
+ }
+
+ public void doTranscode()
+ throws IOException, InterruptedException, CloneNotSupportedException {
+ try {
+ setUpCodecEncoderSurfaceTestBase();
+ encodeToMemory(false, false, false, new OutputManager(), true,
+ getTempFilePath(mOutputFileName));
+ } finally {
+ tearDownCodecEncoderSurfaceTestBase();
+ }
+ }
+ }
+
+ private void parseEncoderConfigurationFile(String jsonPath) throws JSONException, IOException {
+ Preconditions.assertTestFileExists(jsonPath);
+ String jsonString =
+ new String(Files.readAllBytes(Paths.get(jsonPath)), StandardCharsets.UTF_8);
+ JSONArray jsonArray = new JSONArray(jsonString);
+ JSONObject obj = jsonArray.getJSONObject(0);
+ mTestFile = MEDIA_DIR + "samples/" + obj.getString("RefFileName");
+ mTestFileMediaType = obj.getString("RefMediaType");
+ mEncMediaType = obj.getString("TestMediaType");
+ int width = obj.getInt("Width");
+ int height = obj.getInt("Height");
+ String componentType = obj.getString("EncoderType");
+ CodecTestBase.ComponentClass cType = CodecTestBase.ComponentClass.ALL;
+ if (componentType.equals("hw")) {
+ cType = CodecTestBase.ComponentClass.HARDWARE;
+ } else if (componentType.equals("sw")) {
+ cType = CodecTestBase.ComponentClass.SOFTWARE;
+ }
+ mDecColorFormat = COLOR_FormatSurface;
+ JSONArray codecConfigs = obj.getJSONArray("CodecConfigs");
+ mEncCfgParams = new EncoderConfigParams[codecConfigs.length()];
+ mOutputFileNames = new String[codecConfigs.length()];
+ for (int i = 0; i < codecConfigs.length(); i++) {
+ JSONObject codecConfig = codecConfigs.getJSONObject(i);
+ mEncCfgParams[i] = new EncoderConfigParams.Builder(mEncMediaType)
+ .setWidth(width)
+ .setHeight(height)
+ .setKeyFrameInterval(codecConfig.getInt("KeyFrameInterval"))
+ .setMaxBFrames(codecConfig.getInt("MaxBFrames"))
+ .setBitRate(codecConfig.getInt("BitRate"))
+ .setProfile(codecConfig.getInt("Profile"))
+ .setLevel(codecConfig.getInt("Level"))
+ .setColorFormat(COLOR_FormatSurface)
+ .build();
+ String outFileName = codecConfig.getString("EncodedFileName");
+ mOutputFileNames[i] = outFileName.substring(0, outFileName.lastIndexOf('.'));
+ }
+ MediaFormat format = getFormatInStream(mTestFileMediaType, mTestFile);
+ mDecoderName = MEDIA_CODEC_LIST_REGULAR.findDecoderForFormat(format);
+ ArrayList<MediaFormat> formats = new ArrayList<>();
+ for (EncoderConfigParams param : mEncCfgParams) {
+ formats.add(param.getFormat());
+ }
+ ArrayList<String> codecs = selectCodecs(mEncMediaType, formats, null, true, cType);
+ if (!codecs.isEmpty()) mEncoderName = codecs.get(0);
+ }
+
+ @LargeTest
+ @Test(timeout = DEFAULT_TEST_TIMEOUT_MS)
+ public void testTranscode() throws IOException, InterruptedException,
+ JSONException, CloneNotSupportedException {
+ Assume.assumeTrue("Test did not receive config file for encoding", ENC_CONFIG_FILE != null);
+ parseEncoderConfigurationFile(MEDIA_DIR + "json/" + ENC_CONFIG_FILE);
+ Assume.assumeTrue("Found no encoder supporting the config file", mEncoderName != null);
+ Assume.assumeTrue("Found no decoder supporting the config file", mDecoderName != null);
+ Assert.assertEquals("Apk does not have permissions to write to external storage",
+ PackageManager.PERMISSION_GRANTED,
+ CONTEXT.checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE));
+ File pub = new File("/sdcard/vqf/output/");
+ File dir = buildPath(pub,
+ "output_" + ENC_CONFIG_FILE.substring(0, ENC_CONFIG_FILE.lastIndexOf('.')));
+ if (!dir.exists()) {
+ Assert.assertTrue("Unable to create dir " + dir.getAbsolutePath(), dir.mkdirs());
+ }
+ PATH_PREFIX = dir.getAbsolutePath() + File.separator;
+ for (int i = 0; i < mEncCfgParams.length; i++) {
+ TestTranscode ep = new TestTranscode(mEncoderName, mEncMediaType,
+ mDecoderName, mTestFileMediaType, mTestFile, mEncCfgParams[i],
+ mOutputFileNames[i], mDecColorFormat, false, false, "");
+ ep.doTranscode();
+ }
+ }
+}
diff --git a/hostsidetests/videoencodingminimum/src/android/videoqualityfloor/cts/CtsVideoQualityFloorHostTest.java b/hostsidetests/videoencodingminimum/src/android/videoqualityfloor/cts/CtsVideoQualityFloorHostTest.java
index 90d8e88..83c6047 100644
--- a/hostsidetests/videoencodingminimum/src/android/videoqualityfloor/cts/CtsVideoQualityFloorHostTest.java
+++ b/hostsidetests/videoencodingminimum/src/android/videoqualityfloor/cts/CtsVideoQualityFloorHostTest.java
@@ -13,77 +13,110 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
+
package android.videoqualityfloor.cts;
-import android.platform.test.annotations.Presubmit;
+import android.cts.host.utils.DeviceJUnit4ClassRunnerWithParameters;
+import android.cts.host.utils.DeviceJUnit4Parameterized;
+import android.platform.test.annotations.AppModeFull;
-import com.android.tradefed.build.IBuildInfo;
+import com.android.ddmlib.testrunner.RemoteAndroidTestRunner;
+import com.android.tradefed.config.Option;
+import com.android.tradefed.config.OptionClass;
+import com.android.tradefed.device.DeviceNotAvailableException;
import com.android.tradefed.device.ITestDevice;
import com.android.tradefed.log.LogUtil;
-import com.android.tradefed.testtype.DeviceJUnit4ClassRunner;
-import com.android.tradefed.testtype.IAbi;
-import com.android.tradefed.testtype.IAbiReceiver;
-import com.android.tradefed.testtype.IBuildReceiver;
+import com.android.tradefed.result.CollectingTestListener;
+import com.android.tradefed.result.TestDescription;
+import com.android.tradefed.result.TestResult;
+import com.android.tradefed.result.TestRunResult;
import com.android.tradefed.testtype.IDeviceTest;
+import org.json.JSONArray;
+import org.json.JSONObject;
import org.junit.Assert;
+import org.junit.Assume;
import org.junit.Test;
import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.UseParametersRunnerFactory;
import java.io.BufferedReader;
import java.io.File;
+import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.Condition;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
-/**
- * Run the host-side video quality floor tests.
- * These encode a set of videos at particular resolutions/bitrates and
- * measure the quality of the output.
- * Measurement is with the VMAF tool.
- *
- * The body of this test is implemented in a test script, not within the java here.
- * This java code acquires the videos and the test script, unpacks them, executes the
- * script (which encodes and measures).
- */
-@RunWith(DeviceJUnit4ClassRunner.class)
-public class CtsVideoQualityFloorHostTest implements IAbiReceiver, IBuildReceiver, IDeviceTest {
+import javax.annotation.Nullable;
- static final String TAG = CtsVideoQualityFloorHostTest.class.getSimpleName();
- static final String BASE_URL =
- "https://dl.google.com/android/xts/cts/hostsidetests/videoqualityfloor/";
- static final String TEST_SCRIPT_TARBALL = "tests-20231004.tgz";
- static final String[] testClips = {
- "Fireworks.mp4",
- "Motorcycle.mp4",
- "MountainBike.mp4",
- "TreesAndGrass.mp4"
- };
- // test is not valid before sdk 31, aka Android 12, aka Android S
- static final int MINIMUM_VALID_SDK = 31;
+@AppModeFull(reason = "Instant apps cannot access the SD card")
+@RunWith(DeviceJUnit4Parameterized.class)
+@UseParametersRunnerFactory(DeviceJUnit4ClassRunnerWithParameters.RunnerFactory.class)
+@OptionClass(alias = "pc-veq-test")
+public class CtsVideoQualityFloorHostTest implements IDeviceTest {
+ private static final String RES_URL =
+ "https://storage.googleapis.com/android_media/cts/hostsidetests/videoqualityfloor/tests-1.0.tar.gz";
- /**
- * A reference to the build info.
- */
- private IBuildInfo mBuildInfo;
+ // variables related to host-side of the test
+ private static final int MINIMUM_VALID_SDK = 31;
+ // test is not valid before sdk 31, aka Android 12, aka Android S
- /**
- * A reference to the device under test.
- */
+ private static final Lock sLock = new ReentrantLock();
+ private static final Condition sCondition = sLock.newCondition();
+ private static boolean sIsTestSetUpDone = false;
+ // install apk, push necessary resources to device to run the test. lock/condition
+ // pair is to keep setupTestEnv() thread safe
+ private static File sHostWorkDir;
+
+ // Variables related to the device-side of the test. These need to be kept in sync with the
+ // definitions in VideoEncodingMinApp.apk.
+ private static final String DEVICE_IN_DIR = "/sdcard/vqf/input/";
+ private static final String DEVICE_OUT_DIR = "/sdcard/vqf/output/";
+ private static final String DEVICE_SIDE_TEST_PACKAGE = "android.videoencodingmin.app";
+ private static final String DEVICE_SIDE_TEST_CLASS =
+ "android.videoencodingmin.app.VideoTranscoderTest";
+ private static final String RUNNER = "androidx.test.runner.AndroidJUnitRunner";
+ private static final String TEST_CONFIG_INST_ARGS_KEY = "conf-json";
+ private static final long DEFAULT_SHELL_TIMEOUT_MILLIS = TimeUnit.MINUTES.toMillis(5);
+ private static final String TEST_TIMEOUT_INST_ARGS_KEY = "timeout_msec";
+ private static final long DEFAULT_TEST_TIMEOUT_MILLIS = TimeUnit.MINUTES.toMillis(3);
+
+ // local variables related to host-side of the test
+ private final String mJsonName;
private ITestDevice mDevice;
- /**
- * A reference to the ABI under test.
- */
- private IAbi mAbi;
+ @Option(name = "reset", description = "Start with a fresh directory.")
+ private boolean mReset = false;
- @Override
- public void setAbi(IAbi abi) {
- mAbi = abi;
+ public CtsVideoQualityFloorHostTest(String jsonName) {
+ mJsonName = jsonName;
}
- @Override
- public void setBuild(IBuildInfo buildInfo) {
- mBuildInfo = buildInfo;
+ @Parameterized.Parameters(name = "{index}_{0}")
+ public static List<String> input() {
+ final List<String> args = new ArrayList<>();
+ String[] clips = {"Fireworks", "MountainBike", "Motorcycle", "TreesAndGrass"};
+ String[] resolutions = {"1080p", "720p", "540p", "480p"};
+ String[] codecInfos = {"avcBaseline3", "avcHigh4", "avcHigh52", "hevcMain3"};
+
+ for (String clip : clips) {
+ for (String res : resolutions) {
+ for (String info : codecInfos) {
+ args.add(res + "-" + clip + "-" + info + ".json");
+ }
+ }
+ }
+ return args;
}
@Override
@@ -96,144 +129,231 @@
return mDevice;
}
- private String getProperty(String prop) throws Exception {
- return mDevice.executeShellCommand("getprop " + prop).replace("\n", "");
+ /**
+ * Sets up the necessary environment for the video encoding quality test.
+ */
+ public void setupTestEnv() throws Exception {
+ String sdkAsString = getDevice().getProperty("ro.build.version.sdk");
+ int sdk = Integer.parseInt(sdkAsString);
+ Assume.assumeTrue("Test requires sdk >= " + MINIMUM_VALID_SDK
+ + " test device has sdk = " + sdk, sdk >= MINIMUM_VALID_SDK);
+
+ Assert.assertTrue("Failed to install package on device : " + DEVICE_SIDE_TEST_PACKAGE,
+ getDevice().isPackageInstalled(DEVICE_SIDE_TEST_PACKAGE));
+
+ // set up host-side working directory
+ String tmpBase = System.getProperty("java.io.tmpdir");
+ String dirName = "CtsVideoQualityFloorHostTest_" + getDevice().getSerialNumber();
+ String tmpDir = tmpBase + "/" + dirName;
+ LogUtil.CLog.i("tmpBase= " + tmpBase + " tmpDir =" + tmpDir);
+ sHostWorkDir = new File(tmpDir);
+ if (mReset || sHostWorkDir.isFile()) {
+ File cwd = new File(".");
+ runCommand("rm -rf " + tmpDir, cwd);
+ }
+ try {
+ if (!sHostWorkDir.isDirectory()) {
+ Assert.assertTrue("Failed to create directory : " + sHostWorkDir.getAbsolutePath(),
+ sHostWorkDir.mkdirs());
+ }
+ } catch (SecurityException e) {
+ LogUtil.CLog.e("Unable to establish temp directory " + sHostWorkDir.getPath());
+ }
+
+ // Clean up output folders before starting the test
+ runCommand("rm -rf " + "output_*", sHostWorkDir);
+
+ // Download the test suite tar file.
+ downloadFile(RES_URL, sHostWorkDir);
+
+ // Unpack the test suite tar file.
+ String fileName = RES_URL.substring(RES_URL.lastIndexOf('/') + 1);
+ int result = runCommand("tar xvzf " + fileName, sHostWorkDir);
+ Assert.assertEquals("Failed to untar " + fileName, 0, result);
+
+ // Push input files to device
+ Assert.assertNotNull("Failed to create directory " + DEVICE_IN_DIR + " on device ",
+ getDevice().executeAdbCommand("shell", "mkdir", "-p", DEVICE_IN_DIR));
+ Assert.assertTrue("Failed to push json files to " + DEVICE_IN_DIR + " on device ",
+ getDevice().syncFiles(new File(sHostWorkDir.getPath() + "/json/"), DEVICE_IN_DIR));
+ Assert.assertTrue("Failed to push mp4 files to " + DEVICE_IN_DIR + " on device ",
+ getDevice().syncFiles(new File(sHostWorkDir.getPath() + "/samples/"),
+ DEVICE_IN_DIR));
+
+ sIsTestSetUpDone = true;
}
/**
- * TODO: Add JavaDoc
+ * Verify the video encoding quality requirements for devices running Android 12 (S) or above.
*/
@Test
public void testEncoding() throws Exception {
+ // set up test environment
+ sLock.lock();
+ try {
+ if (!sIsTestSetUpDone) setupTestEnv();
+ sCondition.signalAll();
+ } finally {
+ sLock.unlock();
+ }
- String sdkAsString = getProperty("ro.build.version.sdk");
- int sdk = Integer.parseInt(sdkAsString);
- if (sdk < MINIMUM_VALID_SDK) {
- LogUtil.CLog.d("Test requires sdk >= " + MINIMUM_VALID_SDK
- + " test device has sdk =" + sdk );
+ // transcode input
+ runDeviceTests(DEVICE_SIDE_TEST_PACKAGE, DEVICE_SIDE_TEST_CLASS, "testTranscode");
+
+ // copy the encoded output from the device to the host.
+ String outDir = "output_" + mJsonName.substring(0, mJsonName.indexOf('.'));
+ File outHostPath = new File(sHostWorkDir, outDir);
+ try {
+ if (!outHostPath.isDirectory()) {
+ Assert.assertTrue("Failed to create directory : " + outHostPath.getAbsolutePath(),
+ outHostPath.mkdirs());
+ }
+ } catch (SecurityException e) {
+ LogUtil.CLog.e("Unable to establish output host directory : " + outHostPath.getPath());
+ }
+ String outDevPath = DEVICE_OUT_DIR + outDir;
+ Assert.assertTrue("Failed to pull mp4 files from " + outDevPath
+ + " to " + outHostPath.getPath(), getDevice().pullDir(outDevPath, outHostPath));
+ getDevice().deleteFile(outDevPath);
+
+ // Parse json file
+ String jsonPath = sHostWorkDir.getPath() + "/json/" + mJsonName;
+ String jsonString =
+ new String(Files.readAllBytes(Paths.get(jsonPath)), StandardCharsets.UTF_8);
+ JSONArray jsonArray = new JSONArray(jsonString);
+ JSONObject obj = jsonArray.getJSONObject(0);
+ String refFileName = obj.getString("RefFileName");
+
+ // Compute Vmaf
+ JSONArray codecConfigs = obj.getJSONArray("CodecConfigs");
+ int th = Runtime.getRuntime().availableProcessors() / 2;
+ th = Math.min(Math.max(1, th), 8);
+ String filter = "libvmaf=feature=name=psnr:model=version=vmaf_v0.6.1:n_threads=" + th;
+ for (int i = 0; i < codecConfigs.length(); i++) {
+ JSONObject codecConfig = codecConfigs.getJSONObject(i);
+ String outputName = codecConfig.getString("EncodedFileName");
+ outputName = outputName.substring(0, outputName.lastIndexOf("."));
+ String outputVmafPath = outDir + "/" + outputName + ".txt";
+ String cmd = "./bin/ffmpeg";
+ cmd += " -hide_banner";
+ cmd += " -i " + outDir + "/" + outputName + ".mp4" + " -an";
+ cmd += " -i " + "samples/" + refFileName + " -an";
+ cmd += " -filter_complex " + "\"" + filter + "\"";
+ cmd += " -f null -";
+ cmd += " > " + outputVmafPath + " 2>&1";
+ LogUtil.CLog.i("ffmpeg command : " + cmd);
+ int result = runCommand(cmd, sHostWorkDir);
+ Assert.assertEquals("Encountered error during vmaf computation.", 0, result);
+
+ String vmafLine = "";
+ try (BufferedReader reader = new BufferedReader(
+ new FileReader(sHostWorkDir.getPath() + "/" + outputVmafPath))) {
+ String token = "VMAF score: ";
+ String line;
+ while ((line = reader.readLine()) != null) {
+ if (line.contains(token)) {
+ vmafLine = line.substring(line.indexOf(token));
+ double vmaf_score = Double.parseDouble(vmafLine.substring(token.length()));
+ Assert.assertTrue("Video encoding failed for " + outputName
+ + " with vmaf score of " + vmaf_score, vmaf_score >= 70);
+ LogUtil.CLog.i(vmafLine);
+ break;
+ }
+ }
+ } catch (IOException e) {
+ throw new AssertionError("Unexpected IOException: " + e.getMessage());
+ }
+ }
+ LogUtil.CLog.i("Finished executing the process.");
+ }
+
+ private int runCommand(String command, File dir) throws IOException, InterruptedException {
+ Process p = new ProcessBuilder("/bin/sh", "-c", command)
+ .directory(dir)
+ .redirectErrorStream(true)
+ .redirectOutput(ProcessBuilder.Redirect.INHERIT)
+ .start();
+
+ BufferedReader stdInput = new BufferedReader(new InputStreamReader(p.getInputStream()));
+ BufferedReader stdError = new BufferedReader(new InputStreamReader(p.getErrorStream()));
+ String line;
+ while ((line = stdInput.readLine()) != null || (line = stdError.readLine()) != null) {
+ LogUtil.CLog.i(line + "\n");
+ }
+ return p.waitFor();
+ }
+
+ // Download the file at the given URL to the desired destination directory.
+ // Simple caching -- if the file already exists, we do not re-download.
+ private void downloadFile(String url, File destDir) {
+ String fileName = url.substring(RES_URL.lastIndexOf('/') + 1);
+ File destination = new File(destDir, fileName);
+
+ // save bandwidth, also allows a user to manually preload files
+ LogUtil.CLog.i("Do we already have a copy of file " + destination.getPath());
+ if (destination.isFile()) {
+ LogUtil.CLog.i("Skipping re-download of file " + destination.getPath());
return;
}
- Runtime runtime = Runtime.getRuntime();
- Process subproc;
- String tmpBase = System.getProperty("java.io.tmpdir");
- String dirName = "CtsVideoQualityFloorHostTest";
- String tmpDir = tmpBase + "/" + dirName;
+ String cmd = "wget -O " + destination.getPath() + " " + url;
+ LogUtil.CLog.i("wget_cmd = " + cmd);
- LogUtil.CLog.d("tmpBase= " + tmpBase + " tmpDir =" + tmpDir);
-
- if (false) {
- // start with a fresh directory
- File cwd = new File(".");
- runCmd("rm -fr " + tmpDir, cwd);
- }
-
- // set up test directory, make sure it exists
- File destination = new File(tmpDir);
- try {
- if (!destination.isDirectory()) {
- destination.mkdirs();
- }
- } catch (SecurityException e) {
- LogUtil.CLog.d("Unable to establish temp directory " + destination.getPath());
- }
- Assert.assertTrue(destination.isDirectory());
-
- /*
- *
- * https://dl.google.com/android/xts/cts/hostsidetests/videoqualityfloor/
- * + the name of the file
- *
- * turning the filename into the appropriate URL is done within downloadFile()
- */
-
- for (String clip : testClips) {
- LogUtil.CLog.d("downloading " + clip);
- downloadFile(clip, destination);
- }
-
- // get the test script
- LogUtil.CLog.d("downloading " + TEST_SCRIPT_TARBALL);
- downloadFile(TEST_SCRIPT_TARBALL, destination);
-
- // we *always* unpack and setup, even if the downloads were cached.
- // this avoids any /tmp cleanup problems.
- //
- // unpack the test scripts
- runCmd("tar xzf " + TEST_SCRIPT_TARBALL, destination);
-
- // run the setup scripts
- LogUtil.CLog.d("running set_up");
- runCmd("./set_up.sh", destination);
-
- // run the test script
- // return code is # of failures, so 0 == success
- String clipArgs = "";
- for (String clip : testClips) {
- clipArgs= clipArgs + " " + clip;
- }
- String targetSerial = getDevice().getSerialNumber();
- String testOutput = runCmd("./testit.sh -serial " + targetSerial + clipArgs, destination);
-
- LogUtil.CLog.d("(Successful) Output from testit.sh:\n\n" + testOutput);
-
- }
-
- // run the indicated command, in the indicated directory
- // returns the command output
- // asserts if the command finishes with non-zero exit code.
- private String runCmd(String cmd, File cwd) {
- Runtime runtime = Runtime.getRuntime();
- Process subproc;
- LogUtil.CLog.d("runCmd() cmd=" + cmd + " cwd=" + cwd.getPath());
int result = 0;
- String childStdout = "";
try {
- subproc = runtime.exec(cmd, null, cwd);
- subproc.waitFor(); // may wait forever; there are versions with timeout
- BufferedReader kidstream =
- new BufferedReader(new InputStreamReader(subproc.getInputStream()));
- String line;
- StringBuilder kidStdout = new StringBuilder();
- while ((line = kidstream.readLine()) != null) {
- kidStdout.append(line);
- kidStdout.append("\n");
- }
- childStdout = kidStdout.toString();
- result = subproc.exitValue();
+ result = runCommand(cmd, destDir);
} catch (IOException e) {
result = -2;
} catch (InterruptedException e) {
result = -3;
}
- Assert.assertTrue("runCmd(" + cmd + ") fails result= " + result
- + " and output:\n" + childStdout, result == 0);
-
- return childStdout;
+ Assert.assertEquals("download file failed.\n", 0, result);
}
- // download the indicated file (within the base_url folder) to
- // our desired destination/fileName.
- // simple caching -- if file exists, we do not redownload
- private void downloadFile(String fileName, File destDir) {
- Runtime runtime = Runtime.getRuntime();
- Process subproc;
+ private void runDeviceTests(String pkgName, @Nullable String testClassName,
+ @Nullable String testMethodName) throws DeviceNotAvailableException {
+ RemoteAndroidTestRunner testRunner = getTestRunner(pkgName, testClassName, testMethodName);
+ CollectingTestListener listener = new CollectingTestListener();
+ Assert.assertTrue(getDevice().runInstrumentationTests(testRunner, listener));
+ assertTestsPassed(listener.getCurrentRunResults());
+ }
- File destination = new File(destDir, fileName);
-
- // save bandwidth, also allows a user to manually preload files
- LogUtil.CLog.d("Do we already have a copy of file " + destination.getPath());
- if (destination.isFile()) {
- LogUtil.CLog.d("Skipping re-download of file " + destination.getPath());
- return;
+ private RemoteAndroidTestRunner getTestRunner(String pkgName, String testClassName,
+ String testMethodName) {
+ if (testClassName != null && testClassName.startsWith(".")) {
+ testClassName = pkgName + testClassName;
}
-
- String url = BASE_URL + fileName;
- // wget doesn't do 'file'
- // String cmd = "wget -O " + destination.getPath() + " " + url;
- String cmd = "curl --silent --output " + destination.getPath() + " " + url;
- runCmd(cmd, destDir);
+ RemoteAndroidTestRunner testRunner =
+ new RemoteAndroidTestRunner(pkgName, RUNNER, getDevice().getIDevice());
+ testRunner.setMaxTimeToOutputResponse(DEFAULT_SHELL_TIMEOUT_MILLIS, TimeUnit.MILLISECONDS);
+ testRunner.addInstrumentationArg(TEST_TIMEOUT_INST_ARGS_KEY,
+ Long.toString(DEFAULT_TEST_TIMEOUT_MILLIS));
+ testRunner.addInstrumentationArg(TEST_CONFIG_INST_ARGS_KEY, mJsonName);
+ if (testClassName != null && testMethodName != null) {
+ testRunner.setMethodName(testClassName, testMethodName);
+ } else if (testClassName != null) {
+ testRunner.setClassName(testClassName);
+ }
+ return testRunner;
}
+ private void assertTestsPassed(TestRunResult testRunResult) {
+ if (testRunResult.isRunFailure()) {
+ throw new AssertionError("Failed to successfully run device tests for "
+ + testRunResult.getName() + ": " + testRunResult.getRunFailureMessage());
+ }
+ if (testRunResult.getNumTests() != testRunResult.getPassedTests().size()) {
+ StringBuilder errorBuilder = new StringBuilder("On-device tests failed:\n");
+ for (Map.Entry<TestDescription, TestResult> resultEntry :
+ testRunResult.getTestResults().entrySet()) {
+ if (!resultEntry.getValue().getStatus()
+ .equals(com.android.ddmlib.testrunner.TestResult.TestStatus.PASSED)) {
+ errorBuilder.append(resultEntry.getKey().toString());
+ errorBuilder.append(":\n");
+ errorBuilder.append(resultEntry.getValue().getStackTrace());
+ }
+ }
+ throw new AssertionError(errorBuilder.toString());
+ }
+ }
}
-
diff --git a/tests/autofillservice/Android.bp b/tests/autofillservice/Android.bp
index 9b74f11..04e8ce3 100644
--- a/tests/autofillservice/Android.bp
+++ b/tests/autofillservice/Android.bp
@@ -50,4 +50,9 @@
":CtsMockInputMethod",
],
per_testcase_directory: true,
+ errorprone: {
+ javacflags: [
+ "-Xep:BareDotMetacharacter:WARN",
+ ],
+ },
}
diff --git a/tests/camera/src/android/hardware/camera2/cts/PerformanceTest.java b/tests/camera/src/android/hardware/camera2/cts/PerformanceTest.java
index 48d1580..fef6849 100644
--- a/tests/camera/src/android/hardware/camera2/cts/PerformanceTest.java
+++ b/tests/camera/src/android/hardware/camera2/cts/PerformanceTest.java
@@ -74,6 +74,7 @@
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;
+import java.util.Objects;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
@@ -1148,7 +1149,7 @@
mTestRule.openDevice(cameraId);
for (Range<Integer> fpsRange : aeFpsRanges) {
- if (fpsRange.getLower() == fpsRange.getUpper()) {
+ if (Objects.equals(fpsRange.getLower(), fpsRange.getUpper())) {
testPreviewJitterForFpsRange(cameraId,
HardwareBuffer.USAGE_COMPOSER_OVERLAY,
/*reduceJitter*/false, fpsRange);
@@ -1189,7 +1190,7 @@
mTestRule.openDevice(cameraId);
for (Range<Integer> fpsRange : aeFpsRanges) {
- if (fpsRange.getLower() == fpsRange.getUpper()) {
+ if (Objects.equals(fpsRange.getLower(), fpsRange.getUpper())) {
testPreviewJitterForFpsRange(cameraId,
HardwareBuffer.USAGE_GPU_SAMPLED_IMAGE,
/*reduceJitter*/false, fpsRange);
diff --git a/tests/devicepolicy/src/android/devicepolicy/cts/utils/PolicySetResultUtils.java b/tests/devicepolicy/src/android/devicepolicy/cts/utils/PolicySetResultUtils.java
index c32839b..8ee390c 100644
--- a/tests/devicepolicy/src/android/devicepolicy/cts/utils/PolicySetResultUtils.java
+++ b/tests/devicepolicy/src/android/devicepolicy/cts/utils/PolicySetResultUtils.java
@@ -46,7 +46,7 @@
.isEqualTo(resultKey)
.whereIntent().extras().key(EXTRA_POLICY_TARGET_USER_ID).integerValue()
.isEqualTo(targetUser)
- .waitForEvent(Duration.ofMinutes(1)).intent();
+ .waitForEvent(Duration.ofMinutes(2)).intent();
assertThat(receivedIntent).isNotNull();
// TODO: add checks on bundle values.
diff --git a/tests/framework/base/biometrics/src/android/server/biometrics/BiometricActivityTests.java b/tests/framework/base/biometrics/src/android/server/biometrics/BiometricActivityTests.java
index 7a54d0d..1c569db 100644
--- a/tests/framework/base/biometrics/src/android/server/biometrics/BiometricActivityTests.java
+++ b/tests/framework/base/biometrics/src/android/server/biometrics/BiometricActivityTests.java
@@ -27,6 +27,7 @@
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import android.hardware.biometrics.BiometricPrompt;
@@ -378,6 +379,8 @@
throws Exception {
assumeTrue(Utils.isFirstApiLevel29orGreater());
assumeTrue(mSensorProperties.isEmpty());
+ // TODO: b/331955301 need to update Auto biometric UI
+ assumeFalse(isCar());
try (CredentialSession credentialSession = new CredentialSession()) {
credentialSession.setCredential();
try (ActivitySession activitySession =
diff --git a/tests/framework/base/biometrics/src/android/server/biometrics/BiometricSimpleTests.java b/tests/framework/base/biometrics/src/android/server/biometrics/BiometricSimpleTests.java
index 4d7f066..b3e41ae 100644
--- a/tests/framework/base/biometrics/src/android/server/biometrics/BiometricSimpleTests.java
+++ b/tests/framework/base/biometrics/src/android/server/biometrics/BiometricSimpleTests.java
@@ -25,6 +25,7 @@
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
@@ -276,6 +277,8 @@
@Test
public void testWhenCredentialEnrolled() throws Exception {
assumeTrue(Utils.isFirstApiLevel29orGreater());
+ // TODO: b/331955301 need to update Auto biometric UI
+ assumeFalse(isCar());
try (CredentialSession session = new CredentialSession()) {
session.setCredential();
@@ -474,6 +477,8 @@
@Test
public void testSimpleCredentialAuth() throws Exception {
assumeTrue(Utils.isFirstApiLevel29orGreater());
+ // TODO: b/331955301 need to update Auto biometric UI
+ assumeFalse(isCar());
try (CredentialSession session = new CredentialSession()){
session.setCredential();
diff --git a/tests/framework/base/windowmanager/OWNERS b/tests/framework/base/windowmanager/OWNERS
index e6d90c4..a82e360 100644
--- a/tests/framework/base/windowmanager/OWNERS
+++ b/tests/framework/base/windowmanager/OWNERS
@@ -30,7 +30,6 @@
# Bug template url: https://b.corp.google.com/issues/new?component=316125&template=1018199
include platform/frameworks/base:/services/core/java/com/android/server/wm/OWNERS
-brufino@google.com
charlesccchen@google.com
lus@google.com
natanieljr@google.com
\ No newline at end of file
diff --git a/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/embedding/ActivityEmbeddingBoundsTests.java b/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/embedding/ActivityEmbeddingBoundsTests.java
index 4fd5ab2..37ad6f0 100644
--- a/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/embedding/ActivityEmbeddingBoundsTests.java
+++ b/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/embedding/ActivityEmbeddingBoundsTests.java
@@ -34,6 +34,7 @@
import android.server.wm.jetpack.utils.TestActivity;
import android.server.wm.jetpack.utils.TestActivityWithId;
import android.server.wm.jetpack.utils.TestConfigChangeHandlingActivity;
+import android.support.test.uiautomator.UiDevice;
import android.util.Pair;
import android.util.Size;
@@ -106,12 +107,13 @@
// Shrink the display by 10% to make the activities stacked
mReportedDisplayMetrics.setSize(new Size((int) (originalDisplaySize.getWidth() * 0.9),
(int) (originalDisplaySize.getHeight() * 0.9)));
+ UiDevice.getInstance(mInstrumentation).waitForIdle();
waitAndAssertResumedAndFillsTask(secondaryActivity);
waitAndAssertNotVisible(primaryActivity);
// Return the display to its original size and verify that the activities are split
mReportedDisplayMetrics.setSize(originalDisplaySize);
- mInstrumentation.waitForIdleSync();
+ UiDevice.getInstance(mInstrumentation).waitForIdle();
assertValidSplit(primaryActivity, secondaryActivity, splitPairRule);
}
}
diff --git a/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/embedding/ActivityEmbeddingPlaceholderTests.java b/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/embedding/ActivityEmbeddingPlaceholderTests.java
index 123135f..95fc965 100644
--- a/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/embedding/ActivityEmbeddingPlaceholderTests.java
+++ b/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/embedding/ActivityEmbeddingPlaceholderTests.java
@@ -34,6 +34,7 @@
import android.server.wm.jetpack.utils.ActivityEmbeddingUtil;
import android.server.wm.jetpack.utils.TestActivity;
import android.server.wm.jetpack.utils.TestActivityWithId;
+import android.support.test.uiautomator.UiDevice;
import android.util.Pair;
import android.util.Size;
import android.view.WindowMetrics;
@@ -289,6 +290,7 @@
final Size currentSize = mReportedDisplayMetrics.getSize();
mReportedDisplayMetrics.setSize(new Size((int) (currentSize.getWidth() * 0.9),
(int) (currentSize.getHeight() * 0.9)));
+ UiDevice.getInstance(mInstrumentation).waitForIdle();
// Verify that the placeholder was not finished and fills the task
waitAndAssertResumedAndFillsTask(placeholderActivity);
diff --git a/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/utils/ActivityEmbeddingUtil.java b/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/utils/ActivityEmbeddingUtil.java
index bc6bec1..a830746 100644
--- a/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/utils/ActivityEmbeddingUtil.java
+++ b/tests/framework/base/windowmanager/jetpack/src/android/server/wm/jetpack/utils/ActivityEmbeddingUtil.java
@@ -401,9 +401,6 @@
public static Rect getTaskBounds(@NonNull Activity activity, boolean shouldWaitForResume) {
final WindowManagerStateHelper wmState = new WindowManagerStateHelper();
final ComponentName activityName = activity.getComponentName();
- // Wait for display idle before getting the task bounds since the display may be still
- // resizing.
- wmState.waitForAppTransitionIdleOnDisplay(activity.getDisplayId());
if (shouldWaitForResume) {
wmState.waitAndAssertActivityState(activityName, STATE_RESUMED);
} else {
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecAsyncHandler.java b/tests/media/common/src/android/mediav2/common/cts/CodecAsyncHandler.java
index 344ce75..523fafd 100644
--- a/tests/media/common/src/android/mediav2/common/cts/CodecAsyncHandler.java
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecAsyncHandler.java
@@ -116,6 +116,20 @@
}
@Override
+ public void onCryptoError(@NonNull MediaCodec codec, @NonNull MediaCodec.CryptoException e) {
+ mErrorMsg = "################### Crypto Error Details #####################\n";
+ mErrorMsg += e.getMessage() + "\n";
+ mLock.lock();
+ try {
+ mSignalledError = true;
+ mCondition.signalAll();
+ } finally {
+ mLock.unlock();
+ }
+ Log.e(LOG_TAG, "received media codec crypto error : " + e.getMessage());
+ }
+
+ @Override
public void onOutputFormatChanged(@NonNull MediaCodec codec, @NonNull MediaFormat format) {
mLock.lock();
try {
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecDecoderBlockModelDrmTestBase.java b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderBlockModelDrmTestBase.java
new file mode 100644
index 0000000..5c33c49
--- /dev/null
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderBlockModelDrmTestBase.java
@@ -0,0 +1,147 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.common.cts;
+
+import static android.mediav2.common.cts.CodecDecoderDrmTestBase.setUpMediaDrmAndCrypto;
+
+import static org.junit.Assert.assertNotNull;
+
+import android.annotation.RequiresApi;
+import android.media.MediaCodec;
+import android.media.MediaCrypto;
+import android.media.MediaCryptoException;
+import android.media.MediaDrm;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.NotProvisionedException;
+import android.media.ResourceBusyException;
+import android.media.UnsupportedSchemeException;
+import android.os.Build;
+import android.util.Log;
+import android.util.Pair;
+
+import org.junit.After;
+
+import java.util.UUID;
+
+/**
+ * Wrapper class for trying and testing secure mediacodec decoder components in block model mode
+ */
+@RequiresApi(api = Build.VERSION_CODES.R)
+public class CodecDecoderBlockModelDrmTestBase extends CodecDecoderBlockModelTestBase {
+ private static final String LOG_TAG = CodecDecoderBlockModelDrmTestBase.class.getSimpleName();
+
+ protected MediaDrm mDrm;
+ protected MediaCrypto mCrypto;
+
+ public CodecDecoderBlockModelDrmTestBase(String codecName, String mediaType, String testFile,
+ String allTestParams) {
+ super(codecName, mediaType, testFile, allTestParams);
+ }
+
+ @After
+ public void tearDownCodecDecoderBlockModelDrmTestBase() {
+ tearDownCrypto();
+ }
+
+ public void setUpCrypto(UUID uuidCryptoScheme, byte[] drmInitData, byte[][] keys)
+ throws UnsupportedSchemeException, NotProvisionedException, ResourceBusyException,
+ MediaCryptoException {
+ Pair<MediaDrm, MediaCrypto> cryptoPair = setUpMediaDrmAndCrypto(uuidCryptoScheme,
+ drmInitData, keys);
+ assertNotNull("failed to set up crypto session \n" + mTestConfig + mTestEnv, cryptoPair);
+ mDrm = cryptoPair.first;
+ mCrypto = cryptoPair.second;
+ }
+
+ public void tearDownCrypto() {
+ if (mCrypto != null) {
+ mCrypto.release();
+ mCrypto = null;
+ }
+ if (mDrm != null) {
+ mDrm.close();
+ mDrm = null;
+ }
+ }
+
+
+ @Override
+ protected void configureCodec(MediaFormat format, boolean isAsyncUnUsed,
+ boolean signalEOSWithLastFrameUnUsed, boolean isEncoder, int flags) {
+ if (ENABLE_LOGS) {
+ if (!isAsyncUnUsed) {
+ Log.d(LOG_TAG, "Ignoring synchronous mode of operation request");
+ }
+ if (!signalEOSWithLastFrameUnUsed) {
+ Log.d(LOG_TAG, "Ignoring signal eos separately request");
+ }
+ }
+ flags |= MediaCodec.CONFIGURE_FLAG_USE_BLOCK_MODEL;
+ configureCodecCommon(format, true, true, isEncoder, flags);
+ mCodec.configure(format, mSurface, mCrypto, flags);
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "codec configured");
+ }
+ }
+
+ @Override
+ protected void enqueueInput(int bufferIndex) {
+ int sampleSize = (int) mExtractor.getSampleSize();
+ mLinearInputBlock.allocateBlock(mCodecName, sampleSize);
+ long pts = mExtractor.getSampleTime();
+ mExtractor.readSampleData(mLinearInputBlock.getBuffer(), mLinearInputBlock.getOffset());
+ int extractorFlags = mExtractor.getSampleFlags();
+ MediaCodec.CryptoInfo cryptoInfo = null;
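+ // Only encrypted samples carry CryptoInfo; clear samples are queued without it.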
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0) {
+ cryptoInfo = new MediaCodec.CryptoInfo();
+ mExtractor.getSampleCryptoInfo(cryptoInfo);
+ }
+ int codecFlags = 0;
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
+ }
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME;
+ }
+ if (!mExtractor.advance() && mSignalEOSWithLastFrame) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
+ mSawInputEOS = true;
+ }
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "input: id: " + bufferIndex + " size: " + sampleSize + " pts: " + pts
+ + " flags: " + codecFlags);
+ }
+ MediaCodec.QueueRequest request = mCodec.getQueueRequest(bufferIndex);
+ if (cryptoInfo != null) {
+ request.setEncryptedLinearBlock(mLinearInputBlock.getBlock(),
+ mLinearInputBlock.getOffset(), sampleSize, cryptoInfo);
+ } else {
+ request.setLinearBlock(mLinearInputBlock.getBlock(), mLinearInputBlock.getOffset(),
+ sampleSize);
+ }
+ request.setPresentationTimeUs(pts);
+ request.setFlags(codecFlags);
+ request.queue();
+ if (sampleSize > 0 && (codecFlags & (MediaCodec.BUFFER_FLAG_CODEC_CONFIG
+ | MediaCodec.BUFFER_FLAG_PARTIAL_FRAME)) == 0) {
+ mOutputBuff.saveInPTS(pts);
+ mInputCount++;
+ mLinearInputBlock.setOffset(mLinearInputBlock.getOffset() + sampleSize);
+ }
+ }
+}
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecDecoderBlockModelMultiAccessUnitDrmTestBase.java b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderBlockModelMultiAccessUnitDrmTestBase.java
new file mode 100644
index 0000000..c0cf1ac
--- /dev/null
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderBlockModelMultiAccessUnitDrmTestBase.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.common.cts;
+
+import static android.mediav2.common.cts.CodecDecoderDrmTestBase.setUpMediaDrmAndCrypto;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import android.annotation.RequiresApi;
+import android.media.MediaCodec;
+import android.media.MediaCrypto;
+import android.media.MediaCryptoException;
+import android.media.MediaDrm;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.NotProvisionedException;
+import android.media.ResourceBusyException;
+import android.media.UnsupportedSchemeException;
+import android.os.Build;
+import android.util.Log;
+import android.util.Pair;
+
+import org.junit.After;
+
+import java.util.ArrayDeque;
+import java.util.Locale;
+import java.util.UUID;
+
+/**
+ * Wrapper class for trying and testing secure mediacodec decoder components in block model large
+ * audio buffer mode
+ */
+@RequiresApi(api = Build.VERSION_CODES.R)
+public class CodecDecoderBlockModelMultiAccessUnitDrmTestBase
+ extends CodecDecoderBlockModelMultiAccessUnitTestBase {
+ private static final String LOG_TAG =
+ CodecDecoderBlockModelMultiAccessUnitDrmTestBase.class.getSimpleName();
+
+ protected MediaDrm mDrm;
+ protected MediaCrypto mCrypto;
+
+ public CodecDecoderBlockModelMultiAccessUnitDrmTestBase(String codecName, String mediaType,
+ String testFile, String allTestParams) {
+ super(codecName, mediaType, testFile, allTestParams);
+ }
+
+ @After
+ public void tearDownCodecDecoderBlockModelMultiAccessUnitDrmTestBase() {
+ tearDownCrypto();
+ }
+
+ public void setUpCrypto(UUID uuidCryptoScheme, byte[] drmInitData, byte[][] keys)
+ throws UnsupportedSchemeException, NotProvisionedException, ResourceBusyException,
+ MediaCryptoException {
+ Pair<MediaDrm, MediaCrypto> cryptoPair = setUpMediaDrmAndCrypto(uuidCryptoScheme,
+ drmInitData, keys);
+ assertNotNull("failed to set up crypto session \n" + mTestConfig + mTestEnv, cryptoPair);
+ mDrm = cryptoPair.first;
+ mCrypto = cryptoPair.second;
+ }
+
+ public void tearDownCrypto() {
+ if (mCrypto != null) {
+ mCrypto.release();
+ mCrypto = null;
+ }
+ if (mDrm != null) {
+ mDrm.close();
+ mDrm = null;
+ }
+ }
+
+ @Override
+ protected void configureCodec(MediaFormat format, boolean isAsyncUnUsed,
+ boolean signalEOSWithLastFrameUnUsed, boolean isEncoder, int flags) {
+ if (ENABLE_LOGS) {
+ if (!isAsyncUnUsed) {
+ Log.d(LOG_TAG, "Ignoring synchronous mode of operation request");
+ }
+ if (!signalEOSWithLastFrameUnUsed) {
+ Log.d(LOG_TAG, "Ignoring signal eos separately request");
+ }
+ }
+ flags |= MediaCodec.CONFIGURE_FLAG_USE_BLOCK_MODEL;
+ configureCodecCommon(format, true, true, isEncoder, flags);
+ mCodec.configure(format, mSurface, mCrypto, flags);
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "codec configured");
+ }
+ }
+
+ @Override
+ protected void enqueueInput(int bufferIndex) {
+ ArrayDeque<MediaCodec.BufferInfo> bufferInfos = new ArrayDeque<>();
+ ArrayDeque<MediaCodec.CryptoInfo> cryptoInfos = new ArrayDeque<>();
+ mLinearInputBlock.allocateBlock(mCodecName, mMaxInputSize);
+ int basePts = (int) mExtractor.getSampleTime();
+ boolean baseEncrypted =
+ ((mExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0);
+ while (true) {
+ int size = (int) mExtractor.getSampleSize();
+ if (size <= 0) break;
+ int deltaPts = (int) mExtractor.getSampleTime() - basePts;
+ assertTrue("Difference between basePts: " + basePts + " and current pts: "
+ + mExtractor.getSampleTime() + " should be greater than or equal "
+ + "to zero.\n" + mTestConfig + mTestEnv, deltaPts >= 0);
+ if (deltaPts / 1000 > mMaxInputLimitMs) {
+ break;
+ }
+ if (mLinearInputBlock.getOffset() + size <= mLinearInputBlock.getBufferCapacity()) {
+ mExtractor.readSampleData(mLinearInputBlock.getBuffer(),
+ mLinearInputBlock.getOffset());
+ } else {
+ if (mLinearInputBlock.getOffset() == 0) {
+ throw new RuntimeException(String.format(Locale.getDefault(),
+ "access unit size %d exceeds capacity of the buffer %d, unable to "
+ + "queue input", size, mLinearInputBlock.getBufferCapacity()));
+ }
+ break;
+ }
+ int extractorFlags = mExtractor.getSampleFlags();
+ long pts = mExtractor.getSampleTime();
+ boolean currEncrypted = (extractorFlags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0;
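+ // Encrypted and clear samples are never mixed within a single queue request.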
+ if (mLinearInputBlock.getOffset() != 0) {
+ if (baseEncrypted != currEncrypted) break;
+ }
+ if (currEncrypted) {
+ MediaCodec.CryptoInfo cryptoInfo = new MediaCodec.CryptoInfo();
+ mExtractor.getSampleCryptoInfo(cryptoInfo);
+ cryptoInfos.add(cryptoInfo);
+ }
+ int codecFlags = 0;
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
+ }
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME;
+ }
+ if (!mExtractor.advance()) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
+ mSawInputEOS = true;
+ }
+ MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
+ bufferInfo.set(mLinearInputBlock.getOffset(), size, pts, codecFlags);
+ mLinearInputBlock.setOffset(mLinearInputBlock.getOffset() + bufferInfo.size);
+ bufferInfos.add(bufferInfo);
+ }
+ if (bufferInfos.size() > 0) {
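+ // Queue the whole batch in one request, using the encrypted variant when the samples are encrypted.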
+ MediaCodec.QueueRequest request = mCodec.getQueueRequest(bufferIndex);
+ if (baseEncrypted) {
+ request.setMultiFrameEncryptedLinearBlock(mLinearInputBlock.getBlock(), bufferInfos,
+ cryptoInfos);
+ } else {
+ request.setMultiFrameLinearBlock(mLinearInputBlock.getBlock(), bufferInfos);
+ }
+ request.queue();
+ for (MediaCodec.BufferInfo info : bufferInfos) {
+ if (info.size > 0 && (info.flags & (MediaCodec.BUFFER_FLAG_CODEC_CONFIG
+ | MediaCodec.BUFFER_FLAG_PARTIAL_FRAME)) == 0) {
+ mOutputBuff.saveInPTS(info.presentationTimeUs);
+ mInputCount++;
+ }
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "input: id: " + bufferIndex + " size: " + info.size
+ + " pts: " + info.presentationTimeUs + " flags: " + info.flags);
+ }
+ }
+ }
+ }
+}
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecDecoderBlockModelMultiAccessUnitTestBase.java b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderBlockModelMultiAccessUnitTestBase.java
index 9332650..639a133 100644
--- a/tests/media/common/src/android/mediav2/common/cts/CodecDecoderBlockModelMultiAccessUnitTestBase.java
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderBlockModelMultiAccessUnitTestBase.java
@@ -64,6 +64,12 @@
}
@Override
+ protected void flushCodec() {
+ super.flushCodec();
+ mAsyncHandleMultiAccessUnits.clearQueues();
+ }
+
+ @Override
protected void resetContext(boolean isAsync, boolean signalEOSWithLastFrame) {
super.resetContext(isAsync, signalEOSWithLastFrame);
mMaxOutputSizeBytes = 0;
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecDecoderDrmTestBase.java b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderDrmTestBase.java
new file mode 100644
index 0000000..aa8a744
--- /dev/null
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderDrmTestBase.java
@@ -0,0 +1,243 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.common.cts;
+
+import static org.junit.Assert.assertNotNull;
+
+import android.media.MediaCodec;
+import android.media.MediaCrypto;
+import android.media.MediaCryptoException;
+import android.media.MediaDrm;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.NotProvisionedException;
+import android.media.ResourceBusyException;
+import android.media.UnsupportedSchemeException;
+import android.util.Base64;
+import android.util.Log;
+import android.util.Pair;
+
+import org.json.JSONArray;
+import org.json.JSONException;
+import org.json.JSONObject;
+import org.junit.After;
+
+import java.nio.ByteBuffer;
+import java.nio.charset.StandardCharsets;
+import java.util.UUID;
+import java.util.Vector;
+
+/**
+ * Wrapper class for trying and testing mediacodec decoder components in secure mode.
+ */
+public class CodecDecoderDrmTestBase extends CodecDecoderTestBase {
+ private static final String LOG_TAG = CodecDecoderDrmTestBase.class.getSimpleName();
+
+ protected MediaDrm mDrm;
+ protected MediaCrypto mCrypto;
+
+ public CodecDecoderDrmTestBase(String codecName, String mediaType, String testFile,
+ String allTestParams) {
+ super(codecName, mediaType, testFile, allTestParams);
+ }
+
+ @After
+ public void tearDownCodecDecoderDrmTestBase() {
+ tearDownCrypto();
+ }
+
+ private static int getKeyIds(byte[] keyRequestBlob, Vector<String> keyIds) {
+ if (0 == keyRequestBlob.length || keyIds == null) {
+ return 0;
+ }
+
+ String jsonLicenseRequest = new String(keyRequestBlob);
+ keyIds.clear();
+
+ try {
+ JSONObject license = new JSONObject(jsonLicenseRequest);
+ final JSONArray ids = license.getJSONArray("kids");
+ for (int i = 0; i < ids.length(); ++i) {
+ keyIds.add(ids.getString(i));
+ }
+ } catch (JSONException e) {
+ Log.e(LOG_TAG, "Invalid JSON license = " + jsonLicenseRequest);
+ return 0;
+ }
+ return keyIds.size();
+ }
+
+ private static String createJsonWebKeySet(Vector<String> keyIds, Vector<String> keys,
+ int keyType) {
+ StringBuilder jwkSet = new StringBuilder("{\"keys\":[");
+ for (int i = 0; i < keyIds.size(); ++i) {
+ String id = new String(keyIds.get(i).getBytes(StandardCharsets.UTF_8));
+ String key = new String(keys.get(i).getBytes(StandardCharsets.UTF_8));
+ jwkSet.append("{\"kty\":\"oct\",\"kid\":\"").append(id).append("\",\"k\":\"")
+ .append(key).append("\"}");
+ }
+ jwkSet.append("], \"type\":");
+ if (keyType == MediaDrm.KEY_TYPE_OFFLINE || keyType == MediaDrm.KEY_TYPE_RELEASE) {
+ jwkSet.append("\"persistent-license\" }");
+ } else {
+ jwkSet.append("\"temporary\" }");
+ }
+ return jwkSet.toString();
+ }
+
+ private static byte[] retrieveKeys(MediaDrm drm, String initDataType, byte[] sessionId,
+ byte[] drmInitData, int keyType, byte[][] clearKeyIds) {
+ MediaDrm.KeyRequest drmRequest = null;
+ try {
+ drmRequest = drm.getKeyRequest(sessionId, drmInitData, initDataType, keyType, null);
+ } catch (Exception e) {
+ e.printStackTrace();
+ Log.i(LOG_TAG, "Failed to get key request: " + e);
+ }
+ if (drmRequest == null) {
+ Log.e(LOG_TAG, "Failed getKeyRequest");
+ return null;
+ }
+
+ Vector<String> keyIds = new Vector<>();
+ if (0 == getKeyIds(drmRequest.getData(), keyIds)) {
+ Log.e(LOG_TAG, "No key ids found in initData");
+ return null;
+ }
+
+ if (clearKeyIds.length != keyIds.size()) {
+ Log.e(LOG_TAG, "Mismatch number of key ids and keys: ids=" + keyIds.size() + ", keys="
+ + clearKeyIds.length);
+ return null;
+ }
+
+ // Base64 encodes clearkeys. Keys are known to the application.
+ Vector<String> keys = new Vector<>();
+ for (byte[] clearKeyId : clearKeyIds) {
+ String clearKey = Base64.encodeToString(clearKeyId, Base64.NO_PADDING | Base64.NO_WRAP);
+ keys.add(clearKey);
+ }
+
+ String jwkSet = createJsonWebKeySet(keyIds, keys, keyType);
+ byte[] jsonResponse = jwkSet.getBytes(StandardCharsets.UTF_8);
+
+ try {
+ try {
+ return drm.provideKeyResponse(sessionId, jsonResponse);
+ } catch (IllegalStateException e) {
+ Log.e(LOG_TAG, "Failed to provide key response: " + e);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ Log.e(LOG_TAG, "Failed to provide key response: " + e);
+ }
+ return null;
+ }
+
+ static Pair<MediaDrm, MediaCrypto> setUpMediaDrmAndCrypto(UUID uuidCryptoScheme,
+ byte[] drmInitData, byte[][] keys) throws UnsupportedSchemeException,
+ NotProvisionedException, ResourceBusyException, MediaCryptoException {
+ MediaDrm drm = new MediaDrm(uuidCryptoScheme);
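+ // Re-fetch the clear keys whenever the plugin reports that keys are required or have expired.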
+ drm.setOnEventListener(
+ (MediaDrm mediaDrm, byte[] sessionId, int event, int extra, byte[] data) -> {
+ if (event == MediaDrm.EVENT_KEY_REQUIRED
+ || event == MediaDrm.EVENT_KEY_EXPIRED) {
+ retrieveKeys(mediaDrm, "cenc", sessionId, drmInitData,
+ MediaDrm.KEY_TYPE_STREAMING, keys);
+ }
+ });
+ byte[] sessionId = drm.openSession();
+ retrieveKeys(drm, "cenc", sessionId, drmInitData, MediaDrm.KEY_TYPE_STREAMING, keys);
+ if (sessionId != null) {
+ return Pair.create(drm, new MediaCrypto(uuidCryptoScheme, sessionId));
+ }
+ return null;
+ }
+
+ public void setUpCrypto(UUID uuidCryptoScheme, byte[] drmInitData, byte[][] keys)
+ throws UnsupportedSchemeException, NotProvisionedException, ResourceBusyException,
+ MediaCryptoException {
+ Pair<MediaDrm, MediaCrypto> cryptoPair = setUpMediaDrmAndCrypto(uuidCryptoScheme,
+ drmInitData, keys);
+ assertNotNull("failed to set up crypto session \n" + mTestConfig + mTestEnv, cryptoPair);
+ mDrm = cryptoPair.first;
+ mCrypto = cryptoPair.second;
+ }
+
+ public void tearDownCrypto() {
+ if (mCrypto != null) {
+ mCrypto.release();
+ mCrypto = null;
+ }
+ if (mDrm != null) {
+ mDrm.close();
+ mDrm = null;
+ }
+ }
+
+ @Override
+ protected void configureCodec(MediaFormat format, boolean isAsync,
+ boolean signalEosWithLastFrame, boolean isEncoder, int flags) {
+ configureCodecCommon(format, isAsync, signalEosWithLastFrame, isEncoder, flags);
+ mCodec.configure(format, mSurface, mCrypto, flags);
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "codec configured");
+ }
+ }
+
+ protected void enqueueInput(int bufferIndex) {
+ if (mExtractor.getSampleSize() < 0) {
+ enqueueEOS(bufferIndex);
+ } else {
+ ByteBuffer inputBuffer = mCodec.getInputBuffer(bufferIndex);
+ mExtractor.readSampleData(inputBuffer, 0);
+ int size = (int) mExtractor.getSampleSize();
+ long pts = mExtractor.getSampleTime();
+ int extractorFlags = mExtractor.getSampleFlags();
+ MediaCodec.CryptoInfo cryptoInfo = null;
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0) {
+ cryptoInfo = new MediaCodec.CryptoInfo();
+ mExtractor.getSampleCryptoInfo(cryptoInfo);
+ }
+ int codecFlags = 0;
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
+ }
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME;
+ }
+ if (!mExtractor.advance() && mSignalEOSWithLastFrame) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
+ mSawInputEOS = true;
+ }
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "input: id: " + bufferIndex + " size: " + size + " pts: " + pts
+ + " flags: " + codecFlags);
+ }
+ if (cryptoInfo != null) {
+ mCodec.queueSecureInputBuffer(bufferIndex, 0, cryptoInfo, pts, codecFlags);
+ } else {
+ mCodec.queueInputBuffer(bufferIndex, 0, size, pts, codecFlags);
+ }
+ if (size > 0 && (codecFlags & (MediaCodec.BUFFER_FLAG_CODEC_CONFIG
+ | MediaCodec.BUFFER_FLAG_PARTIAL_FRAME)) == 0) {
+ mOutputBuff.saveInPTS(pts);
+ mInputCount++;
+ }
+ }
+ }
+}
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecDecoderMultiAccessUnitDrmTestBase.java b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderMultiAccessUnitDrmTestBase.java
new file mode 100644
index 0000000..4c6a7bc
--- /dev/null
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderMultiAccessUnitDrmTestBase.java
@@ -0,0 +1,185 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.common.cts;
+
+import static android.mediav2.common.cts.CodecDecoderDrmTestBase.setUpMediaDrmAndCrypto;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+
+import android.media.MediaCodec;
+import android.media.MediaCrypto;
+import android.media.MediaCryptoException;
+import android.media.MediaDrm;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.NotProvisionedException;
+import android.media.ResourceBusyException;
+import android.media.UnsupportedSchemeException;
+import android.os.Build;
+import android.platform.test.annotations.RequiresFlagsEnabled;
+import android.util.Log;
+import android.util.Pair;
+
+import androidx.test.filters.SdkSuppress;
+
+import com.android.media.codec.flags.Flags;
+
+import org.junit.After;
+import org.junit.Assert;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.Locale;
+import java.util.UUID;
+
+/**
+ * Wrapper class for trying and testing mediacodec secure decoder components in large buffer mode
+ */
+@SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM, codeName = "VanillaIceCream")
+@RequiresFlagsEnabled(Flags.FLAG_LARGE_AUDIO_FRAME)
+public class CodecDecoderMultiAccessUnitDrmTestBase extends CodecDecoderMultiAccessUnitTestBase {
+ private static final String LOG_TAG =
+ CodecDecoderMultiAccessUnitDrmTestBase.class.getSimpleName();
+
+ protected MediaDrm mDrm;
+ protected MediaCrypto mCrypto;
+
+ public CodecDecoderMultiAccessUnitDrmTestBase(String codecName, String mediaType,
+ String testFile, String allTestParams) {
+ super(codecName, mediaType, testFile, allTestParams);
+ }
+
+ @After
+ public void tearDownCodecDecoderMultiAccessUnitDrmTestBase() {
+ tearDownCrypto();
+ }
+
+ public void setUpCrypto(UUID uuidCryptoScheme, byte[] drmInitData, byte[][] keys)
+ throws UnsupportedSchemeException, NotProvisionedException, ResourceBusyException,
+ MediaCryptoException {
+ Pair<MediaDrm, MediaCrypto> cryptoPair = setUpMediaDrmAndCrypto(uuidCryptoScheme,
+ drmInitData, keys);
+ assertNotNull("failed to set up crypto session \n" + mTestConfig + mTestEnv, cryptoPair);
+ mDrm = cryptoPair.first;
+ mCrypto = cryptoPair.second;
+ }
+
+ public void tearDownCrypto() {
+ if (mCrypto != null) {
+ mCrypto.release();
+ mCrypto = null;
+ }
+ if (mDrm != null) {
+ mDrm.close();
+ mDrm = null;
+ }
+ }
+
+ @Override
+ protected void configureCodec(MediaFormat format, boolean isAsync,
+ boolean signalEosWithLastFrame, boolean isEncoder, int flags) {
+ configureCodecCommon(format, isAsync, signalEosWithLastFrame, isEncoder, flags);
+ mCodec.configure(format, mSurface, mCrypto, flags);
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "codec configured");
+ }
+ }
+
+ protected void enqueueInput(int bufferIndex) {
+ Log.v(LOG_TAG, "enqueueInput: id: " + bufferIndex);
+ if (mExtractor.getSampleSize() < 0) {
+ enqueueEOS(bufferIndex);
+ } else {
+ ArrayDeque<MediaCodec.BufferInfo> bufferInfos = new ArrayDeque<>();
+ ArrayDeque<MediaCodec.CryptoInfo> cryptoInfos = new ArrayDeque<>();
+ ByteBuffer inputBuffer = mCodec.getInputBuffer(bufferIndex);
+ Assert.assertNotNull("error, getInputBuffer returned null.\n", inputBuffer);
+ int offset = 0;
+ int basePts = (int) mExtractor.getSampleTime();
+ boolean baseEncrypted =
+ ((mExtractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0);
+ boolean currEncrypted;
+ while (true) {
+ int size = (int) mExtractor.getSampleSize();
+ if (size <= 0) break;
+ int deltaPts = (int) mExtractor.getSampleTime() - basePts;
+ assertTrue("Difference between basePts: " + basePts + " and current pts: "
+ + mExtractor.getSampleTime() + " should be greater than or equal "
+ + "to zero.\n", deltaPts >= 0);
+ if (deltaPts / 1000 > mMaxInputLimitMs) {
+ break;
+ }
+ if (offset + size <= inputBuffer.capacity()) {
+ mExtractor.readSampleData(inputBuffer, offset);
+ } else {
+ if (offset == 0) {
+ throw new RuntimeException(String.format(Locale.getDefault(),
+ "access unit size %d exceeds capacity of the buffer %d, unable to "
+ + "queue input", size, inputBuffer.capacity()));
+ }
+ break;
+ }
+ int extractorFlags = mExtractor.getSampleFlags();
+ long pts = mExtractor.getSampleTime();
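+ // Stop batching if the encryption state differs from that of the first sample in the batch.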
+ if (offset != 0) {
+ currEncrypted = (extractorFlags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0;
+ if (baseEncrypted != currEncrypted) break;
+ }
+ MediaCodec.CryptoInfo cryptoInfo = null;
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0) {
+ cryptoInfo = new MediaCodec.CryptoInfo();
+ mExtractor.getSampleCryptoInfo(cryptoInfo);
+ cryptoInfos.add(cryptoInfo);
+ }
+ int codecFlags = 0;
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
+ }
+ if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME;
+ }
+ if (!mExtractor.advance()) {
+ codecFlags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
+ mSawInputEOS = true;
+ }
+ MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
+ bufferInfo.set(offset, size, pts, codecFlags);
+ offset += bufferInfo.size;
+ bufferInfos.add(bufferInfo);
+ }
+ if (bufferInfos.size() > 0) {
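+ // Submit the batched access units together; encrypted batches go through the secure queue API.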
+ if (baseEncrypted) {
+ mCodec.queueSecureInputBuffers(bufferIndex, bufferInfos, cryptoInfos);
+ } else {
+ mCodec.queueInputBuffers(bufferIndex, bufferInfos);
+ }
+ for (MediaCodec.BufferInfo info : bufferInfos) {
+ if (info.size > 0 && (info.flags & (MediaCodec.BUFFER_FLAG_CODEC_CONFIG
+ | MediaCodec.BUFFER_FLAG_PARTIAL_FRAME)) == 0) {
+ mOutputBuff.saveInPTS(info.presentationTimeUs);
+ mInputCount++;
+ }
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "input: id: " + bufferIndex + " size: " + info.size
+ + " pts: " + info.presentationTimeUs + " flags: " + info.flags);
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecDecoderMultiAccessUnitTestBase.java b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderMultiAccessUnitTestBase.java
index cf8bcb0..c7224b4 100644
--- a/tests/media/common/src/android/mediav2/common/cts/CodecDecoderMultiAccessUnitTestBase.java
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecDecoderMultiAccessUnitTestBase.java
@@ -72,6 +72,12 @@
}
@Override
+ protected void flushCodec() {
+ super.flushCodec();
+ mAsyncHandleMultiAccessUnits.clearQueues();
+ }
+
+ @Override
protected void resetContext(boolean isAsync, boolean signalEOSWithLastFrame) {
super.resetContext(isAsync, signalEOSWithLastFrame);
mMaxOutputSizeBytes = 0;
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecEncoderBlockModelTestBase.java b/tests/media/common/src/android/mediav2/common/cts/CodecEncoderBlockModelTestBase.java
new file mode 100644
index 0000000..e067302
--- /dev/null
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecEncoderBlockModelTestBase.java
@@ -0,0 +1,166 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.common.cts;
+
+import static org.junit.Assert.assertEquals;
+
+import android.media.MediaCodec;
+import android.media.MediaFormat;
+import android.os.Build;
+import android.util.Log;
+
+import androidx.annotation.RequiresApi;
+
+import org.junit.After;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Wrapper class for trying and testing mediacodec encoder components in block model mode.
+ */
+@RequiresApi(api = Build.VERSION_CODES.R)
+public class CodecEncoderBlockModelTestBase extends CodecEncoderTestBase {
+ private static final String LOG_TAG = CodecEncoderBlockModelTestBase.class.getSimpleName();
+ private static final int MAX_INPUT_SIZE_MS = 10;
+
+ private final CodecDecoderBlockModelTestBase.LinearBlockWrapper
+ mLinearInputBlock = new CodecDecoderBlockModelTestBase.LinearBlockWrapper();
+
+ // this is made public so that independent instances can configure it on demand
+ public float mMaxInputSizeInMs = MAX_INPUT_SIZE_MS;
+
+ public CodecEncoderBlockModelTestBase(String encoder, String mediaType,
+ EncoderConfigParams[] encCfgParams, String allTestParams) {
+ super(encoder, mediaType, encCfgParams, allTestParams);
+ }
+
+ @After
+ public void tearDownCodecEncoderBlockModelTestBase() {
+ mLinearInputBlock.recycle();
+ }
+
+ @Override
+ protected void configureCodec(MediaFormat format, boolean isAsyncUnUsed,
+ boolean signalEOSWithLastFrameUnUsed, boolean isEncoder) {
+ if (ENABLE_LOGS) {
+ if (!isAsyncUnUsed) {
+ Log.d(LOG_TAG, "Ignoring synchronous mode of operation request");
+ }
+ if (!signalEOSWithLastFrameUnUsed) {
+ Log.d(LOG_TAG, "Ignoring signal eos separately request");
+ }
+ }
+ configureCodec(format, true, true, isEncoder, MediaCodec.CONFIGURE_FLAG_USE_BLOCK_MODEL);
+ }
+
+ @Override
+ protected void resetContext(boolean isAsync, boolean signalEOSWithLastFrame) {
+ mLinearInputBlock.recycle();
+ mMaxInputSizeInMs = MAX_INPUT_SIZE_MS;
+ super.resetContext(isAsync, signalEOSWithLastFrame);
+ }
+
+ @Override
+ protected void enqueueInput(int bufferIndex) {
+ if (mIsLoopBack && mInputBufferReadOffset >= mInputData.length) {
+ mInputBufferReadOffset = 0;
+ }
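+ // Queue at most mMaxInputSizeInMs worth of audio per request, rounded up to a whole PCM frame.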
+ int frameSize = mActiveRawRes.mBytesPerSample * mActiveEncCfg.mChannelCount;
+ int maxInputSize =
+ (int) ((frameSize * mActiveEncCfg.mSampleRate) * mMaxInputSizeInMs / 1000);
+ maxInputSize = ((maxInputSize + (frameSize - 1)) / frameSize) * frameSize;
+
+ int flags = 0;
+ long pts = mInputOffsetPts;
+ pts += mNumBytesSubmitted * 1000000L / ((long) mActiveRawRes.mBytesPerSample
+ * mActiveEncCfg.mChannelCount * mActiveEncCfg.mSampleRate);
+ int size = Math.min(maxInputSize, mInputData.length - mInputBufferReadOffset);
+ assertEquals("Input size selected for queuing equates to partial audio sample \n"
+ + mTestConfig + mTestEnv, 0,
+ size % ((long) mActiveRawRes.mBytesPerSample * mActiveEncCfg.mChannelCount));
+ mLinearInputBlock.allocateBlock(mCodecName, size);
+ mLinearInputBlock.getBuffer().put(mInputData, mInputBufferReadOffset, size);
+ if (mIsLoopBack ? (mInputCount + 1 >= mLoopBackFrameLimit) :
+ (mInputBufferReadOffset + size >= mInputData.length)) {
+ flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
+ mSawInputEOS = true;
+ }
+ mInputBufferReadOffset += size;
+ mNumBytesSubmitted += size;
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "input: id: " + bufferIndex + " size: " + size + " pts: " + pts
+ + " flags: " + flags);
+ }
+ MediaCodec.QueueRequest request = mCodec.getQueueRequest(bufferIndex);
+ request.setLinearBlock(mLinearInputBlock.getBlock(), mLinearInputBlock.getOffset(), size);
+ request.setPresentationTimeUs(pts);
+ request.setFlags(flags);
+ request.queue();
+ mLinearInputBlock.setOffset(mLinearInputBlock.getOffset() + size);
+ mOutputBuff.saveInPTS(pts);
+ mInputCount++;
+ }
+
+ @Override
+ protected void dequeueOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+ MediaCodec.OutputFrame frame = mCodec.getOutputFrame(bufferIndex);
+ long framePts = frame.getPresentationTimeUs();
+ long infoPts = info.presentationTimeUs;
+ int frameFlags = frame.getFlags();
+ int infoFlags = info.flags;
+ assertEquals("presentation timestamps from OutputFrame does not match with the value "
+ + "obtained from callback: framePts=" + framePts + ", infoPts=" + infoPts + "\n"
+ + mTestConfig + mTestEnv, framePts, infoPts);
+ assertEquals("Flags from OutputFrame does not match with the value obtained from "
+ + "callback: frameFlags=" + frameFlags + ", infoFlags=" + infoFlags + "\n"
+ + mTestConfig + mTestEnv, frameFlags, infoFlags);
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "output: id: " + bufferIndex + " flags: " + info.flags + " size: "
+ + info.size + " timestamp: " + info.presentationTimeUs);
+ }
+ if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ mSawOutputEOS = true;
+ }
+ if (info.size > 0) {
+ if (frame.getLinearBlock() != null) {
+ ByteBuffer buf = frame.getLinearBlock().map();
+ if (mSaveToMem) {
+ MediaCodec.BufferInfo copy = new MediaCodec.BufferInfo();
+ copy.set(mOutputBuff.getOutStreamSize(), info.size, info.presentationTimeUs,
+ info.flags);
+ mInfoList.add(copy);
+
+ mOutputBuff.checksum(buf, info);
+ mOutputBuff.saveToMemory(buf, info);
+ }
+ if (mMuxer != null) {
+ if (mTrackID == -1) {
+ mTrackID = mMuxer.addTrack(mCodec.getOutputFormat());
+ mMuxer.start();
+ }
+ mMuxer.writeSampleData(mTrackID, buf, info);
+ }
+ frame.getLinearBlock().recycle();
+ }
+ if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+ mOutputBuff.saveOutPTS(info.presentationTimeUs);
+ mOutputCount++;
+ }
+ }
+ mCodec.releaseOutputBuffer(bufferIndex, false);
+ }
+}
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecEncoderTestBase.java b/tests/media/common/src/android/mediav2/common/cts/CodecEncoderTestBase.java
index 0ef1410..79836443 100644
--- a/tests/media/common/src/android/mediav2/common/cts/CodecEncoderTestBase.java
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecEncoderTestBase.java
@@ -580,11 +580,11 @@
}
public void encodeToMemory(String encoder, EncoderConfigParams cfg, RawResource res,
- int frameLimit, boolean saveToMem, boolean muxOutput)
+ OutputManager outputBuff, int frameLimit, boolean saveToMem, boolean muxOutput)
throws IOException, InterruptedException {
mSaveToMem = saveToMem;
mMuxOutput = muxOutput;
- mOutputBuff = new OutputManager();
+ mOutputBuff = outputBuff;
mInfoList.clear();
mActiveEncCfg = cfg;
mActiveRawRes = res;
@@ -603,6 +603,12 @@
mMuxOutput = false;
}
+ public void encodeToMemory(String encoder, EncoderConfigParams cfg, RawResource res,
+ int frameLimit, boolean saveToMem, boolean muxOutput)
+ throws IOException, InterruptedException {
+ encodeToMemory(encoder, cfg, res, new OutputManager(), frameLimit, saveToMem, muxOutput);
+ }
+
public void setLoopBack(boolean loopBack) {
mIsLoopBack = loopBack;
}
diff --git a/tests/media/common/src/android/mediav2/common/cts/CodecTestBase.java b/tests/media/common/src/android/mediav2/common/cts/CodecTestBase.java
index db093f1..6b6109c 100644
--- a/tests/media/common/src/android/mediav2/common/cts/CodecTestBase.java
+++ b/tests/media/common/src/android/mediav2/common/cts/CodecTestBase.java
@@ -17,6 +17,7 @@
package android.mediav2.common.cts;
import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_HdrEditing;
+import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_HlgEditing;
import static android.media.MediaCodecInfo.CodecProfileLevel.*;
import static org.junit.Assert.assertEquals;
@@ -32,7 +33,6 @@
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.params.DynamicRangeProfiles;
import android.hardware.display.DisplayManager;
-import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
@@ -126,6 +126,8 @@
|| ApiLevelUtil.codenameEquals("VanillaIceCream");
public static final boolean FIRST_SDK_IS_AT_LEAST_T =
ApiLevelUtil.isFirstApiAtLeast(Build.VERSION_CODES.TIRAMISU);
+ public static final boolean FIRST_SDK_IS_AT_LEAST_V =
+ ApiLevelUtil.isFirstApiAtLeast(Build.VERSION_CODES.VANILLA_ICE_CREAM);
public static final boolean VNDK_IS_AT_LEAST_T =
SystemProperties.getInt("ro.vndk.version", Build.VERSION_CODES.CUR_DEVELOPMENT)
>= Build.VERSION_CODES.TIRAMISU;
@@ -142,10 +144,14 @@
SystemProperties.getInt("ro.board.api_level", Build.VERSION_CODES.CUR_DEVELOPMENT)
< Build.VERSION_CODES.UPSIDE_DOWN_CAKE;
public static final int ANDROID_VENDOR_API_202404 = 202404;
- public static final boolean BOARD_SDK_IS_AT_LEAST_202404 =
- SystemProperties.getInt("ro.board.api_level", Build.VERSION_CODES.CUR_DEVELOPMENT)
- >= ANDROID_VENDOR_API_202404;
+ public static final int BOARD_FIRST_SDK =
+ SystemProperties.getInt("ro.board.first_api_level",
+ SystemProperties.getInt("ro.board.api_level",
+ Build.VERSION_CODES.CUR_DEVELOPMENT));
+ public static final boolean BOARD_FIRST_SDK_IS_AT_LEAST_202404 =
+ BOARD_FIRST_SDK >= ANDROID_VENDOR_API_202404;
public static final boolean IS_HDR_EDITING_SUPPORTED;
+ public static final boolean IS_HLG_EDITING_SUPPORTED;
public static final boolean IS_HDR_CAPTURE_SUPPORTED;
private static final String LOG_TAG = CodecTestBase.class.getSimpleName();
@@ -386,7 +392,9 @@
MEDIA_CODEC_LIST_ALL = new MediaCodecList(MediaCodecList.ALL_CODECS);
MEDIA_CODEC_LIST_REGULAR = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
IS_HDR_CAPTURE_SUPPORTED = isHDRCaptureSupported();
- IS_HDR_EDITING_SUPPORTED = isHDREditingSupported();
+ IS_HDR_EDITING_SUPPORTED = isEncoderFeatureSupported(FEATURE_HdrEditing);
+ IS_HLG_EDITING_SUPPORTED = (IS_AT_LEAST_V && android.media.codec.Flags.hlgEditing())
+ ? isEncoderFeatureSupported(FEATURE_HlgEditing) : false;
CODEC_SEL_KEY_MEDIA_TYPE_MAP.put("vp8", MediaFormat.MIMETYPE_VIDEO_VP8);
CODEC_SEL_KEY_MEDIA_TYPE_MAP.put("vp9", MediaFormat.MIMETYPE_VIDEO_VP9);
CODEC_SEL_KEY_MEDIA_TYPE_MAP.put("av1", MediaFormat.MIMETYPE_VIDEO_AV1);
@@ -581,14 +589,17 @@
return false;
}
- public static boolean isHDREditingSupported() {
+ /**
+ * Check if any encoder on the device supports the given feature
+ */
+ public static boolean isEncoderFeatureSupported(String feature) {
for (MediaCodecInfo codecInfo : MEDIA_CODEC_LIST_REGULAR.getCodecInfos()) {
if (!codecInfo.isEncoder()) {
continue;
}
for (String mediaType : codecInfo.getSupportedTypes()) {
CodecCapabilities caps = codecInfo.getCapabilitiesForType(mediaType);
- if (caps != null && caps.isFeatureSupported(FEATURE_HdrEditing)) {
+ if (caps != null && caps.isFeatureSupported(feature)) {
return true;
}
}
@@ -637,6 +648,20 @@
.getSupportedHdrTypes().length > 0;
}
+ public static boolean isFormatSupported(String name, String mediaType, MediaFormat format) {
+ for (MediaCodecInfo codecInfo : MEDIA_CODEC_LIST_ALL.getCodecInfos()) {
+ if (name.equals(codecInfo.getName())) {
+ MediaCodecInfo.CodecCapabilities cap = codecInfo.getCapabilitiesForType(mediaType);
+ boolean isSupported = true;
+ if (format != null) {
+ isSupported = cap.isFormatSupported(format);
+ }
+ if (isSupported) return true;
+ }
+ }
+ return false;
+ }
+
public static boolean areFormatsSupported(String name, String mediaType,
List<MediaFormat> formats) throws IOException {
for (MediaCodecInfo codecInfo : MEDIA_CODEC_LIST_ALL.getCodecInfos()) {
@@ -910,12 +935,19 @@
public static List<Object[]> prepareParamList(List<Object[]> exhaustiveArgsList,
boolean isEncoder, boolean needAudio, boolean needVideo, boolean mustTestAllCodecs) {
return prepareParamList(exhaustiveArgsList, isEncoder, needAudio, needVideo,
- mustTestAllCodecs, ComponentClass.ALL);
+ mustTestAllCodecs, ComponentClass.ALL, null /* features */);
}
public static List<Object[]> prepareParamList(List<Object[]> exhaustiveArgsList,
boolean isEncoder, boolean needAudio, boolean needVideo, boolean mustTestAllCodecs,
ComponentClass selectSwitch) {
+ return prepareParamList(exhaustiveArgsList, isEncoder, needAudio, needVideo,
+ mustTestAllCodecs, selectSwitch, null);
+ }
+
+ public static List<Object[]> prepareParamList(List<Object[]> exhaustiveArgsList,
+ boolean isEncoder, boolean needAudio, boolean needVideo, boolean mustTestAllCodecs,
+ ComponentClass selectSwitch, String[] features) {
ArrayList<String> mediaTypes = compileCompleteTestMediaTypesList(isEncoder,
needAudio, needVideo);
ArrayList<String> cddRequiredMediaTypesList =
@@ -924,7 +956,7 @@
int argLength = exhaustiveArgsList.get(0).length;
for (String mediaType : mediaTypes) {
ArrayList<String> totalListOfCodecs =
- selectCodecs(mediaType, null, null, isEncoder, selectSwitch);
+ selectCodecs(mediaType, null /* formats */, features, isEncoder, selectSwitch);
ArrayList<String> listOfCodecs = new ArrayList<>();
if (codecPrefix != null || codecFilter != null) {
for (String codec : totalListOfCodecs) {
@@ -1100,8 +1132,9 @@
}
}
- protected void configureContextOnly(MediaFormat format, boolean isAsync,
- boolean signalEOSWithLastFrame) {
+ // reusable portions of configureCodec(...) are handled here
+ protected void configureCodecCommon(MediaFormat format, boolean isAsync,
+ boolean signalEOSWithLastFrame, boolean isEncoder, int flags) {
resetContext(isAsync, signalEOSWithLastFrame);
mAsyncHandle.setCallBack(mCodec, isAsync);
@@ -1113,9 +1146,11 @@
(isAsync ? "asynchronous" : "synchronous")));
mTestEnv.append(String.format("Component received input eos :- %s \n",
(signalEOSWithLastFrame ? "with full buffer" : "with empty buffer")));
+ mTestEnv.append(String.format("Component is :- %s \n",
+ (isEncoder ? "encoder" : "decoder")));
+ mTestEnv.append("Component configure flags :- ").append(flags).append("\n");
}
-
protected void configureCodec(MediaFormat format, boolean isAsync,
boolean cryptoCallAndSignalEosWithLastFrame, boolean isEncoder) {
configureCodec(format, isAsync, cryptoCallAndSignalEosWithLastFrame,
@@ -1131,7 +1166,8 @@
}
}
- configureContextOnly(format, isAsync, cryptoCallAndSignalEosWithLastFrame);
+ configureCodecCommon(format, isAsync, cryptoCallAndSignalEosWithLastFrame, isEncoder,
+ flags);
// signalEOS flag has nothing to do with configure. We are using this flag to try all
// available configure apis
@@ -1150,7 +1186,8 @@
protected void configureCodecInDetachedMode(MediaFormat format, boolean isAsync,
boolean cryptoCallAndSignalEosWithLastFrame) {
- configureContextOnly(format, isAsync, cryptoCallAndSignalEosWithLastFrame);
+ configureCodecCommon(format, isAsync, cryptoCallAndSignalEosWithLastFrame,
+ false /* isEncoder */, MediaCodec.CONFIGURE_FLAG_DETACHED_SURFACE);
// signalEOS flag has nothing to do with configure. We are using this flag to try all
// available configure apis
@@ -1446,8 +1483,8 @@
}
protected void setUpSurface(int width, int height, int format, int maxImages,
- Function<Image, Boolean> predicate) {
- mImageSurface.createSurface(width, height, format, maxImages, predicate);
+ int surfaceId, Function<ImageSurface.ImageAndAttributes, Boolean> predicate) {
+ mImageSurface.createSurface(width, height, format, maxImages, surfaceId, predicate);
mSurface = mImageSurface.getSurface();
assertNotNull("Surface created is null \n" + mTestConfig + mTestEnv, mSurface);
assertTrue("Surface created is invalid \n" + mTestConfig + mTestEnv, mSurface.isValid());
diff --git a/tests/media/common/src/android/mediav2/common/cts/CompareStreams.java b/tests/media/common/src/android/mediav2/common/cts/CompareStreams.java
index b6b9068..9423322 100644
--- a/tests/media/common/src/android/mediav2/common/cts/CompareStreams.java
+++ b/tests/media/common/src/android/mediav2/common/cts/CompareStreams.java
@@ -17,6 +17,7 @@
package android.mediav2.common.cts;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUVP010;
+import static android.mediav2.common.cts.DecodeStreamToYuv.findDecoderForFormat;
import static android.mediav2.common.cts.DecodeStreamToYuv.findDecoderForStream;
import static android.mediav2.common.cts.DecodeStreamToYuv.getFormatInStream;
import static android.mediav2.common.cts.DecodeStreamToYuv.getImage;
@@ -25,8 +26,10 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
import android.graphics.ImageFormat;
+import android.graphics.Rect;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaExtractor;
@@ -42,7 +45,8 @@
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
/**
* Wrapper class for storing YUV Planes of an image
@@ -69,12 +73,14 @@
private final ArrayList<MediaCodec.BufferInfo> mStreamBufferInfos;
private final boolean mAllowRefResize;
private final boolean mAllowRefLoopBack;
- private final double[] mGlobalMSE;
- private final double[] mMinimumMSE;
- private final double[] mGlobalPSNR;
- private final double[] mMinimumPSNR;
- private final double[] mAvgPSNR;
- private final ArrayList<double[]> mFramesPSNR;
+ private final Map<Long, List<Rect>> mFrameCropRects;
+ private final double[] mGlobalMSE = {0.0, 0.0, 0.0};
+ private final double[] mMinimumMSE = {Float.MAX_VALUE, Float.MAX_VALUE, Float.MAX_VALUE};
+ private final double[] mGlobalPSNR = new double[3];
+ private final double[] mMinimumPSNR = new double[3];
+ private final double[] mAvgPSNR = {0.0, 0.0, 0.0};
+ private final ArrayList<double[]> mFramesPSNR = new ArrayList<>();
+ private final List<List<double[]>> mFramesCropRectPSNR = new ArrayList<>();
private final ArrayList<String> mTmpFiles = new ArrayList<>();
private boolean mGenerateStats;
@@ -83,39 +89,38 @@
private int mFrameSize;
private byte[] mInputData;
- private CompareStreams(RawResource refYuv, String testMediaType, String testFile,
- MediaFormat testFormat, ByteBuffer testBuffer,
+ private CompareStreams(RawResource refYuv, MediaFormat testFormat, ByteBuffer testBuffer,
ArrayList<MediaCodec.BufferInfo> testBufferInfos, boolean allowRefResize,
- boolean allowRefLoopBack) throws IOException {
- super(findDecoderForStream(testMediaType, testFile), testMediaType, testFile, LOG_TAG);
+ boolean allowRefLoopBack) {
+ super(findDecoderForFormat(testFormat), testFormat.getString(MediaFormat.KEY_MIME), null,
+ LOG_TAG);
mRefYuv = refYuv;
mStreamFormat = testFormat;
mStreamBuffer = testBuffer;
mStreamBufferInfos = testBufferInfos;
mAllowRefResize = allowRefResize;
mAllowRefLoopBack = allowRefLoopBack;
- mMinimumMSE = new double[3];
- Arrays.fill(mMinimumMSE, Float.MAX_VALUE);
- mGlobalMSE = new double[3];
- Arrays.fill(mGlobalMSE, 0.0);
- mGlobalPSNR = new double[3];
- mMinimumPSNR = new double[3];
- mAvgPSNR = new double[3];
- Arrays.fill(mAvgPSNR, 0.0);
- mFramesPSNR = new ArrayList<>();
+ mFrameCropRects = null;
}
public CompareStreams(RawResource refYuv, String testMediaType, String testFile,
boolean allowRefResize, boolean allowRefLoopBack) throws IOException {
- this(refYuv, testMediaType, testFile, null, null, null, allowRefResize, allowRefLoopBack);
+ super(findDecoderForStream(testMediaType, testFile), testMediaType, testFile, LOG_TAG);
+ mRefYuv = refYuv;
+ mStreamFormat = null;
+ mStreamBuffer = null;
+ mStreamBufferInfos = null;
+ mAllowRefResize = allowRefResize;
+ mAllowRefLoopBack = allowRefLoopBack;
+ mFrameCropRects = null;
}
public CompareStreams(MediaFormat refFormat, ByteBuffer refBuffer,
ArrayList<MediaCodec.BufferInfo> refBufferInfos, MediaFormat testFormat,
ByteBuffer testBuffer, ArrayList<MediaCodec.BufferInfo> testBufferInfos,
- boolean allowRefResize, boolean allowRefLoopBack) throws IOException {
- this(new DecodeStreamToYuv(refFormat, refBuffer, refBufferInfos).getDecodedYuv(), null,
- null, testFormat, testBuffer, testBufferInfos, allowRefResize, allowRefLoopBack);
+ boolean allowRefResize, boolean allowRefLoopBack) {
+ this(new DecodeStreamToYuv(refFormat, refBuffer, refBufferInfos).getDecodedYuv(),
+ testFormat, testBuffer, testBufferInfos, allowRefResize, allowRefLoopBack);
mTmpFiles.add(mRefYuv.mFileName);
}
@@ -126,6 +131,19 @@
mTmpFiles.add(mRefYuv.mFileName);
}
+ public CompareStreams(RawResource refYuv, String testMediaType, String testFile,
+ Map<Long, List<Rect>> frameCropRects, boolean allowRefResize, boolean allowRefLoopBack)
+ throws IOException {
+ super(findDecoderForStream(testMediaType, testFile), testMediaType, testFile, LOG_TAG);
+ mRefYuv = refYuv;
+ mStreamFormat = null;
+ mStreamBuffer = null;
+ mStreamBufferInfos = null;
+ mAllowRefResize = allowRefResize;
+ mAllowRefLoopBack = allowRefLoopBack;
+ mFrameCropRects = frameCropRects;
+ }
+
static YUVImage fillByteArray(int tgtFrameWidth, int tgtFrameHeight,
int bytesPerSample, int inpFrameWidth, int inpFrameHeight, byte[] inputData) {
YUVImage yuvImage = new YUVImage();
@@ -202,9 +220,12 @@
if (mAllowRefLoopBack && mFileOffset == mFileSize) mFileOffset = 0;
YUVImage yuvRefImage = fillByteArray(width, height, mRefYuv.mBytesPerSample,
mRefYuv.mWidth, mRefYuv.mHeight, mInputData);
+ List<Rect> frameCropRects =
+ mFrameCropRects != null ? mFrameCropRects.get(info.presentationTimeUs) :
+ null;
updateErrorStats(yuvRefImage.mData.get(0), yuvRefImage.mData.get(1),
yuvRefImage.mData.get(2), yuvImage.mData.get(0), yuvImage.mData.get(1),
- yuvImage.mData.get(2));
+ yuvImage.mData.get(2), width, height, frameCropRects);
} catch (IOException e) {
throw new RuntimeException(e);
@@ -223,23 +244,65 @@
mCodec.releaseOutputBuffer(bufferIndex, false);
}
+ private int clamp(int val, int min, int max) {
+ return Math.max(min, Math.min(max, val));
+ }
+
private void updateErrorStats(byte[] yRef, byte[] uRef, byte[] vRef, byte[] yTest,
- byte[] uTest, byte[] vTest) {
- double curYMSE = computeMSE(yRef, yTest, mRefYuv.mBytesPerSample);
- mGlobalMSE[0] += curYMSE;
- mMinimumMSE[0] = Math.min(mMinimumMSE[0], curYMSE);
+ byte[] uTest, byte[] vTest, int imgWidth, int imgHeight, List<Rect> cropRectList) {
+ if (cropRectList == null || cropRectList.isEmpty()) {
+ cropRectList = new ArrayList<>();
+ cropRectList.add(new Rect(0, 0, imgWidth, imgHeight));
+ }
+ double sumYMSE = 0;
+ double sumUMSE = 0;
+ double sumVMSE = 0;
+ Rect frameRect = new Rect(0, 0, imgWidth, imgHeight);
+ ArrayList<double[]> frameCropRectPSNR = new ArrayList<>();
- double curUMSE = computeMSE(uRef, uTest, mRefYuv.mBytesPerSample);
- mGlobalMSE[1] += curUMSE;
- mMinimumMSE[1] = Math.min(mMinimumMSE[1], curUMSE);
+ for (int i = 0; i < cropRectList.size(); i++) {
+ Rect cropRect = new Rect(cropRectList.get(i));
+ cropRect.left = clamp(cropRect.left, 0, imgWidth);
+ cropRect.top = clamp(cropRect.top, 0, imgHeight);
+ cropRect.right = clamp(cropRect.right, 0, imgWidth);
+ cropRect.bottom = clamp(cropRect.bottom, 0, imgHeight);
+ assertTrue("invalid cropRect, " + cropRect, cropRect.isValid());
+ assertTrue(String.format("cropRect %s exceeds frameRect %s", cropRect, frameRect),
+ frameRect.contains(cropRect));
+ double curYMSE = computeMSE(yRef, yTest, mRefYuv.mBytesPerSample, imgWidth, imgHeight,
+ cropRect);
+ sumYMSE += curYMSE;
- double curVMSE = computeMSE(vRef, vTest, mRefYuv.mBytesPerSample);
- mGlobalMSE[2] += curVMSE;
- mMinimumMSE[2] = Math.min(mMinimumMSE[2], curVMSE);
+ cropRect.left = cropRect.left / 2; // for uv
+ cropRect.top = cropRect.top / 2;
+ cropRect.right = cropRect.right / 2;
+ cropRect.bottom = cropRect.bottom / 2;
- double yFramePSNR = computePSNR(curYMSE, mRefYuv.mBytesPerSample);
- double uFramePSNR = computePSNR(curUMSE, mRefYuv.mBytesPerSample);
- double vFramePSNR = computePSNR(curVMSE, mRefYuv.mBytesPerSample);
+ double curUMSE = computeMSE(uRef, uTest, mRefYuv.mBytesPerSample, imgWidth / 2,
+ imgHeight / 2, cropRect);
+ sumUMSE += curUMSE;
+
+ double curVMSE = computeMSE(vRef, vTest, mRefYuv.mBytesPerSample, imgWidth / 2,
+ imgHeight / 2, cropRect);
+ sumVMSE += curVMSE;
+
+ double yCurrCropRectPSNR = computePSNR(curYMSE, mRefYuv.mBytesPerSample);
+ double uCurrCropRectPSNR = computePSNR(curUMSE, mRefYuv.mBytesPerSample);
+ double vCurrCropRectPSNR = computePSNR(curVMSE, mRefYuv.mBytesPerSample);
+
+ frameCropRectPSNR.add(new double[]{yCurrCropRectPSNR, uCurrCropRectPSNR,
+ vCurrCropRectPSNR});
+ }
+ mFramesCropRectPSNR.add(frameCropRectPSNR);
+ mGlobalMSE[0] += sumYMSE;
+ mGlobalMSE[1] += sumUMSE;
+ mGlobalMSE[2] += sumVMSE;
+ mMinimumMSE[0] = Math.min(mMinimumMSE[0], sumYMSE);
+ mMinimumMSE[1] = Math.min(mMinimumMSE[1], sumUMSE);
+ mMinimumMSE[2] = Math.min(mMinimumMSE[2], sumVMSE);
+ double yFramePSNR = computePSNR(sumYMSE, mRefYuv.mBytesPerSample);
+ double uFramePSNR = computePSNR(sumUMSE, mRefYuv.mBytesPerSample);
+ double vFramePSNR = computePSNR(sumVMSE, mRefYuv.mBytesPerSample);
mAvgPSNR[0] += yFramePSNR;
mAvgPSNR[1] += uFramePSNR;
mAvgPSNR[2] += vFramePSNR;
@@ -322,6 +385,11 @@
return mAvgPSNR;
}
+ public List<List<double[]>> getFramesPSNRForRect() throws IOException, InterruptedException {
+ generateErrorStats();
+ return mFramesCropRectPSNR;
+ }
+
public void cleanUp() {
for (String tmpFile : mTmpFiles) {
File tmp = new File(tmpFile);
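Editor's note: a hedged sketch (not part of this patch) of the new per-crop-rect PSNR path added above; refYuv, the clip name and the rectangle are placeholder assumptions.

    // refYuv is an already prepared RawResource for the reference stream (setup elided);
    // both calls below may throw IOException / InterruptedException.
    Map<Long, List<Rect>> cropRects = new HashMap<>();
    cropRects.put(0L /* presentationTimeUs */, Arrays.asList(new Rect(0, 0, 320, 240)));
    CompareStreams cs = new CompareStreams(refYuv, MediaFormat.MIMETYPE_VIDEO_AVC,
            "test_clip.mp4" /* placeholder */, cropRects, true /* allowRefResize */,
            false /* allowRefLoopBack */);
    List<List<double[]>> psnrPerRect = cs.getFramesPSNRForRect(); // [frame][rect] -> {Y, U, V}
    cs.cleanUp();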
diff --git a/tests/media/common/src/android/mediav2/common/cts/EncoderConfigParams.java b/tests/media/common/src/android/mediav2/common/cts/EncoderConfigParams.java
index dbd0aac..6893ecc 100644
--- a/tests/media/common/src/android/mediav2/common/cts/EncoderConfigParams.java
+++ b/tests/media/common/src/android/mediav2/common/cts/EncoderConfigParams.java
@@ -27,6 +27,9 @@
import androidx.annotation.NonNull;
+import java.util.HashMap;
+import java.util.Map;
+
/**
* Class to hold encoder configuration settings.
*/
@@ -56,6 +59,9 @@
public final int mCompressionLevel;
public final int mPcmEncoding;
+ // features list
+ public final Map<String, Boolean> mFeatures;
+
// common params
public final int mProfile;
public final int mBitRate;
@@ -196,6 +202,7 @@
mCompressionLevel = 5;
mPcmEncoding = AudioFormat.ENCODING_INVALID;
}
+ mFeatures = cfg.mFeatures;
mBuilder = cfg;
}
@@ -236,6 +243,9 @@
if (mStandard >= 0) mFormat.setInteger(MediaFormat.KEY_COLOR_STANDARD, mStandard);
if (mTransfer >= 0) mFormat.setInteger(MediaFormat.KEY_COLOR_TRANSFER, mTransfer);
}
+ for (Map.Entry<String, Boolean> entry : mFeatures.entrySet()) {
+ mFormat.setFeatureEnabled(entry.getKey(), entry.getValue());
+ }
return new MediaFormat(mFormat);
}
@@ -300,6 +310,13 @@
if (mStandard >= 0) mMsg.append(String.format("color standard : %d, ", mStandard));
if (mTransfer >= 0) mMsg.append(String.format("color transfer : %d, ", mTransfer));
}
+ if (!mFeatures.isEmpty()) {
+ mMsg.append("features : { ");
+ for (Map.Entry<String, Boolean> entry : mFeatures.entrySet()) {
+ mMsg.append(entry.getKey()).append(" : ").append(entry.getValue()).append(", ");
+ }
+ mMsg.append("}");
+ }
mMsg.append("\n");
return mMsg.toString();
}
@@ -327,6 +344,9 @@
public int mCompressionLevel = 5;
public int mPcmEncoding = AudioFormat.ENCODING_PCM_16BIT;
+ // feature list
+ public Map<String, Boolean> mFeatures = new HashMap<>();
+
// common params
public int mProfile = -1;
public int mBitRate = 256000;
@@ -431,13 +451,27 @@
return this;
}
+ public Builder setFeature(String feature, boolean enable) {
+ if (feature != null) {
+ this.mFeatures.put(feature, enable);
+ }
+ return this;
+ }
+
public EncoderConfigParams build() {
return new EncoderConfigParams(this);
}
@NonNull
public Builder clone() throws CloneNotSupportedException {
- return (Builder) super.clone();
+ Builder builder = (Builder) super.clone();
+ builder.mFeatures.clear();
+ for (Map.Entry<String, Boolean> entry : mFeatures.entrySet()) {
+ String feature = entry.getKey();
+ boolean enable = entry.getValue();
+ builder.mFeatures.put(feature, enable);
+ }
+ return builder;
}
}
}
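Editor's note: a brief, hedged sketch of how the new feature map on the builder is expected to be used; the builder setup itself is elided and FEATURE_DynamicColorAspects is just one example feature.

    // 'builder' is an existing EncoderConfigParams.Builder for the media type under test.
    builder.setFeature(MediaCodecInfo.CodecCapabilities.FEATURE_DynamicColorAspects, true);
    EncoderConfigParams cfg = builder.build();
    // Any features set this way are applied to the resulting MediaFormat via
    // MediaFormat#setFeatureEnabled(), as shown in the hunk above.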
diff --git a/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java b/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java
index 4912eed..2771ea8 100644
--- a/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java
+++ b/tests/media/common/src/android/mediav2/common/cts/EncoderProfileLevelTestBase.java
@@ -577,7 +577,13 @@
protected void validateFormatForProfileAndLevelWRTCfg(MediaFormat format, String msg) {
validateProfile(mActiveEncCfg.mProfile, getProfile(format, msg), msg);
- if (mActiveEncCfg.mLevel != -1) {
+ // A few bitstream specifications (e.g. vp9, h263) do not have a placeholder for level.
+ // By extension, it is acceptable if this information is not present in the csd.
+ // But, if present, it MUST meet the test requirements.
+ if (mActiveEncCfg.mLevel != -1
+ && (format.containsKey(MediaFormat.KEY_LEVEL)
+ || (!mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_VP9)
+ && !mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_H263)))) {
validateMinLevel(mActiveEncCfg.mLevel, getLevel(format, msg), msg);
}
}
diff --git a/tests/media/common/src/android/mediav2/common/cts/HDREncoderTestBase.java b/tests/media/common/src/android/mediav2/common/cts/HDREncoderTestBase.java
index a2c1bed..a904e52 100644
--- a/tests/media/common/src/android/mediav2/common/cts/HDREncoderTestBase.java
+++ b/tests/media/common/src/android/mediav2/common/cts/HDREncoderTestBase.java
@@ -176,10 +176,15 @@
HDRDecoderTestBase decoderTest =
new HDRDecoderTestBase(decoder, mMediaType, mMuxedOutputFile, mAllTestParams);
- decoderTest.validateHDRInfo(hdrStaticInfo, hdrStaticInfo, mHdrDynamicInfo,
- mHdrDynamicInfo);
+ if (FIRST_SDK_IS_AT_LEAST_V && mHdrDynamicInfoReceived != null) {
+ mHdrDynamicInfoReceived.putAll(mHdrDynamicInfo);
+ }
+ decoderTest.validateHDRInfo(hdrStaticInfo, hdrStaticInfo,
+ FIRST_SDK_IS_AT_LEAST_V ? mHdrDynamicInfoReceived : mHdrDynamicInfo,
+ FIRST_SDK_IS_AT_LEAST_V ? mHdrDynamicInfoReceived : mHdrDynamicInfo);
if (HDR_INFO_IN_BITSTREAM_CODECS.contains(mMediaType)) {
- decoderTest.validateHDRInfo(hdrStaticInfo, null, mHdrDynamicInfo, null);
+ decoderTest.validateHDRInfo(hdrStaticInfo, null,
+ FIRST_SDK_IS_AT_LEAST_V ? mHdrDynamicInfoReceived : mHdrDynamicInfo, null);
}
}
}
diff --git a/tests/media/common/src/android/mediav2/common/cts/ImageSurface.java b/tests/media/common/src/android/mediav2/common/cts/ImageSurface.java
index 00c0a7a..341f148 100644
--- a/tests/media/common/src/android/mediav2/common/cts/ImageSurface.java
+++ b/tests/media/common/src/android/mediav2/common/cts/ImageSurface.java
@@ -49,7 +49,18 @@
private Surface mReaderSurface;
private HandlerThread mHandlerThread;
private Handler mHandler;
- private Function<Image, Boolean> mPredicate;
+ private int mImageBoundToSurfaceId;
+ private Function<ImageAndAttributes, Boolean> mPredicate;
+
+ public static class ImageAndAttributes {
+ public Image mImage;
+ public int mImageBoundToSurfaceId;
+
+ public ImageAndAttributes(Image image, int surfaceId) {
+ mImage = image;
+ mImageBoundToSurfaceId = surfaceId;
+ }
+ }
@Override
public void onImageAvailable(ImageReader reader) {
@@ -97,7 +108,7 @@
}
public void createSurface(int width, int height, int format, int maxNumImages,
- Function<Image, Boolean> predicate) {
+ int surfaceId, Function<ImageAndAttributes, Boolean> predicate) {
if (mReader != null) {
throw new RuntimeException(
"Current instance of ImageSurface already has a weak reference to some "
@@ -109,6 +120,7 @@
mReader = ImageReader.newInstance(width, height, format, maxNumImages);
mReader.setOnImageAvailableListener(this, mHandler);
mReaderSurface = mReader.getSurface();
+ mImageBoundToSurfaceId = surfaceId;
mPredicate = predicate;
Log.v(LOG_TAG, String.format(Locale.getDefault(), "Created ImageReader size (%dx%d),"
+ " format %d, maxNumImages %d", width, height, format, maxNumImages));
@@ -125,7 +137,8 @@
assertNull("onImageAvailable() generated an exception: " + e, e);
assertNotNull("received null for image", image);
if (mPredicate != null) {
- assertTrue("predicate failed on image instance", mPredicate.apply(image));
+ assertTrue("predicate failed on image instance",
+ mPredicate.apply(new ImageAndAttributes(image, mImageBoundToSurfaceId)));
}
image.close();
}
diff --git a/tests/media/common/src/android/mediav2/common/cts/VideoErrorManager.java b/tests/media/common/src/android/mediav2/common/cts/VideoErrorManager.java
index 28bd32a..dd94b8b 100644
--- a/tests/media/common/src/android/mediav2/common/cts/VideoErrorManager.java
+++ b/tests/media/common/src/android/mediav2/common/cts/VideoErrorManager.java
@@ -19,6 +19,7 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+import android.graphics.Rect;
import android.util.Log;
import android.util.Pair;
@@ -123,27 +124,45 @@
return Pair.create(varianceSum, blocks);
}
- static double computeMSE(byte[] data0, byte[] data1, int bytesPerSample) {
+ static double computeMSE(byte[] data0, byte[] data1, int bytesPerSample, int imgWidth,
+ int imgHeight, Rect cropRect) {
assertEquals(data0.length, data1.length);
int length = data0.length / bytesPerSample;
long squareError = 0;
+ int cropLeft = 0;
+ int cropTop = 0;
+ int cropWidth = imgWidth;
+ int cropHeight = imgHeight;
+ if (cropRect != null) {
+ cropLeft = cropRect.left;
+ cropTop = cropRect.top;
+ cropWidth = cropRect.width();
+ cropHeight = cropRect.height();
+ }
if (bytesPerSample == 2) {
short[] dataA = new short[length];
ByteBuffer.wrap(data0).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(dataA);
short[] dataB = new short[length];
ByteBuffer.wrap(data1).order(ByteOrder.LITTLE_ENDIAN).asShortBuffer().get(dataB);
- for (int i = 0; i < length; i++) {
- long diff = ((int) dataA[i] & 0xffff) - ((int) dataB[i] & 0xffff);
- squareError += diff * diff;
+ for (int h = 0; h < cropHeight; h++) {
+ int offset = (cropTop + h) * imgWidth + cropLeft;
+ for (int w = 0; w < cropWidth; w++) {
+ long diff = (long) ((int) dataA[offset + w] & 0xffff) - ((int) dataB[offset + w]
+ & 0xffff);
+ squareError += diff * diff;
+ }
}
} else {
- for (int i = 0; i < length; i++) {
- int diff = ((int) data0[i] & 0xff) - ((int) data1[i] & 0xff);
- squareError += diff * diff;
+ for (int h = 0; h < cropHeight; h++) {
+ int offset = (cropTop + h) * imgWidth + cropLeft;
+ for (int w = 0; w < cropWidth; w++) {
+ int diff = ((int) data0[offset + w] & 0xff) - ((int) data1[offset + w] & 0xff);
+ squareError += diff * ((long) diff);
+ }
}
}
- return (double) squareError / length;
+ return (double) squareError / (cropWidth * cropHeight);
}
static double computePSNR(double mse, int bytesPerSample) {
@@ -174,7 +193,8 @@
refStream.seek(0);
refStream.read(yRef);
}
- double curYMSE = computeMSE(yRef, yTest, mRefYuv.mBytesPerSample);
+ double curYMSE = computeMSE(yRef, yTest, mRefYuv.mBytesPerSample, mRefYuv.mWidth,
+ mRefYuv.mHeight, null);
mGlobalMSE[0] += curYMSE;
mMinimumMSE[0] = Math.min(mMinimumMSE[0], curYMSE);
@@ -184,7 +204,8 @@
assertEquals("failed to read U Plane " + mTestYuv.mFileName
+ " contains insufficient bytes", uvSize,
testStream.read(uvTest));
- double curUMSE = computeMSE(uvRef, uvTest, mRefYuv.mBytesPerSample);
+ double curUMSE = computeMSE(uvRef, uvTest, mRefYuv.mBytesPerSample,
+ mRefYuv.mWidth / 2, mRefYuv.mHeight / 2, null);
mGlobalMSE[1] += curUMSE;
mMinimumMSE[1] = Math.min(mMinimumMSE[1], curUMSE);
@@ -194,7 +215,8 @@
assertEquals("failed to read V Plane " + mTestYuv.mFileName
+ " contains insufficient bytes", uvSize,
testStream.read(uvTest));
- double curVMSE = computeMSE(uvRef, uvTest, mRefYuv.mBytesPerSample);
+ double curVMSE = computeMSE(uvRef, uvTest, mRefYuv.mBytesPerSample,
+ mRefYuv.mWidth / 2, mRefYuv.mHeight / 2, null);
mGlobalMSE[2] += curVMSE;
mMinimumMSE[2] = Math.min(mMinimumMSE[2], curVMSE);
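Editor's note: an illustrative consistency check (not part of the patch) of the crop-rect contract of computeMSE(); 'width' and 'height' stand for assumed plane dimensions.

    byte[] ref = new byte[width * height];
    byte[] test = new byte[width * height];
    double fullFrame = computeMSE(ref, test, 1 /* bytesPerSample */, width, height, null);
    double cropped = computeMSE(ref, test, 1, width, height, new Rect(0, 0, width, height));
    // A null crop rect and a full-frame rect cover the same samples, so fullFrame == cropped.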
diff --git a/tests/media/src/android/mediav2/cts/AdaptivePlaybackTest.java b/tests/media/src/android/mediav2/cts/AdaptivePlaybackTest.java
index 6d0a33a..c7011aa 100644
--- a/tests/media/src/android/mediav2/cts/AdaptivePlaybackTest.java
+++ b/tests/media/src/android/mediav2/cts/AdaptivePlaybackTest.java
@@ -181,7 +181,49 @@
"cosmat_800x640_24fps_768kbps_av1_10bit.mkv"}, CODEC_ALL},
}));
}
- return prepareParamList(exhaustiveArgsList, isEncoder, needAudio, needVideo, false);
+ List<Object[]> argsList = prepareParamList(exhaustiveArgsList, isEncoder, needAudio,
+ needVideo, false);
+ if (android.media.codec.Flags.dynamicColorAspects()) {
+ List<Object[]> dynamicColorAspectsArgs = Arrays.asList(new Object[][]{
+ {MediaFormat.MIMETYPE_VIDEO_AVC, new String[]{
+ "bbb_640x360_512kbps_30fps_avc_nob.mp4",
+ "cosmat_1280x720_24fps_crf22_avc_10bit_nob.mkv",
+ "bbb_800x640_768kbps_30fps_avc_nob.mp4",
+ "cosmat_640x360_24fps_crf22_avc_10bit_nob.mkv",
+ "bbb_1280x720_1mbps_30fps_avc_2b.mp4",
+ "cosmat_800x640_24fps_crf22_avc_10bit_2b.mkv"}, CODEC_OPTIONAL},
+ {MediaFormat.MIMETYPE_VIDEO_HEVC, new String[]{
+ "bbb_640x360_512kbps_30fps_hevc_nob.mp4",
+ "cosmat_1280x720_24fps_crf22_hevc_10bit_nob.mkv",
+ "cosmat_352x288_hdr10_only_stream_hevc.mkv",
+ "bbb_800x640_768kbps_30fps_hevc_nob.mp4",
+ "cosmat_640x360_24fps_crf22_hevc_10bit_2b.mkv",
+ "bbb_1280x720_1mbps_30fps_hevc_2b.mp4",
+ "cosmat_352x288_hdr10plus_hevc.mp4",
+ "cosmat_800x640_24fps_crf22_hevc_10bit_nob.mkv"}, CODEC_OPTIONAL},
+ {MediaFormat.MIMETYPE_VIDEO_VP9, new String[]{
+ "bbb_640x360_512kbps_30fps_vp9.webm",
+ "cosmat_1280x720_24fps_crf22_vp9_10bit.mkv",
+ "cosmat_352x288_hdr10_only_container_vp9.mkv",
+ "bbb_800x640_768kbps_30fps_vp9.webm",
+ "cosmat_640x360_24fps_crf22_vp9_10bit.mkv",
+ "bbb_1280x720_1mbps_30fps_vp9.webm",
+ "cosmat_800x640_24fps_crf22_vp9_10bit.mkv"}, CODEC_OPTIONAL},
+ {MediaFormat.MIMETYPE_VIDEO_AV1, new String[]{
+ "bbb_640x360_512kbps_30fps_av1.webm",
+ "cosmat_1280x720_24fps_1200kbps_av1_10bit.mkv",
+ "cosmat_352x288_hdr10_stream_and_container_correct_av1.mkv",
+ "bbb_800x640_768kbps_30fps_av1.webm",
+ "cosmat_640x360_24fps_512kbps_av1_10bit.mkv",
+ "bbb_1280x720_1mbps_30fps_av1.webm",
+ "cosmat_352x288_hdr10plus_av1.mkv",
+ "cosmat_800x640_24fps_768kbps_av1_10bit.mkv"}, CODEC_OPTIONAL},
+ });
+ argsList.addAll(prepareParamList(dynamicColorAspectsArgs, isEncoder, needAudio,
+ needVideo, false /* mustTestAllCodecs */, ComponentClass.ALL,
+ new String[]{MediaCodecInfo.CodecCapabilities.FEATURE_DynamicColorAspects}));
+ }
+ return argsList;
}
@Override
@@ -196,6 +238,16 @@
mCodec.releaseOutputBuffer(bufferIndex, mSurface != null);
}
+ private ArrayList<String> getSupportedFiles(List<MediaFormat> formats) {
+ ArrayList<String> supportedClips = new ArrayList<>();
+ for (int i = 0; i < formats.size(); i++) {
+ if (isFormatSupported(mCodecName, mMediaType, formats.get(i))) {
+ supportedClips.add(mSrcFiles[i]);
+ }
+ }
+ return supportedClips;
+ }
+
private MediaFormat createInputList(MediaFormat format, ByteBuffer buffer,
ArrayList<MediaCodec.BufferInfo> list, int offset, long ptsOffset) {
if (hasCSD(format)) {
@@ -245,7 +297,8 @@
* Test video decoder for seamless resolution changes.
*/
@CddTest(requirement = "5.3/C-1-1")
- @ApiTest(apis = "android.media.MediaCodecInfo.CodecCapabilities#FEATURE_AdaptivePlayback")
+ @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_AdaptivePlayback",
+ "android.media.MediaCodecInfo.CodecCapabilities#FEATURE_DynamicColorAspects"})
@LargeTest
@Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
public void testAdaptivePlayback() throws IOException, InterruptedException {
@@ -263,18 +316,26 @@
formats.add(setUpSource(MEDIA_DIR + file));
mExtractor.release();
}
- checkFormatSupport(mCodecName, mMediaType, false, formats, null, mSupportRequirements);
+ ArrayList<String> resFiles;
+ if (mSupportRequirements.equals(CODEC_ALL)) {
+ checkFormatSupport(mCodecName, mMediaType, false, formats, null, mSupportRequirements);
+ resFiles = new ArrayList<>(Arrays.asList(mSrcFiles));
+ } else {
+ resFiles = getSupportedFiles(formats);
+ }
+ Assume.assumeTrue("none of the given test clips are supported by the codec: "
+ + mCodecName, !resFiles.isEmpty());
formats.clear();
int totalSize = 0;
- for (String srcFile : mSrcFiles) {
- File file = new File(MEDIA_DIR + srcFile);
+ for (String resFile : resFiles) {
+ File file = new File(MEDIA_DIR + resFile);
totalSize += (int) file.length();
}
long ptsOffset = 0;
int buffOffset = 0;
ArrayList<MediaCodec.BufferInfo> list = new ArrayList<>();
ByteBuffer buffer = ByteBuffer.allocate(totalSize);
- for (String file : mSrcFiles) {
+ for (String file : resFiles) {
formats.add(createInputList(setUpSource(MEDIA_DIR + file), buffer, list, buffOffset,
ptsOffset));
mExtractor.release();
diff --git a/tests/media/src/android/mediav2/cts/AudioEncoderTest.java b/tests/media/src/android/mediav2/cts/AudioEncoderTest.java
index b187cc8..33a2fdf 100644
--- a/tests/media/src/android/mediav2/cts/AudioEncoderTest.java
+++ b/tests/media/src/android/mediav2/cts/AudioEncoderTest.java
@@ -81,7 +81,7 @@
return foreman.build();
}
- private static List<Object[]> flattenParams(List<Object[]> params) {
+ protected static List<Object[]> flattenParams(List<Object[]> params) {
List<Object[]> argsList = new ArrayList<>();
for (Object[] param : params) {
String mediaType = (String) param[0];
diff --git a/tests/media/src/android/mediav2/cts/Av1FilmGrainValidationTest.java b/tests/media/src/android/mediav2/cts/Av1FilmGrainValidationTest.java
index f096422..a142204 100644
--- a/tests/media/src/android/mediav2/cts/Av1FilmGrainValidationTest.java
+++ b/tests/media/src/android/mediav2/cts/Av1FilmGrainValidationTest.java
@@ -165,7 +165,7 @@
public void testAv1FilmGrainRequirement() throws Exception {
MediaFormat format = setUpSource(mTestFile);
mImageSurface = new ImageSurface();
- setUpSurface(getWidth(format), getHeight(format), ImageFormat.YUV_420_888, 1, null);
+ setUpSurface(getWidth(format), getHeight(format), ImageFormat.YUV_420_888, 1, 0, null);
mOutputBuff = new OutputManager();
mCodec = MediaCodec.createByCodecName(mCodecName);
configureCodec(format, true, true, false);
diff --git a/tests/media/src/android/mediav2/cts/CodecDecoderBlockModelMultiAccessUnitTest.java b/tests/media/src/android/mediav2/cts/CodecDecoderBlockModelMultiAccessUnitTest.java
index 7d2fa50..771f222 100644
--- a/tests/media/src/android/mediav2/cts/CodecDecoderBlockModelMultiAccessUnitTest.java
+++ b/tests/media/src/android/mediav2/cts/CodecDecoderBlockModelMultiAccessUnitTest.java
@@ -40,6 +40,7 @@
import com.android.media.codec.flags.Flags;
import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -411,6 +412,105 @@
}
/**
+ * Verifies component and framework behaviour of the flush API when the codec is operating in
+ * multiple frame block model mode. The test verifies that the component / framework output
+ * is consistent with single access unit normal mode.
+ * <p>
+ * While the component is decoding the test clip, mediacodec flush() is called. The flush API
+ * is called at various points :-
+ * <ul>
+ * <li>In running state, after queueing n frames.</li>
+ * <li>In eos state.</li>
+ * </ul>
+ * <p>
+ * In all situations (pre-flush or post-flush), the test expects the output timestamps to be
+ * strictly increasing. The flush call makes the output received non-deterministic even for a
+ * given input. Hence, besides timestamp checks, no additional validation is done for outputs
+ * received before flush. Post flush, decoding begins from a sync frame, so the test
+ * expects consistent output, which needs to be identical to the reference
+ * (single access unit mode).
+ * <p>
+ */
+ @ApiTest(apis = {"android.media.MediaFormat#KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE",
+ "android.media.MediaFormat#KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE",
+ "android.media.MediaCodec.Callback#onOutputBuffersAvailable",
+ "android.media.MediaCodec#flush"})
+ @LargeTest
+ @Ignore("TODO(b/147576107)")
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testFlush() throws IOException, InterruptedException {
+ assumeTrue(mCodecName + " does not support FEATURE_MultipleFrames",
+ isFeatureSupported(mCodecName, mMediaType, FEATURE_MultipleFrames));
+
+ MediaFormat format = setUpSource(mTestFile);
+ final long pts = 250000;
+ mExtractor.release();
+ OutputManager ref = null, test;
+ if (isMediaTypeOutputUnAffectedBySeek(mMediaType)) {
+ CodecDecoderTestBase cdtb = new CodecDecoderTestBase(mCodecName, mMediaType, null,
+ mAllTestParams);
+ cdtb.decodeToMemory(mTestFile, mCodecName, pts, MediaExtractor.SEEK_TO_CLOSEST_SYNC,
+ Integer.MAX_VALUE);
+ ref = cdtb.getOutputManager();
+ test = new OutputManager(ref.getSharedErrorLogs());
+ } else {
+ test = new OutputManager();
+ }
+
+ mOutputBuff = test;
+ setUpSource(mTestFile);
+ int maxSampleSize = getMaxSampleSizeForMediaType(mTestFile, mMediaType);
+ configureKeysForLargeAudioBlockModelFrameMode(format, maxSampleSize, OUT_SIZE_IN_MS[0][0],
+ OUT_SIZE_IN_MS[0][1]);
+ mMaxInputLimitMs = OUT_SIZE_IN_MS[0][0];
+ mCodec = MediaCodec.createByCodecName(mCodecName);
+ test.reset();
+ mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ configureCodec(format, true, true, false);
+
+ mCodec.start();
+ mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ test.reset();
+ doWork(23);
+ if (!test.isPtsStrictlyIncreasing(mPrevOutputPts)) {
+ fail("Output timestamps are not strictly increasing \n" + mTestConfig + mTestEnv
+ + test.getErrMsg());
+ }
+
+ /* test flush in running state */
+ flushCodec();
+ mCodec.start();
+ mSaveToMem = true;
+ test.reset();
+ mExtractor.seekTo(pts, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ doWork(Integer.MAX_VALUE);
+ queueEOS();
+ waitForAllOutputs();
+ if (ref != null && !ref.equalsByteOutput(test)) {
+ fail("Decoder output is not consistent across runs \n" + mTestConfig + mTestEnv
+ + test.getErrMsg());
+ }
+
+ /* test flush in eos state */
+ flushCodec();
+ mCodec.start();
+ test.reset();
+ mExtractor.seekTo(pts, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ doWork(Integer.MAX_VALUE);
+ queueEOS();
+ waitForAllOutputs();
+ mCodec.stop();
+ if (ref != null && !ref.equalsByteOutput(test)) {
+ fail("Decoder output is not consistent across runs \n" + mTestConfig + mTestEnv
+ + test.getErrMsg());
+ }
+
+ mSaveToMem = false;
+ mCodec.release();
+ mExtractor.release();
+ }
+
+ /**
* Verifies component and framework behaviour for format change in multiple frame block model
* mode. The format change is not seamless (AdaptivePlayback) but done via reconfigure.
* <p>
diff --git a/tests/media/src/android/mediav2/cts/CodecDecoderDetachedSurfaceTest.java b/tests/media/src/android/mediav2/cts/CodecDecoderDetachedSurfaceTest.java
new file mode 100644
index 0000000..b59f60a
--- /dev/null
+++ b/tests/media/src/android/mediav2/cts/CodecDecoderDetachedSurfaceTest.java
@@ -0,0 +1,503 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.cts;
+
+import static android.media.codec.Flags.FLAG_NULL_OUTPUT_SURFACE;
+import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUVP010;
+
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.fail;
+
+import android.graphics.ImageFormat;
+import android.media.Image;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.mediav2.common.cts.CodecDecoderTestBase;
+import android.mediav2.common.cts.ImageSurface;
+import android.mediav2.common.cts.OutputManager;
+import android.os.Build;
+import android.platform.test.annotations.AppModeFull;
+import android.platform.test.annotations.RequiresFlagsEnabled;
+import android.util.Log;
+import android.util.Pair;
+import android.view.Surface;
+
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.SdkSuppress;
+
+import com.android.compatibility.common.util.ApiTest;
+
+import org.junit.After;
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.Locale;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Tests mediacodec APIs, video decoders and their interactions in surface mode.
+ * <p>
+ * When video decoders are configured in surface mode, getOutputImage() returns null, so
+ * there is no way to validate the decoded output frames analytically. The tests in this class
+ * however ensure that:
+ * <ul>
+ * <li> The number of decoded frames is equal to the number of input frames.</li>
+ * <li> The output timestamp list is the same as the input timestamp list.</li>
+ * </ul>
+ * <p>
+ * The tests verify the above by running mediacodec in both sync and async modes.
+ */
+@AppModeFull(reason = "Instant apps cannot access the SD card")
+@RunWith(Parameterized.class)
+public class CodecDecoderDetachedSurfaceTest extends CodecDecoderTestBase {
+ private static final String LOG_TAG = CodecDecoderDetachedSurfaceTest.class.getSimpleName();
+ private static final String MEDIA_DIR = WorkDir.getMediaDirString();
+ private static final int MAX_ACTIVE_SURFACES = 4;
+ private static final long WAIT_FOR_IMAGE_TIMEOUT_MS = 5;
+ private static final int[] BURST_LENGTHS = new int[]{25, 19, 13, 5};
+
+ private final int mBurstLength;
+
+ private int mOutputCountInBursts;
+ // current tests decode in burst mode. This field maintains the number of frames
+ // decoded in a single burst session
+ private final Lock mLock = new ReentrantLock();
+ private final int[] mFramesRendered = new int[MAX_ACTIVE_SURFACES];
+ // total number of frames rendered to each output surface
+ private final int[] mFramesRenderedExpected = new int[MAX_ACTIVE_SURFACES];
+ // expected number of frames to be rendered to each output surface
+ private boolean mSurfaceAttached = true;
+ private int mAttachedSurfaceId;
+ // whether the display surface and the codec's configured surface are the same
+ private final ArrayList<ImageSurface> mImageSurfaces = new ArrayList<>();
+ private final ArrayList<Surface> mSurfaces = new ArrayList<>();
+
+ public CodecDecoderDetachedSurfaceTest(String decoder, String mediaType, String testFile,
+ int burstLength, String allTestParams) {
+ super(decoder, mediaType, MEDIA_DIR + testFile, allTestParams);
+ mBurstLength = burstLength;
+ }
+
+ @Parameterized.Parameters(name = "{index}_{0}_{1}_{3}")
+ public static Collection<Object[]> input() {
+ final boolean isEncoder = false;
+ final boolean needAudio = false;
+ final boolean needVideo = true;
+ // mediaType, test file
+ final List<Object[]> exhaustiveArgsList = new ArrayList<>();
+ final List<Object[]> args = new ArrayList<>(Arrays.asList(new Object[][]{
+ {MediaFormat.MIMETYPE_VIDEO_MPEG2, "bbb_340x280_768kbps_30fps_mpeg2.mp4"},
+ {MediaFormat.MIMETYPE_VIDEO_AVC, "bbb_340x280_768kbps_30fps_avc.mp4"},
+ {MediaFormat.MIMETYPE_VIDEO_HEVC, "bbb_520x390_1mbps_30fps_hevc.mp4"},
+ {MediaFormat.MIMETYPE_VIDEO_MPEG4, "bbb_128x96_64kbps_12fps_mpeg4.mp4"},
+ {MediaFormat.MIMETYPE_VIDEO_H263, "bbb_176x144_128kbps_15fps_h263.3gp"},
+ {MediaFormat.MIMETYPE_VIDEO_VP8, "bbb_340x280_768kbps_30fps_vp8.webm"},
+ {MediaFormat.MIMETYPE_VIDEO_VP9, "bbb_340x280_768kbps_30fps_vp9.webm"},
+ {MediaFormat.MIMETYPE_VIDEO_AV1, "bbb_340x280_768kbps_30fps_av1.mp4"},
+ {MediaFormat.MIMETYPE_VIDEO_AV1,
+ "bikes_qcif_color_bt2020_smpte2086Hlg_bt2020Ncl_fr_av1.mp4"},
+ }));
+ // P010 support was added in Android T, hence limit the following tests to Android T and
+ // above
+ if (IS_AT_LEAST_T) {
+ args.addAll(Arrays.asList(new Object[][]{
+ {MediaFormat.MIMETYPE_VIDEO_AVC, "cosmat_520x390_24fps_crf22_avc_10bit.mkv"},
+ {MediaFormat.MIMETYPE_VIDEO_HEVC, "cosmat_520x390_24fps_crf22_hevc_10bit.mkv"},
+ {MediaFormat.MIMETYPE_VIDEO_VP9, "cosmat_520x390_24fps_crf22_vp9_10bit.mkv"},
+ {MediaFormat.MIMETYPE_VIDEO_AV1, "cosmat_520x390_24fps_768kbps_av1_10bit.mkv"},
+ }));
+ }
+ for (Object[] arg : args) {
+ for (int burstLength : BURST_LENGTHS) {
+ Object[] testArgs = new Object[arg.length + 1];
+ System.arraycopy(arg, 0, testArgs, 0, arg.length);
+ testArgs[arg.length] = burstLength;
+ exhaustiveArgsList.add(testArgs);
+ }
+ }
+ return prepareParamList(exhaustiveArgsList, isEncoder, needAudio, needVideo, false);
+ }
+
+ @Before
+ public void setUp() throws IOException, InterruptedException {
+ MediaFormat format = setUpSource(mTestFile);
+ mExtractor.release();
+ ArrayList<MediaFormat> formatList = new ArrayList<>();
+ formatList.add(format);
+ checkFormatSupport(mCodecName, mMediaType, false, formatList, null,
+ SupportClass.CODEC_OPTIONAL);
+ int width = getWidth(format);
+ int height = getHeight(format);
+ int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
+ for (int i = 0; i < MAX_ACTIVE_SURFACES; i++) {
+ ImageSurface sf = new ImageSurface();
+ sf.createSurface(width, height,
+ colorFormat == COLOR_FormatYUVP010 ? ImageFormat.YCBCR_P010 :
+ ImageFormat.YUV_420_888, 5, i, this::onFrameReceived);
+ mImageSurfaces.add(sf);
+ mSurfaces.add(sf.getSurface());
+ }
+ }
+
+ @After
+ public void tearDown() {
+ mSurfaces.clear();
+ for (ImageSurface imgSurface : mImageSurfaces) {
+ imgSurface.release();
+ }
+ mImageSurfaces.clear();
+ }
+
+ @Override
+ protected void resetContext(boolean isAsync, boolean signalEOSWithLastFrame) {
+ super.resetContext(isAsync, signalEOSWithLastFrame);
+ mOutputCountInBursts = 0;
+ Arrays.fill(mFramesRendered, 0);
+ Arrays.fill(mFramesRenderedExpected, 0);
+ }
+
+ @Override
+ protected void doWork(int frameLimit) throws InterruptedException, IOException {
+ if (mIsCodecInAsyncMode) {
+ // dequeue output after inputEOS is expected to be done in waitForAllOutputs()
+ while (!mAsyncHandle.hasSeenError() && !mSawInputEOS
+ && mOutputCountInBursts < frameLimit) {
+ Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandle.getWork();
+ if (element != null) {
+ int bufferID = element.first;
+ MediaCodec.BufferInfo info = element.second;
+ if (info != null) {
+ // <id, info> corresponds to output callback. Handle it accordingly
+ dequeueOutput(bufferID, info);
+ } else {
+ // <id, null> corresponds to input callback. Handle it accordingly
+ enqueueInput(bufferID);
+ }
+ }
+ }
+ } else {
+ MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo();
+ // dequeue output after inputEOS is expected to be done in waitForAllOutputs()
+ while (!mSawInputEOS && mOutputCountInBursts < frameLimit) {
+ int outputBufferId = mCodec.dequeueOutputBuffer(outInfo, Q_DEQ_TIMEOUT_US);
+ if (outputBufferId >= 0) {
+ dequeueOutput(outputBufferId, outInfo);
+ } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
+ mOutFormat = mCodec.getOutputFormat();
+ mSignalledOutFormatChanged = true;
+ }
+ int inputBufferId = mCodec.dequeueInputBuffer(Q_DEQ_TIMEOUT_US);
+ if (inputBufferId != -1) {
+ enqueueInput(inputBufferId);
+ }
+ }
+ }
+ }
+
+ @Override
+ protected void dequeueOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+ if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ mSawOutputEOS = true;
+ }
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "output: id: " + bufferIndex + " flags: " + info.flags + " size: "
+ + info.size + " timestamp: " + info.presentationTimeUs);
+ }
+ if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+ mOutputBuff.saveOutPTS(info.presentationTimeUs);
+ mOutputCount++;
+ mOutputCountInBursts++;
+ if (mSurfaceAttached) mFramesRenderedExpected[mAttachedSurfaceId]++;
+ }
+ mCodec.releaseOutputBuffer(bufferIndex, mSurface != null);
+ if (info.size > 0) {
+ getAllImagesInRenderQueue();
+ }
+ }
+
+ private boolean onFrameReceived(ImageSurface.ImageAndAttributes obj) {
+ if (obj.mImage != null) {
+ mLock.lock();
+ try {
+ mFramesRendered[obj.mImageBoundToSurfaceId] += 1;
+ } finally {
+ mLock.unlock();
+ }
+ }
+ return true;
+ }
+
+ private void getAllImagesInRenderQueue() {
+ for (int i = 0; i < mImageSurfaces.size(); i++) {
+ boolean hasImage;
+ do {
+ try (Image image = mImageSurfaces.get(i).getImage(WAIT_FOR_IMAGE_TIMEOUT_MS)) {
+ onFrameReceived(new ImageSurface.ImageAndAttributes(image, i));
+ hasImage = image != null;
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+ } while (hasImage);
+ }
+ }
+
+ /**
+ * At the start of the test, #MAX_ACTIVE_SURFACES surfaces are instantiated. The
+ * first surface is used for codec configuration. After decoding/rendering 'n' frames,
+ * the output surface associated with the codec session is switched using the API
+ * MediaCodec#setOutputSurface. This is repeated until the end of the sequence. The test checks
+ * that the number of frames rendered to each surface is as expected.
+ */
+ @ApiTest(apis = {"android.media.MediaCodec#setOutputSurface"})
+ @LargeTest
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testSetOutputSurface() throws IOException, InterruptedException {
+ boolean[] boolStates = {true, false};
+ final long pts = 0;
+ final int mode = MediaExtractor.SEEK_TO_CLOSEST_SYNC;
+ MediaFormat format = setUpSource(mTestFile);
+ mCodec = MediaCodec.createByCodecName(mCodecName);
+ mOutputBuff = new OutputManager();
+ for (boolean isAsync : boolStates) {
+ mImageSurface = mImageSurfaces.get(0); // use first surface instance for configuration
+ mSurface = mSurfaces.get(0);
+ mOutputBuff.reset();
+ mExtractor.seekTo(pts, mode);
+ configureCodec(format, isAsync, isAsync /* use crypto configure api */,
+ false /* isEncoder */);
+ mCodec.start();
+ int surfaceId = MAX_ACTIVE_SURFACES - 1;
+ while (!mSawInputEOS) {
+ mOutputCountInBursts = 0;
+ mCodec.setOutputSurface(mSurfaces.get(surfaceId)); // switch surface periodically
+ mImageSurface = mImageSurfaces.get(surfaceId);
+ mSurface = mSurfaces.get(surfaceId);
+ mAttachedSurfaceId = surfaceId;
+ doWork(mBurstLength);
+ getAllImagesInRenderQueue();
+ surfaceId += 1;
+ surfaceId = surfaceId % MAX_ACTIVE_SURFACES;
+ }
+ queueEOS();
+ waitForAllOutputs();
+ endCodecSession(mCodec);
+ getAllImagesInRenderQueue();
+ assertArrayEquals(String.format(Locale.getDefault(),
+ "Number of frames rendered to output surface are not as expected."
+ + " Exp / got : %s / %s \n",
+ Arrays.toString(mFramesRenderedExpected), Arrays.toString(mFramesRendered))
+ + mTestConfig + mTestEnv, mFramesRenderedExpected, mFramesRendered);
+ }
+ mCodec.release();
+ mExtractor.release();
+ }
+
+ /**
+ * At the start of the test, #MAX_ACTIVE_SURFACES surfaces are instantiated. The
+ * codec is configured with the flag CONFIGURE_FLAG_DETACHED_SURFACE. At the start of decoding,
+ * a surface is attached to the component using MediaCodec#setOutputSurface. After
+ * decoding/rendering 'n' frames, the output surface is detached using the API
+ * MediaCodec#detachOutputSurface. After decoding/rendering 'n' more frames, a new surface is
+ * attached. This is repeated until the end of the sequence. The test checks that the number of
+ * frames rendered to each surface at the end of the session is as expected.
+ */
+ @SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM, codeName =
+ "VanillaIceCream")
+ @RequiresFlagsEnabled(FLAG_NULL_OUTPUT_SURFACE)
+ @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_DetachedSurface",
+ "android.media.MediaCodec#detachOutputSurface",
+ "android.media.MediaCodec#CONFIGURE_FLAG_DETACHED_SURFACE"})
+ @LargeTest
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testFeatureDetachedSurface() throws IOException, InterruptedException {
+ Assume.assumeTrue("codec: " + mCodecName + " does not support FEATURE_DetachedSurface",
+ isFeatureSupported(mCodecName, mMediaType,
+ MediaCodecInfo.CodecCapabilities.FEATURE_DetachedSurface));
+ boolean[] boolStates = {true, false};
+ final long pts = 0;
+ final int mode = MediaExtractor.SEEK_TO_CLOSEST_SYNC;
+ MediaFormat format = setUpSource(mTestFile);
+ mCodec = MediaCodec.createByCodecName(mCodecName);
+ mOutputBuff = new OutputManager();
+ for (boolean isAsync : boolStates) {
+ mOutputBuff.reset();
+ mSurface = null;
+ mExtractor.seekTo(pts, mode);
+ configureCodec(format, isAsync, isAsync /* use crypto configure api */,
+ false /* isEncoder */, MediaCodec.CONFIGURE_FLAG_DETACHED_SURFACE);
+ mCodec.start();
+ boolean attachSurface = true;
+ int surfaceId = 0;
+ while (!mSawInputEOS) {
+ mOutputCountInBursts = 0;
+ if (attachSurface) {
+ mCodec.setOutputSurface(mSurfaces.get(surfaceId));
+ mImageSurface = mImageSurfaces.get(surfaceId);
+ mSurface = mSurfaces.get(surfaceId);
+ mSurfaceAttached = true;
+ mAttachedSurfaceId = surfaceId;
+ surfaceId += 1;
+ surfaceId = surfaceId % MAX_ACTIVE_SURFACES;
+ } else {
+ mCodec.detachOutputSurface();
+ mSurfaceAttached = false;
+ }
+ attachSurface = !attachSurface;
+ doWork(mBurstLength);
+ getAllImagesInRenderQueue();
+ }
+ queueEOS();
+ waitForAllOutputs();
+ endCodecSession(mCodec);
+ getAllImagesInRenderQueue();
+ assertArrayEquals(String.format(Locale.getDefault(),
+ "Number of frames rendered to output surface are not as expected."
+ + " Exp / got : %s / %s \n",
+ Arrays.toString(mFramesRenderedExpected), Arrays.toString(mFramesRendered))
+ + mTestConfig + mTestEnv, mFramesRenderedExpected, mFramesRendered);
+ }
+ mCodec.release();
+ mExtractor.release();
+ }
+
+ /**
+ * If the component does not support FEATURE_DetachedSurface, the test checks that passing the
+ * flag CONFIGURE_FLAG_DETACHED_SURFACE during configure throws an exception. Also, in the
+ * normal running state, a call to detachOutputSurface() must throw an exception. Conversely,
+ * if the component supports FEATURE_DetachedSurface, the flag CONFIGURE_FLAG_DETACHED_SURFACE
+ * and detachOutputSurface() must work as documented. Additionally, after detaching the output
+ * surface, the application releases the surface and expects normal decode functionality.
+ */
+ @SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM, codeName =
+ "VanillaIceCream")
+ @RequiresFlagsEnabled(FLAG_NULL_OUTPUT_SURFACE)
+ @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_DetachedSurface",
+ "android.media.MediaCodec#CONFIGURE_FLAG_DETACHED_SURFACE"})
+ @LargeTest
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testDetachOutputSurface() throws IOException, InterruptedException {
+ boolean hasSupport = isFeatureSupported(mCodecName, mMediaType,
+ MediaCodecInfo.CodecCapabilities.FEATURE_DetachedSurface);
+ boolean[] boolStates = {true, false};
+ final long pts = 0;
+ final int mode = MediaExtractor.SEEK_TO_CLOSEST_SYNC;
+ MediaFormat format = setUpSource(mTestFile);
+ mCodec = MediaCodec.createByCodecName(mCodecName);
+ mOutputBuff = new OutputManager();
+ for (boolean isAsync : boolStates) {
+ mOutputBuff.reset();
+ mSurface = null;
+ mExtractor.seekTo(pts, mode);
+ if (hasSupport) {
+ try {
+ configureCodec(format, isAsync, isAsync /* use crypto configure api */,
+ false /* isEncoder */, MediaCodec.CONFIGURE_FLAG_DETACHED_SURFACE);
+ } catch (IllegalArgumentException e) {
+ fail(mCodecName + " advertises support for feature: FEATURE_DetachedSurface but"
+ + " configuration fails with MediaCodec"
+ + ".CONFIGURE_FLAG_DETACHED_SURFACE \n" + mTestConfig + mTestEnv);
+ }
+ mCodec.start();
+
+ // attach a surface and decode a few frames
+ int surfaceId = 0;
+ mOutputCountInBursts = 0;
+ mCodec.setOutputSurface(mSurfaces.get(surfaceId));
+ mImageSurface = mImageSurfaces.get(surfaceId);
+ mSurface = mSurfaces.get(surfaceId);
+ mSurfaceAttached = true;
+ mAttachedSurfaceId = surfaceId;
+ doWork(mBurstLength); // decode
+ getAllImagesInRenderQueue();
+
+ // detach surface and release it
+ try {
+ mCodec.detachOutputSurface();
+ } catch (IllegalStateException e) {
+ fail(mCodecName + " advertises support for feature: FEATURE_DetachedSurface but"
+ + " detachOutputSurface() fails with " + e + "\n" + mTestConfig
+ + mTestEnv);
+ }
+ mImageSurfaces.get(surfaceId).release();
+ mImageSurfaces.remove(surfaceId);
+ mSurfaces.remove(surfaceId);
+
+ // decode a few frames without attaching a surface
+ mOutputCountInBursts = 0;
+ mSurfaceAttached = false;
+ doWork(mBurstLength);
+ getAllImagesInRenderQueue();
+
+ // attach a new surface and decode a few frames
+ mOutputCountInBursts = 0;
+ mCodec.setOutputSurface(mSurfaces.get(surfaceId));
+ mImageSurface = mImageSurfaces.get(surfaceId);
+ mSurface = mSurfaces.get(surfaceId);
+ mSurfaceAttached = true;
+ mAttachedSurfaceId = surfaceId;
+ doWork(mBurstLength);
+ getAllImagesInRenderQueue();
+ } else {
+ try {
+ configureCodec(format, isAsync, isAsync /* use crypto configure api */,
+ false /* isEncoder */, MediaCodec.CONFIGURE_FLAG_DETACHED_SURFACE);
+ fail(mCodecName + " does not advertise support for feature:"
+ + " FEATURE_DetachedSurface but configuration succeeds with MediaCodec"
+ + ".CONFIGURE_FLAG_DETACHED_SURFACE \n" + mTestConfig + mTestEnv);
+ } catch (IllegalArgumentException ignored) {
+ }
+ mImageSurface = mImageSurfaces.get(0); // use first instance for configuration
+ mSurface = mSurfaces.get(0);
+ configureCodec(format, isAsync, isAsync /* use crypto configure api */,
+ false /* isEncoder */);
+
+ mCodec.start();
+ mOutputCountInBursts = 0;
+ doWork(mBurstLength);
+ getAllImagesInRenderQueue();
+ try {
+ mCodec.detachOutputSurface();
+ fail(mCodecName + " has no support for feature: FEATURE_DetachedSurface but"
+ + " detachOutputSurface() succeeds \n" + mTestConfig + mTestEnv);
+ } catch (IllegalStateException ignored) {
+ }
+ }
+ queueEOS();
+ waitForAllOutputs();
+ endCodecSession(mCodec);
+ getAllImagesInRenderQueue();
+ assertArrayEquals(String.format(Locale.getDefault(),
+ "Number of frames rendered to output surface are not as expected."
+ + " Exp / got : %s / %s \n",
+ Arrays.toString(mFramesRenderedExpected), Arrays.toString(mFramesRendered))
+ + mTestConfig + mTestEnv, mFramesRenderedExpected, mFramesRendered);
+ }
+ mCodec.release();
+ mExtractor.release();
+ }
+}
diff --git a/tests/media/src/android/mediav2/cts/CodecDecoderMultiAccessUnitTest.java b/tests/media/src/android/mediav2/cts/CodecDecoderMultiAccessUnitTest.java
index eb4e07b..3dae33f 100644
--- a/tests/media/src/android/mediav2/cts/CodecDecoderMultiAccessUnitTest.java
+++ b/tests/media/src/android/mediav2/cts/CodecDecoderMultiAccessUnitTest.java
@@ -39,6 +39,7 @@
import com.android.media.codec.flags.Flags;
import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -410,6 +411,123 @@
}
/**
+ * Verifies component and framework behaviour of the flush API when the codec is operating in
+ * multiple frame mode.
+ * <p>
+ * While the component is decoding the test clip, mediacodec flush() is called. The flush API
+ * is called at various points :-
+ * <ul>
+ * <li>In running state but before queueing any input (might have to resubmit csd as they
+ * may not have been processed).</li>
+ * <li>In running state, after queueing 1 frame.</li>
+ * <li>In running state, after queueing n frames.</li>
+ * <li>In eos state.</li>
+ * </ul>
+ * <p>
+ * In all situations (pre-flush or post-flush), the test expects the output timestamps to be
+ * strictly increasing. The flush call makes the output received non-deterministic even for a
+ * given input. Hence, besides timestamp checks, no additional validation is done for outputs
+ * received before flush. Post flush, decoding begins from a sync frame, so the test
+ * expects consistent output, which needs to be identical to the reference
+ * (single access unit mode).
+ * <p>
+ */
+ @ApiTest(apis = {"android.media.MediaFormat#KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE",
+ "android.media.MediaFormat#KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE",
+ "android.media.MediaCodec.Callback#onOutputBuffersAvailable",
+ "android.media.MediaCodec#flush"})
+ @LargeTest
+ @Ignore("TODO(b/147576107)")
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testFlush() throws IOException, InterruptedException {
+ assumeTrue(mCodecName + " does not support FEATURE_MultipleFrames",
+ isFeatureSupported(mCodecName, mMediaType, FEATURE_MultipleFrames));
+ MediaFormat format = setUpSource(mTestFile);
+ final long pts = 250000;
+ mExtractor.release();
+ mCsdBuffers.clear();
+ for (int i = 0; ; i++) {
+ String csdKey = "csd-" + i;
+ if (format.containsKey(csdKey)) {
+ mCsdBuffers.add(format.getByteBuffer(csdKey));
+ } else break;
+ }
+
+ OutputManager ref = null, test;
+ if (isMediaTypeOutputUnAffectedBySeek(mMediaType)) {
+ CodecDecoderTestBase cdtb = new CodecDecoderTestBase(mCodecName, mMediaType, null,
+ mAllTestParams);
+ cdtb.decodeToMemory(mTestFile, mCodecName, pts, MediaExtractor.SEEK_TO_CLOSEST_SYNC,
+ Integer.MAX_VALUE);
+ ref = cdtb.getOutputManager();
+ test = new OutputManager(ref.getSharedErrorLogs());
+ } else {
+ test = new OutputManager();
+ }
+ mOutputBuff = test;
+ setUpSource(mTestFile);
+ int maxSampleSize = getMaxSampleSizeForMediaType(mTestFile, mMediaType);
+ configureKeysForLargeAudioFrameMode(format, maxSampleSize, OUT_SIZE_IN_MS[0][0],
+ OUT_SIZE_IN_MS[0][1]);
+ mMaxInputLimitMs = OUT_SIZE_IN_MS[0][0];
+ mCodec = MediaCodec.createByCodecName(mCodecName);
+ test.reset();
+ mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ configureCodec(format, true, true, false);
+ mCodec.start();
+
+ /* test flush in running state before queuing input */
+ flushCodec();
+
+ mCodec.start();
+ queueCodecConfig(); /* flushed codec too soon after start, resubmit csd */
+ doWork(1);
+ flushCodec();
+ mCodec.start();
+ queueCodecConfig(); /* flushed codec too soon after start, resubmit csd */
+
+ mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ test.reset();
+ doWork(23);
+ if (!test.isPtsStrictlyIncreasing(mPrevOutputPts)) {
+ fail("Output timestamps are not strictly increasing \n" + mTestConfig + mTestEnv
+ + test.getErrMsg());
+ }
+
+ /* test flush in running state */
+ flushCodec();
+ mCodec.start();
+ mSaveToMem = true;
+ test.reset();
+ mExtractor.seekTo(pts, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ doWork(Integer.MAX_VALUE);
+ queueEOS();
+ waitForAllOutputs();
+ if (ref != null && !ref.equalsByteOutput(test)) {
+ fail("Decoder output is not consistent across runs \n" + mTestConfig + mTestEnv
+ + test.getErrMsg());
+ }
+
+ /* test flush in eos state */
+ flushCodec();
+ mCodec.start();
+ test.reset();
+ mExtractor.seekTo(pts, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ doWork(Integer.MAX_VALUE);
+ queueEOS();
+ waitForAllOutputs();
+ mCodec.stop();
+ if (ref != null && !ref.equalsByteOutput(test)) {
+ fail("Decoder output is not consistent across runs \n" + mTestConfig + mTestEnv
+ + test.getErrMsg());
+ }
+
+ mSaveToMem = false;
+ mCodec.release();
+ mExtractor.release();
+ }
+
+ /**
* Verifies component and framework behaviour for format change in multiple frame mode.
* The format change is not seamless (AdaptivePlayback) but done via reconfigure.
* <p>
diff --git a/tests/media/src/android/mediav2/cts/CodecEncoderBlockModelMultiAccessUnitTest.java b/tests/media/src/android/mediav2/cts/CodecEncoderBlockModelMultiAccessUnitTest.java
new file mode 100644
index 0000000..8e5f191
--- /dev/null
+++ b/tests/media/src/android/mediav2/cts/CodecEncoderBlockModelMultiAccessUnitTest.java
@@ -0,0 +1,351 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.cts;
+
+import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_MultipleFrames;
+import static android.mediav2.common.cts.CodecTestBase.SupportClass.CODEC_OPTIONAL;
+import static android.mediav2.cts.AudioEncoderTest.flattenParams;
+import static android.mediav2.cts.CodecDecoderMultiAccessUnitTest.getCompressionRatio;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeTrue;
+
+import android.media.AudioFormat;
+import android.media.MediaCodec;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.mediav2.common.cts.CodecAsyncHandlerMultiAccessUnits;
+import android.mediav2.common.cts.CodecEncoderBlockModelTestBase;
+import android.mediav2.common.cts.CodecEncoderTestBase;
+import android.mediav2.common.cts.EncoderConfigParams;
+import android.mediav2.common.cts.OutputManager;
+import android.os.Build;
+import android.platform.test.annotations.AppModeFull;
+import android.platform.test.annotations.RequiresFlagsEnabled;
+import android.util.Log;
+import android.util.Pair;
+
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.SdkSuppress;
+
+import com.android.compatibility.common.util.ApiTest;
+import com.android.media.codec.flags.Flags;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Tests audio encoders' support for the feature MultipleFrames in block model mode.
+ * <p>
+ * The MultipleFrames feature is optional and need not be supported by all components. If a
+ * component supports this feature, then multiple access units grouped together (demarcated
+ * with access unit offsets and timestamps) are sent as input to the component. The component
+ * processes the input and returns output in a large enough buffer (demarcated with access
+ * unit offsets and timestamps). The number of access units that can be grouped depends on the
+ * format keys KEY_MAX_INPUT_SIZE and KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE.
+ * <p>
+ * The test runs the component in MultipleFrames block model mode and in normal mode and expects
+ * the same output for a given input.
+ **/
+@SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM, codeName = "VanillaIceCream")
+@AppModeFull(reason = "Instant apps cannot access the SD card")
+@RequiresFlagsEnabled(Flags.FLAG_LARGE_AUDIO_FRAME)
+@RunWith(Parameterized.class)
+public class CodecEncoderBlockModelMultiAccessUnitTest extends CodecEncoderBlockModelTestBase {
+ private static final String LOG_TAG =
+ CodecEncoderBlockModelMultiAccessUnitTest.class.getSimpleName();
+ private static final int[][] OUT_SIZE_IN_MS = {
+ {1000, 250}, // max out size, threshold batch out size
+ {1000, 100},
+ {500, 20},
+ {100, 100},
+ {40, 100}
+ };
+
+ private CodecAsyncHandlerMultiAccessUnits mAsyncHandleMultiAccessUnits;
+ private int mMaxOutputSizeBytes;
+
+ @Parameterized.Parameters(name = "{index}_{0}_{1}_{3}")
+ public static Collection<Object[]> input() {
+ List<Object[]> defArgsList = new ArrayList<>(Arrays.asList(new Object[][]{
+ // mediaType, arrays of bit-rates, sample rates, channel counts, pcm encoding
+ {MediaFormat.MIMETYPE_AUDIO_AAC, new int[]{64000, 128000}, new int[]{8000, 12000,
+ 16000, 22050, 24000, 32000, 44100, 48000}, new int[]{1, 2},
+ AudioFormat.ENCODING_PCM_16BIT},
+ {MediaFormat.MIMETYPE_AUDIO_OPUS, new int[]{64000, 128000}, new int[]{8000, 12000,
+ 16000, 24000, 48000}, new int[]{1, 2},
+ AudioFormat.ENCODING_PCM_16BIT},
+ {MediaFormat.MIMETYPE_AUDIO_AMR_NB, new int[]{4750, 5150, 5900, 6700, 7400, 7950,
+ 10200, 12200}, new int[]{8000}, new int[]{1},
+ AudioFormat.ENCODING_PCM_16BIT},
+ {MediaFormat.MIMETYPE_AUDIO_AMR_WB, new int[]{6600, 8850, 12650, 14250, 15850,
+ 18250, 19850, 23050, 23850}, new int[]{16000}, new int[]{1},
+ AudioFormat.ENCODING_PCM_16BIT},
+ {MediaFormat.MIMETYPE_AUDIO_FLAC, new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8},
+ new int[]{8000, 16000, 32000, 48000, 96000, 192000}, new int[]{1, 2},
+ AudioFormat.ENCODING_PCM_16BIT},
+ {MediaFormat.MIMETYPE_AUDIO_FLAC, new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8},
+ new int[]{8000, 16000, 32000, 48000, 96000, 192000}, new int[]{1, 2},
+ AudioFormat.ENCODING_PCM_FLOAT},
+ }));
+ List<Object[]> argsList = flattenParams(defArgsList);
+ return prepareParamList(argsList, true, true, false, true);
+ }
+
+ public CodecEncoderBlockModelMultiAccessUnitTest(String encoder, String mediaType,
+ EncoderConfigParams cfgParams, @SuppressWarnings("unused") String testLabel,
+ String allTestParams) {
+ super(encoder, mediaType, new EncoderConfigParams[]{cfgParams}, allTestParams);
+ mAsyncHandle = new CodecAsyncHandlerMultiAccessUnits();
+ }
+
+ @Before
+ public void setUp() throws IOException {
+ mActiveEncCfg = mEncCfgParams[0];
+ MediaFormat format = mActiveEncCfg.getFormat();
+ ArrayList<MediaFormat> formatList = new ArrayList<>();
+ formatList.add(format);
+ checkFormatSupport(mCodecName, mMediaType, true, formatList, null, CODEC_OPTIONAL);
+ mActiveRawRes = EncoderInput.getRawResource(mActiveEncCfg);
+ assertNotNull("no raw resource found for testing config : " + mActiveEncCfg + mTestConfig
+ + mTestEnv, mActiveRawRes);
+ Object asyncHandle = mAsyncHandle;
+ assertTrue("async handle shall be an instance of CodecAsyncHandlerMultiAccessUnits"
+ + " while testing Feature_MultipleFrames" + mTestConfig + mTestEnv,
+ asyncHandle instanceof CodecAsyncHandlerMultiAccessUnits);
+ mAsyncHandleMultiAccessUnits = (CodecAsyncHandlerMultiAccessUnits) asyncHandle;
+ }
+
+ @Override
+ protected void resetContext(boolean isAsync, boolean signalEOSWithLastFrame) {
+ super.resetContext(isAsync, signalEOSWithLastFrame);
+ mMaxOutputSizeBytes = 0;
+ }
+
+ private void validateOutputFormat(MediaFormat outFormat) {
+ Assert.assertTrue("Output format " + outFormat + " does not contain key "
+ + MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE + ". \n"
+ + mTestConfig + mTestEnv,
+ outFormat.containsKey(MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE));
+ mMaxOutputSizeBytes = outFormat.getInteger(MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE);
+ }
+
+ private void dequeueOutputs(int bufferIndex, ArrayDeque<MediaCodec.BufferInfo> infos) {
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "output: id: " + bufferIndex);
+ }
+ validateOutputFormat(mCodec.getOutputFormat(bufferIndex));
+ MediaCodec.OutputFrame frame = mCodec.getOutputFrame(bufferIndex);
+ ByteBuffer buf = frame.getLinearBlock() != null ? frame.getLinearBlock().map() : null;
+ MediaCodec.BufferInfo[] frameInfos =
+ frame.getBufferInfos().toArray(new MediaCodec.BufferInfo[0]);
+ MediaCodec.BufferInfo[] callBackInfos = infos.toArray(new MediaCodec.BufferInfo[0]);
+ assertEquals("Lengths of frameInfos received via callback and getBufferInfos api are not "
+ + "identical. \n" + mTestConfig + mTestEnv, frameInfos.length,
+ callBackInfos.length);
+
+ int totalSize = 0;
+ for (int i = 0; i < frameInfos.length; ++i) {
+ MediaCodec.BufferInfo frameInfo = frameInfos[i];
+ MediaCodec.BufferInfo callBackInfo = callBackInfos[i];
+ Assert.assertNotNull("received null entry in dequeueOutput frame infos list. \n"
+ + mTestConfig + mTestEnv, frameInfo);
+ Assert.assertNotNull("received null entry in dequeueOutput callback infos list. \n"
+ + mTestConfig + mTestEnv, callBackInfo);
+
+ long framePts = frameInfo.presentationTimeUs;
+ long infoPts = callBackInfo.presentationTimeUs;
+ int frameFlags = frameInfo.flags;
+ int infoFlags = callBackInfo.flags;
+ assertEquals("presentation timestamps from OutputFrame does not match with the value "
+ + "obtained from callback: framePts=" + framePts + ", infoPts=" + infoPts
+ + "\n" + mTestConfig + mTestEnv, framePts, infoPts);
+ assertEquals("Flags from OutputFrame does not match with the value obtained from "
+ + "callback: frameFlags=" + frameFlags + ", infoFlags=" + infoFlags + "\n"
+ + mTestConfig + mTestEnv, frameFlags, infoFlags);
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "output: id: " + bufferIndex + " flags: " + infoFlags + " size: "
+ + callBackInfo.size + " timestamp: " + infoPts);
+ }
+ if ((infoFlags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ mSawOutputEOS = true;
+ }
+ if (callBackInfo.size > 0) {
+ if (buf != null) {
+ if (mSaveToMem) {
+ MediaCodec.BufferInfo copy = new MediaCodec.BufferInfo();
+ copy.set(mOutputBuff.getOutStreamSize(), callBackInfo.size, infoPts,
+ infoFlags);
+ mInfoList.add(copy);
+
+ mOutputBuff.checksum(buf, callBackInfo);
+ mOutputBuff.saveToMemory(buf, callBackInfo);
+ }
+ if (mMuxer != null) {
+ if (mTrackID == -1) {
+ mTrackID = mMuxer.addTrack(mCodec.getOutputFormat());
+ mMuxer.start();
+ }
+ mMuxer.writeSampleData(mTrackID, buf, callBackInfo);
+ }
+ }
+ if ((infoFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+ mOutputBuff.saveOutPTS(infoPts);
+ mOutputCount++;
+ }
+ }
+ totalSize += callBackInfo.size;
+ }
+ if (frame.getLinearBlock() != null) {
+ frame.getLinearBlock().recycle();
+ }
+ assertTrue("Sum of all info sizes: " + totalSize + " exceeds max output size: "
+ + mMaxOutputSizeBytes + " \n" + mTestConfig + mTestEnv,
+ totalSize <= mMaxOutputSizeBytes);
+ mCodec.releaseOutputBuffer(bufferIndex, false);
+ }
+
+ @Override
+ protected void doWork(int frameLimit) throws InterruptedException, IOException {
+ mLoopBackFrameLimit = frameLimit;
+ if (mMuxOutput) {
+ int muxerFormat = getMuxerFormatForMediaType(mMediaType);
+ mMuxedOutputFile = getTempFilePath("");
+ mMuxer = new MediaMuxer(mMuxedOutputFile, muxerFormat);
+ }
+ // dequeue output after inputEOS is expected to be done in waitForAllOutputs()
+ while (!mAsyncHandleMultiAccessUnits.hasSeenError() && !mSawInputEOS
+ && mInputCount < frameLimit) {
+ Pair<Integer, ArrayDeque<MediaCodec.BufferInfo>> element =
+ mAsyncHandleMultiAccessUnits.getWorkList();
+ if (element != null) {
+ int bufferID = element.first;
+ ArrayDeque<MediaCodec.BufferInfo> infos = element.second;
+ if (infos != null) {
+ // <id, infos> corresponds to output callback. Handle it accordingly
+ dequeueOutputs(bufferID, infos);
+ } else {
+ // <id, null> corresponds to input callback. Handle it accordingly
+ enqueueInput(bufferID);
+ }
+ }
+ }
+ }
+
+ @Override
+ protected void waitForAllOutputs() throws InterruptedException {
+ while (!mAsyncHandleMultiAccessUnits.hasSeenError() && !mSawOutputEOS) {
+ Pair<Integer, ArrayDeque<MediaCodec.BufferInfo>> element =
+ mAsyncHandleMultiAccessUnits.getOutputs();
+ if (element != null) {
+ dequeueOutputs(element.first, element.second);
+ }
+ }
+ if (mMuxOutput) {
+ if (mTrackID != -1) {
+ mMuxer.stop();
+ mTrackID = -1;
+ }
+ if (mMuxer != null) {
+ mMuxer.release();
+ mMuxer = null;
+ }
+ }
+ validateTestState();
+ }
+
+ /**
+ * Verifies that the component under test can encode the test file correctly in multiple frame
+ * block model mode. The encoding happens in asynchronous mode with the eos flag signalled
+ * alongside the last raw frame. The test verifies that the component / framework output is
+ * consistent with single access unit normal mode and single access unit block model mode.
+ * <p>
+ * Check description of class {@link CodecEncoderBlockModelMultiAccessUnitTest}
+ */
+ @ApiTest(apis = {"android.media.MediaFormat#KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE",
+ "android.media.MediaFormat#KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE",
+ "android.media.MediaCodec.Callback#onOutputBuffersAvailable",
+ "android.media.MediaCodec#CONFIGURE_FLAG_USE_BLOCK_MODEL"})
+ @LargeTest
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testSimpleEncode() throws IOException, InterruptedException {
+ assumeTrue(mCodecName + " does not support FEATURE_MultipleFrames",
+ isFeatureSupported(mCodecName, mMediaType, FEATURE_MultipleFrames));
+ assumeTrue(mCodecName + " is not compatible with LinearBlocks",
+ MediaCodec.LinearBlock.isCodecCopyFreeCompatible(new String[]{mCodecName}));
+
+ CodecEncoderTestBase referenceBase = new CodecEncoderTestBase(mCodecName, mMediaType,
+ new EncoderConfigParams[]{mActiveEncCfg}, mAllTestParams);
+ referenceBase.encodeToMemory(mCodecName, mActiveEncCfg, mActiveRawRes, Integer.MAX_VALUE,
+ true, false);
+ OutputManager ref = referenceBase.getOutputManager();
+
+ CodecEncoderBlockModelTestBase cebmtb = new CodecEncoderBlockModelTestBase(mCodecName,
+ mMediaType, new EncoderConfigParams[]{mActiveEncCfg}, mAllTestParams);
+ OutputManager test = new OutputManager(ref.getSharedErrorLogs());
+ cebmtb.encodeToMemory(mCodecName, mActiveEncCfg, mActiveRawRes, test, Integer.MAX_VALUE,
+ true, false);
+ if (!ref.equalsDequeuedOutput(test)) {
+ fail("Output in block model mode is not same as output in normal mode.\n" + mTestConfig
+ + mTestEnv + test.getErrMsg());
+ }
+
+ mOutputBuff = new OutputManager(ref.getSharedErrorLogs());
+ mSaveToMem = true;
+ mMuxOutput = false;
+ setUpSource(mActiveRawRes.mFileName);
+ mCodec = MediaCodec.createByCodecName(mCodecName);
+ MediaFormat format = mActiveEncCfg.getFormat();
+ for (int[] outSizeInMs : OUT_SIZE_IN_MS) {
+ int frameSize = mActiveRawRes.mBytesPerSample * mActiveEncCfg.mChannelCount;
+ int maxOutputSize = (outSizeInMs[0] * frameSize * mActiveEncCfg.mSampleRate) / 1000;
+ int thresholdOutputSize =
+ (outSizeInMs[1] * frameSize * mActiveEncCfg.mSampleRate) / 1000;
+ mMaxInputSizeInMs = (outSizeInMs[0] / getCompressionRatio(mMediaType));
+ format.setInteger(MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, maxOutputSize);
+ format.setInteger(MediaFormat.KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
+ thresholdOutputSize);
+ configureCodec(format, true, true, true);
+ mOutputBuff.reset();
+ mInfoList.clear();
+ mCodec.start();
+ doWork(Integer.MAX_VALUE);
+ queueEOS();
+ waitForAllOutputs();
+ mCodec.reset();
+ if (!ref.equalsDequeuedOutput(mOutputBuff)) {
+ fail("Output of encoder in MultipleFrames mode differs from single access unit "
+ + "mode.\n" + mTestConfig + mTestEnv + mOutputBuff.getErrMsg());
+ }
+ }
+ mCodec.release();
+ }
+}
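The class comment above explains how KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE and
KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE drive access-unit batching in block model mode. The
following is a minimal sketch of that configuration path, not taken from the change above; the
codec name, sample rate, channel count and batch durations are illustrative placeholders.

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;

import java.io.IOException;

public final class MultiAccessUnitEncoderSketch {
    // Configures an audio encoder for MultipleFrames operation in block model mode,
    // mirroring what testSimpleEncode() above does once per OUT_SIZE_IN_MS entry.
    public static MediaCodec configure(String codecName) throws IOException {
        MediaFormat format = MediaFormat.createAudioFormat(
                MediaFormat.MIMETYPE_AUDIO_AAC, 48000 /* sampleRate */, 2 /* channelCount */);
        format.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
        // Batch sizes are derived from a duration in ms, as in the test:
        // bytesPerSample * channelCount * sampleRate * durationMs / 1000.
        int frameSize = 2 /* bytes per PCM16 sample */ * 2 /* channels */;
        format.setInteger(MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE,
                1000 * frameSize * 48000 / 1000);
        format.setInteger(MediaFormat.KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
                250 * frameSize * 48000 / 1000);

        MediaCodec codec = MediaCodec.createByCodecName(codecName);
        MediaCodecInfo.CodecCapabilities caps = codec.getCodecInfo()
                .getCapabilitiesForType(MediaFormat.MIMETYPE_AUDIO_AAC);
        // The feature is optional, so callers check it before relying on batching.
        if (!caps.isFeatureSupported(MediaCodecInfo.CodecCapabilities.FEATURE_MultipleFrames)) {
            codec.release();
            throw new IllegalStateException(codecName + " lacks FEATURE_MultipleFrames");
        }
        codec.configure(format, null /* surface */, null /* crypto */,
                MediaCodec.CONFIGURE_FLAG_ENCODE | MediaCodec.CONFIGURE_FLAG_USE_BLOCK_MODEL);
        return codec;
    }
}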
diff --git a/tests/media/src/android/mediav2/cts/CodecEncoderMultiAccessUnitTest.java b/tests/media/src/android/mediav2/cts/CodecEncoderMultiAccessUnitTest.java
new file mode 100644
index 0000000..ed55b72
--- /dev/null
+++ b/tests/media/src/android/mediav2/cts/CodecEncoderMultiAccessUnitTest.java
@@ -0,0 +1,338 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.cts;
+
+import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_MultipleFrames;
+import static android.mediav2.common.cts.CodecTestBase.SupportClass.CODEC_OPTIONAL;
+import static android.mediav2.cts.AudioEncoderTest.flattenParams;
+import static android.mediav2.cts.CodecDecoderMultiAccessUnitTest.getCompressionRatio;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeTrue;
+
+import android.media.AudioFormat;
+import android.media.MediaCodec;
+import android.media.MediaFormat;
+import android.media.MediaMuxer;
+import android.mediav2.common.cts.CodecAsyncHandlerMultiAccessUnits;
+import android.mediav2.common.cts.CodecEncoderTestBase;
+import android.mediav2.common.cts.EncoderConfigParams;
+import android.mediav2.common.cts.OutputManager;
+import android.os.Build;
+import android.platform.test.annotations.AppModeFull;
+import android.platform.test.annotations.RequiresFlagsEnabled;
+import android.util.Log;
+import android.util.Pair;
+
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.SdkSuppress;
+
+import com.android.compatibility.common.util.ApiTest;
+import com.android.media.codec.flags.Flags;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * Tests audio encoders' support for the feature MultipleFrames.
+ * <p>
+ * The MultipleFrames feature is optional and need not be supported by all components. If a
+ * component supports this feature, then multiple access units grouped together (demarcated
+ * with access unit offsets and timestamps) are sent as input to the component. The component
+ * processes the input and returns output in a large enough buffer (demarcated with access
+ * unit offsets and timestamps). The number of access units that can be grouped depends on the
+ * format keys KEY_MAX_INPUT_SIZE and KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE.
+ * <p>
+ * The test runs the component in MultipleFrames mode and in normal mode and expects the same
+ * output for a given input.
+ **/
+@SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM, codeName = "VanillaIceCream")
+@AppModeFull(reason = "Instant apps cannot access the SD card")
+@RequiresFlagsEnabled(Flags.FLAG_LARGE_AUDIO_FRAME)
+@RunWith(Parameterized.class)
+public class CodecEncoderMultiAccessUnitTest extends CodecEncoderTestBase {
+ private static final String LOG_TAG = CodecEncoderMultiAccessUnitTest.class.getSimpleName();
+ private static final int[][] OUT_SIZE_IN_MS = {
+ {1000, 250}, // max out size, threshold batch out size
+ {1000, 100},
+ {500, 20},
+ {100, 100},
+ {40, 100}
+ };
+
+ private CodecAsyncHandlerMultiAccessUnits mAsyncHandleMultiAccessUnits;
+ private int mMaxOutputSizeBytes;
+
+ @Parameterized.Parameters(name = "{index}_{0}_{1}_{3}")
+ public static Collection<Object[]> input() {
+ List<Object[]> defArgsList = new ArrayList<>(Arrays.asList(new Object[][]{
+ // mediaType, arrays of bit-rates, sample rates, channel counts, pcm encoding
+ {MediaFormat.MIMETYPE_AUDIO_AAC, new int[]{64000, 128000}, new int[]{8000, 12000,
+ 16000, 22050, 24000, 32000, 44100, 48000}, new int[]{1, 2},
+ AudioFormat.ENCODING_PCM_16BIT},
+ {MediaFormat.MIMETYPE_AUDIO_OPUS, new int[]{64000, 128000}, new int[]{8000, 12000,
+ 16000, 24000, 48000}, new int[]{1, 2},
+ AudioFormat.ENCODING_PCM_16BIT},
+ {MediaFormat.MIMETYPE_AUDIO_AMR_NB, new int[]{4750, 5150, 5900, 6700, 7400, 7950,
+ 10200, 12200}, new int[]{8000}, new int[]{1},
+ AudioFormat.ENCODING_PCM_16BIT},
+ {MediaFormat.MIMETYPE_AUDIO_AMR_WB, new int[]{6600, 8850, 12650, 14250, 15850,
+ 18250, 19850, 23050, 23850}, new int[]{16000}, new int[]{1},
+ AudioFormat.ENCODING_PCM_16BIT},
+ {MediaFormat.MIMETYPE_AUDIO_FLAC, new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8},
+ new int[]{8000, 16000, 32000, 48000, 96000, 192000}, new int[]{1, 2},
+ AudioFormat.ENCODING_PCM_16BIT},
+ {MediaFormat.MIMETYPE_AUDIO_FLAC, new int[]{0, 1, 2, 3, 4, 5, 6, 7, 8},
+ new int[]{8000, 16000, 32000, 48000, 96000, 192000}, new int[]{1, 2},
+ AudioFormat.ENCODING_PCM_FLOAT},
+ }));
+ List<Object[]> argsList = flattenParams(defArgsList);
+ return prepareParamList(argsList, true, true, false, true);
+ }
+
+ public CodecEncoderMultiAccessUnitTest(String encoder, String mediaType,
+ EncoderConfigParams cfgParams, @SuppressWarnings("unused") String testLabel,
+ String allTestParams) {
+ super(encoder, mediaType, new EncoderConfigParams[]{cfgParams}, allTestParams);
+ mAsyncHandle = new CodecAsyncHandlerMultiAccessUnits();
+ }
+
+ @Before
+ public void setUp() throws IOException {
+ mActiveEncCfg = mEncCfgParams[0];
+ MediaFormat format = mActiveEncCfg.getFormat();
+ ArrayList<MediaFormat> formatList = new ArrayList<>();
+ formatList.add(format);
+ checkFormatSupport(mCodecName, mMediaType, true, formatList, null, CODEC_OPTIONAL);
+ mActiveRawRes = EncoderInput.getRawResource(mActiveEncCfg);
+ assertNotNull("no raw resource found for testing config : " + mActiveEncCfg + mTestConfig
+ + mTestEnv, mActiveRawRes);
+ Object asyncHandle = mAsyncHandle;
+ Assert.assertTrue("async handle shall be an instance of CodecAsyncHandlerMultiAccessUnits"
+ + " while testing Feature_MultipleFrames" + mTestConfig + mTestEnv,
+ asyncHandle instanceof CodecAsyncHandlerMultiAccessUnits);
+ mAsyncHandleMultiAccessUnits = (CodecAsyncHandlerMultiAccessUnits) asyncHandle;
+ }
+
+ @Override
+ protected void resetContext(boolean isAsync, boolean signalEOSWithLastFrame) {
+ super.resetContext(isAsync, signalEOSWithLastFrame);
+ mMaxOutputSizeBytes = 0;
+ }
+
+ private void validateOutputFormat(MediaFormat outFormat) {
+ Assert.assertTrue("Output format " + outFormat + " does not contain key "
+ + MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE + ". \n"
+ + mTestConfig + mTestEnv,
+ outFormat.containsKey(MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE));
+ mMaxOutputSizeBytes = outFormat.getInteger(MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE);
+ }
+
+ private void dequeueOutputs(int bufferIndex, ArrayDeque<MediaCodec.BufferInfo> infos) {
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, "output: id: " + bufferIndex);
+ }
+ validateOutputFormat(mCodec.getOutputFormat(bufferIndex));
+ ByteBuffer buf = mCodec.getOutputBuffer(bufferIndex);
+ int totalSize = 0;
+ for (MediaCodec.BufferInfo info : infos) {
+ Assert.assertNotNull("received null entry in dequeueOutput infos list. \n"
+ + mTestConfig + mTestEnv, info);
+ if (ENABLE_LOGS) {
+ Log.v(LOG_TAG, " flags: " + info.flags + " size: " + info.size + " timestamp: "
+ + info.presentationTimeUs);
+ }
+ if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ mSawOutputEOS = true;
+ }
+ if (info.size > 0) {
+ if (mSaveToMem) {
+ MediaCodec.BufferInfo copy = new MediaCodec.BufferInfo();
+ copy.set(mOutputBuff.getOutStreamSize(), info.size, info.presentationTimeUs,
+ info.flags);
+ mInfoList.add(copy);
+
+ mOutputBuff.checksum(buf, info);
+ mOutputBuff.saveToMemory(buf, info);
+ }
+ if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+ mOutputBuff.saveOutPTS(info.presentationTimeUs);
+ mOutputCount++;
+ }
+ if (mMuxer != null) {
+ if (mTrackID == -1) {
+ mTrackID = mMuxer.addTrack(mCodec.getOutputFormat());
+ mMuxer.start();
+ }
+ mMuxer.writeSampleData(mTrackID, buf, info);
+ }
+ }
+ totalSize += info.size;
+ }
+ assertTrue("Sum of all info sizes: " + totalSize + " exceeds max output size: "
+ + mMaxOutputSizeBytes + " \n" + mTestConfig + mTestEnv,
+ totalSize <= mMaxOutputSizeBytes);
+ mCodec.releaseOutputBuffer(bufferIndex, false);
+ }
+
+ @Override
+ protected void doWork(int frameLimit) throws InterruptedException, IOException {
+ mLoopBackFrameLimit = frameLimit;
+ if (mMuxOutput) {
+ int muxerFormat = getMuxerFormatForMediaType(mMediaType);
+ mMuxedOutputFile = getTempFilePath("");
+ mMuxer = new MediaMuxer(mMuxedOutputFile, muxerFormat);
+ }
+ // dequeue output after inputEOS is expected to be done in waitForAllOutputs()
+ while (!mAsyncHandleMultiAccessUnits.hasSeenError() && !mSawInputEOS
+ && mInputCount < frameLimit) {
+ Pair<Integer, ArrayDeque<MediaCodec.BufferInfo>> element =
+ mAsyncHandleMultiAccessUnits.getWorkList();
+ if (element != null) {
+ int bufferID = element.first;
+ ArrayDeque<MediaCodec.BufferInfo> infos = element.second;
+ if (infos != null) {
+ // <id, infos> corresponds to output callback. Handle it accordingly
+ dequeueOutputs(bufferID, infos);
+ } else {
+ // <id, null> corresponds to input callback. Handle it accordingly
+ enqueueInput(bufferID);
+ }
+ }
+ }
+ }
+
+ @Override
+ protected void queueEOS() throws InterruptedException {
+ while (!mAsyncHandleMultiAccessUnits.hasSeenError() && !mSawInputEOS) {
+ Pair<Integer, ArrayDeque<MediaCodec.BufferInfo>> element =
+ mAsyncHandleMultiAccessUnits.getWorkList();
+ if (element != null) {
+ int bufferID = element.first;
+ ArrayDeque<MediaCodec.BufferInfo> infos = element.second;
+ if (infos != null) {
+ dequeueOutputs(bufferID, infos);
+ } else {
+ enqueueEOS(element.first);
+ }
+ }
+ }
+ }
+
+ @Override
+ protected void waitForAllOutputs() throws InterruptedException {
+ while (!mAsyncHandleMultiAccessUnits.hasSeenError() && !mSawOutputEOS) {
+ Pair<Integer, ArrayDeque<MediaCodec.BufferInfo>> element =
+ mAsyncHandleMultiAccessUnits.getOutputs();
+ if (element != null) {
+ dequeueOutputs(element.first, element.second);
+ }
+ }
+ if (mMuxOutput) {
+ if (mTrackID != -1) {
+ mMuxer.stop();
+ mTrackID = -1;
+ }
+ if (mMuxer != null) {
+ mMuxer.release();
+ mMuxer = null;
+ }
+ }
+ validateTestState();
+ }
+
+ /**
+ * Checks if the component under test can encode the test file correctly. The encoding
+ * happens in asynchronous mode, once with the eos flag signalled alongside the last raw frame
+ * and once with the eos flag signalled separately after sending all raw frames. It expects
+ * consistent output across these runs. That is, the ByteBuffer info and the output timestamp
+ * list have to be the same in all the runs. Further, the output timestamps have to be strictly
+ * increasing. The test also verifies that the component / framework output is consistent with
+ * normal mode (single access unit mode).
+ * <p>
+ * Check description of class {@link CodecEncoderMultiAccessUnitTest}
+ */
+ @ApiTest(apis = {"android.media.MediaFormat#KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE",
+ "android.media.MediaFormat#KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE",
+ "android.media.MediaCodec.Callback#onOutputBuffersAvailable"})
+ @LargeTest
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testSimpleEncode() throws IOException, InterruptedException {
+ assumeTrue(mCodecName + " does not support FEATURE_MultipleFrames",
+ isFeatureSupported(mCodecName, mMediaType, FEATURE_MultipleFrames));
+
+ CodecEncoderTestBase cetb = new CodecEncoderTestBase(mCodecName, mMediaType,
+ new EncoderConfigParams[]{mActiveEncCfg}, mAllTestParams);
+ cetb.encodeToMemory(mCodecName, mActiveEncCfg, mActiveRawRes, Integer.MAX_VALUE, true,
+ false);
+ OutputManager ref = cetb.getOutputManager();
+
+ boolean[] boolStates = {true, false};
+ OutputManager testA = new OutputManager(ref.getSharedErrorLogs());
+ OutputManager testB = new OutputManager(ref.getSharedErrorLogs());
+ mSaveToMem = true;
+ mMuxOutput = false;
+ setUpSource(mActiveRawRes.mFileName);
+ mCodec = MediaCodec.createByCodecName(mCodecName);
+ MediaFormat format = mActiveEncCfg.getFormat();
+ for (int[] outSizeInMs : OUT_SIZE_IN_MS) {
+ int frameSize = mActiveRawRes.mBytesPerSample * mActiveEncCfg.mChannelCount;
+ int maxOutputSize = (outSizeInMs[0] * frameSize * mActiveEncCfg.mSampleRate) / 1000;
+ int maxInputSize = (int) (maxOutputSize / getCompressionRatio(mMediaType));
+ maxInputSize = ((maxInputSize + (frameSize - 1)) / frameSize) * frameSize;
+ int thresholdOutputSize =
+ (outSizeInMs[1] * frameSize * mActiveEncCfg.mSampleRate) / 1000;
+ format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
+ format.setInteger(MediaFormat.KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE, maxOutputSize);
+ format.setInteger(MediaFormat.KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE,
+ thresholdOutputSize);
+ for (boolean eosType : boolStates) {
+ configureCodec(format, true, eosType, true);
+ mOutputBuff = eosType ? testA : testB;
+ mOutputBuff.reset();
+ mInfoList.clear();
+ mCodec.start();
+ doWork(Integer.MAX_VALUE);
+ queueEOS();
+ waitForAllOutputs();
+ mCodec.reset();
+ if (!ref.equalsDequeuedOutput(mOutputBuff)) {
+ fail("Output of encoder in MultipleFrames mode differs from single access unit"
+ + " mode. \n" + mTestConfig + mTestEnv + mOutputBuff.getErrMsg());
+ }
+ }
+ if (!testA.equals(testB)) {
+ fail("Output of encoder component is not consistent across runs. \n" + mTestConfig
+ + mTestEnv + testB.getErrMsg());
+ }
+ }
+ mCodec.release();
+ }
+}
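When FEATURE_MultipleFrames is enabled, one output buffer carries several access units and the
framework reports them through MediaCodec.Callback#onOutputBuffersAvailable, which the
dequeueOutputs() override above consumes. A minimal, self-contained sketch of such a callback
(not part of the change above; the class name and placeholder bodies are illustrative, and API
level 35 is assumed) could look like this:

import android.media.MediaCodec;
import android.media.MediaFormat;

import java.nio.ByteBuffer;
import java.util.ArrayDeque;

class BatchedOutputCallback extends MediaCodec.Callback {
    @Override
    public void onOutputBuffersAvailable(MediaCodec codec, int index,
            ArrayDeque<MediaCodec.BufferInfo> infos) {
        // One output buffer, several access units; each BufferInfo demarcates one of them.
        ByteBuffer buffer = codec.getOutputBuffer(index);
        for (MediaCodec.BufferInfo info : infos) {
            ByteBuffer accessUnit = buffer.duplicate();
            accessUnit.position(info.offset);
            accessUnit.limit(info.offset + info.size);
            // consume accessUnit together with info.presentationTimeUs and info.flags
            // (BUFFER_FLAG_CODEC_CONFIG, BUFFER_FLAG_END_OF_STREAM, ...)
        }
        codec.releaseOutputBuffer(index, false /* render */);
    }

    // The single access unit callbacks remain as in any asynchronous MediaCodec client.
    @Override
    public void onInputBufferAvailable(MediaCodec codec, int index) { }
    @Override
    public void onOutputBufferAvailable(MediaCodec codec, int index,
            MediaCodec.BufferInfo info) { }
    @Override
    public void onError(MediaCodec codec, MediaCodec.CodecException e) { }
    @Override
    public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) { }
}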
diff --git a/tests/media/src/android/mediav2/cts/CodecEncoderTest.java b/tests/media/src/android/mediav2/cts/CodecEncoderTest.java
index 8c043e6..c601471 100644
--- a/tests/media/src/android/mediav2/cts/CodecEncoderTest.java
+++ b/tests/media/src/android/mediav2/cts/CodecEncoderTest.java
@@ -76,6 +76,7 @@
private static final String LOG_TAG = CodecEncoderTest.class.getSimpleName();
private static final ArrayList<String> ABR_MEDIATYPE_LIST = new ArrayList<>();
+ private boolean mGotCSD;
private int mNumSyncFramesReceived;
private final ArrayList<Integer> mSyncFramesPos = new ArrayList<>();
@@ -97,11 +98,16 @@
@Override
protected void resetContext(boolean isAsync, boolean signalEOSWithLastFrame) {
super.resetContext(isAsync, signalEOSWithLastFrame);
+ mGotCSD = false;
mNumSyncFramesReceived = 0;
mSyncFramesPos.clear();
}
+ @Override
protected void dequeueOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+ if (info.size > 0 && ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0)) {
+ mGotCSD = true;
+ }
if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
mNumSyncFramesReceived += 1;
mSyncFramesPos.add(mOutputCount);
@@ -277,6 +283,19 @@
+ mTestEnv, mActiveRawRes);
}
+ private void validateCSD() {
+ if (mMediaType.equals(MediaFormat.MIMETYPE_AUDIO_AAC)
+ || mMediaType.equals(MediaFormat.MIMETYPE_AUDIO_OPUS)
+ || mMediaType.equals(MediaFormat.MIMETYPE_AUDIO_FLAC)
+ || mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_MPEG4)
+ || mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)
+ || mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)
+ || mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_VP9)) {
+ assertTrue("components that support mediaType: " + mMediaType
+ + " must generate CodecPrivateData \n" + mTestConfig + mTestEnv, mGotCSD);
+ }
+ }
+
/**
* Checks if the component under test can encode the test file correctly. The encoding
* happens in synchronous, asynchronous mode, eos flag signalled with last raw frame and
@@ -288,7 +307,7 @@
* parameters, the test checks for consistency across runs. Although the test collects the
* output in a byte buffer, no analysis is done that checks the integrity of the bitstream.
*/
- @CddTest(requirements = {"2.2.2", "2.3.2", "2.5.2", "5.1.1", "5.2/C-1-1"})
+ @CddTest(requirements = {"2.2.2", "2.3.2", "2.5.2", "5.1.1", "5.2/C-1-1", "5.2.4/C-1-3"})
@ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420Flexible",
"android.media.AudioFormat#ENCODING_PCM_16BIT"})
@LargeTest
@@ -320,6 +339,7 @@
queueEOS();
waitForAllOutputs();
validateMetrics(mCodecName, format);
+ validateCSD();
/* TODO(b/147348711) */
if (false) mCodec.stop();
else mCodec.reset();
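The new validateCSD() check asserts that encoders for the listed media types emit codec specific
data (CSD). For reference, a client typically picks CSD up in one of two places; the sketch below
is illustrative only and not part of the change above.

import android.media.MediaCodec;
import android.media.MediaFormat;

import java.nio.ByteBuffer;

final class CsdSketch {
    // CSD arrives either as an output buffer flagged BUFFER_FLAG_CODEC_CONFIG ...
    static boolean isCsdBuffer(MediaCodec.BufferInfo info) {
        return info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
    }

    // ... or as csd-* byte buffers attached to the encoder's output format.
    static ByteBuffer csdFromOutputFormat(MediaCodec encoder) {
        MediaFormat outFormat = encoder.getOutputFormat();
        return outFormat.containsKey("csd-0") ? outFormat.getByteBuffer("csd-0") : null;
    }
}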
diff --git a/tests/media/src/android/mediav2/cts/CodecInfoTest.java b/tests/media/src/android/mediav2/cts/CodecInfoTest.java
index b3b96e4..e330006 100644
--- a/tests/media/src/android/mediav2/cts/CodecInfoTest.java
+++ b/tests/media/src/android/mediav2/cts/CodecInfoTest.java
@@ -24,12 +24,16 @@
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUVP010;
+import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_DynamicColorAspects;
import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_HdrEditing;
+import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_HlgEditing;
+import static android.media.codec.Flags.FLAG_DYNAMIC_COLOR_ASPECTS;
import static android.media.codec.Flags.FLAG_IN_PROCESS_SW_AUDIO_CODEC;
-import static android.mediav2.common.cts.CodecTestBase.BOARD_SDK_IS_AT_LEAST_202404;
+import static android.mediav2.common.cts.CodecTestBase.BOARD_FIRST_SDK_IS_AT_LEAST_202404;
import static android.mediav2.common.cts.CodecTestBase.BOARD_SDK_IS_AT_LEAST_T;
import static android.mediav2.common.cts.CodecTestBase.FIRST_SDK_IS_AT_LEAST_T;
import static android.mediav2.common.cts.CodecTestBase.IS_AT_LEAST_T;
+import static android.mediav2.common.cts.CodecTestBase.IS_AT_LEAST_V;
import static android.mediav2.common.cts.CodecTestBase.IS_HDR_CAPTURE_SUPPORTED;
import static android.mediav2.common.cts.CodecTestBase.PROFILE_HDR10_MAP;
import static android.mediav2.common.cts.CodecTestBase.PROFILE_HDR10_PLUS_MAP;
@@ -52,6 +56,8 @@
import android.mediav2.common.cts.CodecTestBase;
import android.os.Build;
import android.platform.test.annotations.RequiresFlagsEnabled;
+import android.platform.test.flag.junit.CheckFlagsRule;
+import android.platform.test.flag.junit.DeviceFlagsValueProvider;
import android.util.Range;
import androidx.test.filters.SdkSuppress;
@@ -64,6 +70,7 @@
import com.android.compatibility.common.util.VsrTest;
import org.junit.Assume;
+import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -87,6 +94,9 @@
public String mCodecName;
public MediaCodecInfo mCodecInfo;
+ @Rule
+ public final CheckFlagsRule mCheckFlagsRule = DeviceFlagsValueProvider.createCheckFlagsRule();
+
public CodecInfoTest(String mediaType, String codecName, MediaCodecInfo codecInfo) {
mMediaType = mediaType;
mCodecName = codecName;
@@ -164,6 +174,7 @@
*/
@CddTest(requirements = {"5.1.7/C-1-2", "5.1.7/C-1-3", "5.1.7/C-4-1", "5.12/C-6-5",
"5.12/C-7-1", "5.12/C-7-3"})
+ @VsrTest(requirements = {"VSR-4.4.011"})
@Test
public void testColorFormatSupport() {
Assume.assumeTrue("Test is applicable for video codecs", mMediaType.startsWith("video/"));
@@ -191,20 +202,23 @@
IntStream.of(caps.colorFormats).noneMatch(x -> x == COLOR_FormatYUVP010));
}
- // Encoders that support FEATURE_HdrEditing, must support ABGR2101010 color format
- // and at least one HDR profile
+ // Encoders that support FEATURE_HdrEditing / FEATURE_HlgEditing, must support
+ // ABGR2101010 color format and at least one HDR profile
boolean hdrEditingSupported = caps.isFeatureSupported(FEATURE_HdrEditing);
- if (hdrEditingSupported) {
+ boolean hlgEditingSupported = (IS_AT_LEAST_V && android.media.codec.Flags.hlgEditing())
+ ? caps.isFeatureSupported(FEATURE_HlgEditing) : false;
+ if (hdrEditingSupported || hlgEditingSupported) {
boolean abgr2101010Supported = IntStream.of(caps.colorFormats)
.anyMatch(x -> x == COLOR_Format32bitABGR2101010);
- assertTrue(mCodecName + " supports FEATURE_HdrEditing, but does not support"
- + " COLOR_FormatABGR2101010 color formats.", abgr2101010Supported);
- assertTrue(mCodecName + " supports FEATURE_HdrEditing, but does not support"
- + " any HDR profiles.", canHandleHdr);
+ assertTrue(mCodecName + " supports feature HdrEditing/HlgEditing, but does not"
+ + " support COLOR_FormatABGR2101010 color formats.", abgr2101010Supported);
+ assertTrue(mCodecName + " supports feature HdrEditing/HlgEditing, but does not"
+ + " support any HDR profiles.", canHandleHdr);
}
} else {
- if (FIRST_SDK_IS_AT_LEAST_T && VNDK_IS_AT_LEAST_T && BOARD_SDK_IS_AT_LEAST_T
- && canDisplaySupportHDRContent() && canHandleHdr) {
+ if (((FIRST_SDK_IS_AT_LEAST_T && VNDK_IS_AT_LEAST_T && BOARD_SDK_IS_AT_LEAST_T)
+ || BOARD_FIRST_SDK_IS_AT_LEAST_202404) && canDisplaySupportHDRContent()
+ && canHandleHdr) {
if (MediaUtils.isTv()) {
// Some TV devices support HDR10 display with VO instead of GPU. In this
// case, skip checking P010 on TV devices.
@@ -268,6 +282,28 @@
}
/**
+ * All decoders for compression technologies that were introduced after 2002 must support the
+ * dynamic color aspects feature on CHIPSETs that set ro.board.first_api_level to V or higher.
+ */
+ @RequiresFlagsEnabled(FLAG_DYNAMIC_COLOR_ASPECTS)
+ @SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM,
+ codeName = "VanillaIceCream")
+ @VsrTest(requirements = {"VSR-4.2.005.001"})
+ @Test
+ public void testDynamicColorAspectSupport() {
+ Assume.assumeTrue("Test is applicable for video codecs", mMediaType.startsWith("video/"));
+ Assume.assumeFalse("Test is applicable only for decoders", mCodecInfo.isEncoder());
+ Assume.assumeTrue("Skipping, Only intended for coding technologies introduced after 2002.",
+ !mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_MPEG4)
+ && !mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_H263)
+ && !mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_MPEG2));
+ Assume.assumeTrue("Skipping, Only intended for devices with board first_api_level >= V",
+ BOARD_FIRST_SDK_IS_AT_LEAST_202404);
+ assertTrue(mCodecName + " does not support FEATURE_DynamicColorAspects.",
+ isFeatureSupported(mCodecName, mMediaType, FEATURE_DynamicColorAspects));
+ }
+
+ /**
* Components advertising support for compression technologies that were introduced after 2002
* must support a given resolution in both portrait and landscape mode.
*/
@@ -280,7 +316,7 @@
&& !mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_H263)
&& !mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_MPEG2));
Assume.assumeTrue("Skipping, Only intended for devices with SDK >= 202404",
- BOARD_SDK_IS_AT_LEAST_202404);
+ BOARD_FIRST_SDK_IS_AT_LEAST_202404);
if (!isFeatureSupported(mCodecName, mMediaType, "can-swap-width-height")) {
MediaCodecInfo.VideoCapabilities vCaps =
mCodecInfo.getCapabilitiesForType(mMediaType).getVideoCapabilities();
@@ -304,7 +340,7 @@
&& !mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_H263)
&& !mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_MPEG2));
Assume.assumeTrue("Skipping, Only intended for devices with SDK >= 202404",
- BOARD_SDK_IS_AT_LEAST_202404);
+ BOARD_FIRST_SDK_IS_AT_LEAST_202404);
MediaCodecInfo.VideoCapabilities vCaps =
mCodecInfo.getCapabilitiesForType(mMediaType).getVideoCapabilities();
int widthAlignment = vCaps.getWidthAlignment();
diff --git a/tests/media/src/android/mediav2/cts/DecodeGlAccuracyTest.java b/tests/media/src/android/mediav2/cts/DecodeGlAccuracyTest.java
index acd830d..2ed9519 100644
--- a/tests/media/src/android/mediav2/cts/DecodeGlAccuracyTest.java
+++ b/tests/media/src/android/mediav2/cts/DecodeGlAccuracyTest.java
@@ -589,7 +589,7 @@
if (mUseYuvSampling) {
String message = "Device doesn't support EXT_YUV_target GL extension \n" + mTestConfig
+ mTestEnv;
- if (IS_AT_LEAST_T && IS_HDR_EDITING_SUPPORTED) {
+ if (IS_AT_LEAST_T && (IS_HDR_EDITING_SUPPORTED || IS_HLG_EDITING_SUPPORTED)) {
assertTrue(message, mEGLWindowOutSurface.getEXTYuvTargetSupported());
} else {
assumeTrue(message, mEGLWindowOutSurface.getEXTYuvTargetSupported());
diff --git a/tests/media/src/android/mediav2/cts/DecoderDynamicColorAspectTest.java b/tests/media/src/android/mediav2/cts/DecoderDynamicColorAspectTest.java
new file mode 100644
index 0000000..86a23c2
--- /dev/null
+++ b/tests/media/src/android/mediav2/cts/DecoderDynamicColorAspectTest.java
@@ -0,0 +1,374 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediav2.cts;
+
+import static android.media.codec.Flags.FLAG_DYNAMIC_COLOR_ASPECTS;
+import static android.mediav2.common.cts.CodecTestBase.SupportClass.CODEC_OPTIONAL;
+
+import static org.junit.Assert.assertTrue;
+
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.mediav2.common.cts.CodecDecoderTestBase;
+import android.mediav2.common.cts.CodecTestBase;
+import android.mediav2.common.cts.OutputManager;
+import android.os.Build;
+import android.os.Bundle;
+import android.platform.test.annotations.AppModeFull;
+import android.platform.test.annotations.RequiresFlagsEnabled;
+import android.util.Range;
+
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.SdkSuppress;
+
+import com.android.compatibility.common.util.ApiTest;
+
+import org.junit.Assume;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+
+/**
+ * Tests video decoders' support for the feature FEATURE_DynamicColorAspects.
+ * <p>
+ * The test decodes multiple clips that are configured with different color space attributes
+ * (color-range, color-standard and color-transfer), serially. The test checks the output
+ * format at every dequeueOutput() call and expects the range, standard and transfer
+ * characteristics received to match the clips' attributes.
+ * <p>
+ * Certain media types cannot hold color aspects information in the bitstream (vp8/vp9/...). For
+ * these media types this information is configured via setParameters(Bundle). The test
+ * expects the component to relay this information correctly in the output format of each buffer
+ * to the client.
+ */
+@SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM, codeName = "VanillaIceCream")
+@RequiresFlagsEnabled(FLAG_DYNAMIC_COLOR_ASPECTS)
+@RunWith(Parameterized.class)
+@AppModeFull(reason = "Instant apps cannot access the SD card")
+public class DecoderDynamicColorAspectTest extends CodecDecoderTestBase {
+ private static final String LOG_TAG = DecoderDynamicColorAspectTest.class.getSimpleName();
+ private static final String MEDIA_DIR = WorkDir.getMediaDirString();
+
+ private static class MediaAndColorSpaceAttrib {
+ public final String mTestFile;
+ public final int mColorRange;
+ public final int mColorStandard;
+ public final int mColorTransfer;
+
+ MediaAndColorSpaceAttrib(String testFile, int colorRange, int colorStandard,
+ int colorTransfer) {
+ mTestFile = testFile;
+ mColorRange = colorRange;
+ mColorStandard = colorStandard;
+ mColorTransfer = colorTransfer;
+ }
+ }
+
+ private final MediaAndColorSpaceAttrib mIncorrectColorSpaceAttrib =
+ new MediaAndColorSpaceAttrib(null, MediaFormat.COLOR_RANGE_FULL,
+ MediaFormat.COLOR_STANDARD_BT601_NTSC, MediaFormat.COLOR_TRANSFER_LINEAR);
+ private final ArrayList<MediaAndColorSpaceAttrib> mMediaAndColorSpaceAttribList;
+ private final HashMap<Range<Integer>, MediaAndColorSpaceAttrib> mRangeMediaColorSpaceMap =
+ new HashMap<>();
+ private long mMaxPts = 0;
+
+ public DecoderDynamicColorAspectTest(String decoder, String mediaType,
+ ArrayList<MediaAndColorSpaceAttrib> mediaAndColorSpaceAttrib, String allTestParams) {
+ super(decoder, mediaType, null, allTestParams);
+ mMediaAndColorSpaceAttribList = mediaAndColorSpaceAttrib;
+ }
+
+ @Parameterized.Parameters(name = "{index}_{0}_{1}")
+ public static Collection<Object[]> input() {
+ final List<Object[]> exhaustiveArgsList = Arrays.asList(new Object[][]{
+ {MediaFormat.MIMETYPE_VIDEO_AVC, new ArrayList<>(Arrays.asList(
+ new MediaAndColorSpaceAttrib(
+ "bbb_qcif_color_bt709_lr_sdr_avc.mp4",
+ MediaFormat.COLOR_RANGE_LIMITED,
+ MediaFormat.COLOR_STANDARD_BT709,
+ MediaFormat.COLOR_TRANSFER_SDR_VIDEO),
+ new MediaAndColorSpaceAttrib(
+ "bbb_qcif_color_bt601_625_fr_gamma22_avc.mp4",
+ MediaFormat.COLOR_RANGE_FULL,
+ MediaFormat.COLOR_STANDARD_BT601_PAL,
+ /* MediaFormat.COLOR_TRANSFER_GAMMA2_2 */ 4)))},
+ {MediaFormat.MIMETYPE_VIDEO_AVC, new ArrayList<>(Arrays.asList(
+ new MediaAndColorSpaceAttrib(
+ "bikes_qcif_color_bt2020_smpte2084_bt2020Ncl_lr_avc.mp4",
+ MediaFormat.COLOR_RANGE_LIMITED,
+ MediaFormat.COLOR_STANDARD_BT2020,
+ MediaFormat.COLOR_TRANSFER_ST2084),
+ new MediaAndColorSpaceAttrib(
+ "bikes_qcif_color_bt2020_smpte2086Hlg_bt2020Ncl_fr_avc.mp4",
+ MediaFormat.COLOR_RANGE_FULL,
+ MediaFormat.COLOR_STANDARD_BT2020,
+ MediaFormat.COLOR_TRANSFER_HLG)))},
+ {MediaFormat.MIMETYPE_VIDEO_HEVC, new ArrayList<>(Arrays.asList(
+ new MediaAndColorSpaceAttrib(
+ "bbb_qcif_color_bt709_lr_sdr_hevc.mp4",
+ MediaFormat.COLOR_RANGE_LIMITED,
+ MediaFormat.COLOR_STANDARD_BT709,
+ MediaFormat.COLOR_TRANSFER_SDR_VIDEO),
+ new MediaAndColorSpaceAttrib(
+ "bbb_qcif_color_bt601_625_fr_gamma22_hevc.mp4",
+ MediaFormat.COLOR_RANGE_FULL,
+ MediaFormat.COLOR_STANDARD_BT601_PAL,
+ /* MediaFormat.COLOR_TRANSFER_GAMMA2_2 */ 4)))},
+ {MediaFormat.MIMETYPE_VIDEO_HEVC, new ArrayList<>(Arrays.asList(
+ new MediaAndColorSpaceAttrib(
+ "bikes_qcif_color_bt2020_smpte2084_bt2020Ncl_lr_hevc.mp4",
+ MediaFormat.COLOR_RANGE_LIMITED,
+ MediaFormat.COLOR_STANDARD_BT2020,
+ MediaFormat.COLOR_TRANSFER_ST2084),
+ new MediaAndColorSpaceAttrib(
+ "bikes_qcif_color_bt2020_smpte2086Hlg_bt2020Ncl_fr_hevc.mp4",
+ MediaFormat.COLOR_RANGE_FULL,
+ MediaFormat.COLOR_STANDARD_BT2020,
+ MediaFormat.COLOR_TRANSFER_HLG)))},
+ {MediaFormat.MIMETYPE_VIDEO_AV1, new ArrayList<>(Arrays.asList(
+ new MediaAndColorSpaceAttrib(
+ "bbb_qcif_color_bt709_lr_sdr_av1.mp4",
+ MediaFormat.COLOR_RANGE_LIMITED,
+ MediaFormat.COLOR_STANDARD_BT709,
+ MediaFormat.COLOR_TRANSFER_SDR_VIDEO),
+ new MediaAndColorSpaceAttrib(
+ "bbb_qcif_color_bt601_625_fr_gamma22_av1.mp4",
+ MediaFormat.COLOR_RANGE_FULL,
+ MediaFormat.COLOR_STANDARD_BT601_PAL,
+ /* MediaFormat.COLOR_TRANSFER_GAMMA2_2 */ 4)))},
+ {MediaFormat.MIMETYPE_VIDEO_AV1, new ArrayList<>(Arrays.asList(
+ new MediaAndColorSpaceAttrib(
+ "bikes_qcif_color_bt2020_smpte2084_bt2020Ncl_lr_av1.mp4",
+ MediaFormat.COLOR_RANGE_LIMITED,
+ MediaFormat.COLOR_STANDARD_BT2020,
+ MediaFormat.COLOR_TRANSFER_ST2084),
+ new MediaAndColorSpaceAttrib(
+ "bikes_qcif_color_bt2020_smpte2086Hlg_bt2020Ncl_fr_av1.mp4",
+ MediaFormat.COLOR_RANGE_FULL,
+ MediaFormat.COLOR_STANDARD_BT2020,
+ MediaFormat.COLOR_TRANSFER_HLG)))},
+ {MediaFormat.MIMETYPE_VIDEO_VP8, new ArrayList<>(Arrays.asList(
+ new MediaAndColorSpaceAttrib(
+ "bbb_qcif_color_bt709_lr_sdr_vp8.webm",
+ MediaFormat.COLOR_RANGE_LIMITED,
+ MediaFormat.COLOR_STANDARD_BT709,
+ MediaFormat.COLOR_TRANSFER_SDR_VIDEO),
+ new MediaAndColorSpaceAttrib(
+ "bbb_qcif_color_bt601_625_fr_gamma22_vp8.mkv",
+ MediaFormat.COLOR_RANGE_FULL,
+ MediaFormat.COLOR_STANDARD_BT601_PAL,
+ /* MediaFormat.COLOR_TRANSFER_GAMMA2_2 */ 4)))},
+ {MediaFormat.MIMETYPE_VIDEO_VP9, new ArrayList<>(Arrays.asList(
+ new MediaAndColorSpaceAttrib(
+ "bbb_qcif_color_bt709_lr_sdr_vp9.webm",
+ MediaFormat.COLOR_RANGE_LIMITED,
+ MediaFormat.COLOR_STANDARD_BT709,
+ MediaFormat.COLOR_TRANSFER_SDR_VIDEO),
+ new MediaAndColorSpaceAttrib(
+ "bbb_qcif_color_bt601_625_fr_gamma22_vp9.mkv",
+ MediaFormat.COLOR_RANGE_FULL,
+ MediaFormat.COLOR_STANDARD_BT601_PAL,
+ /* MediaFormat.COLOR_TRANSFER_GAMMA2_2 */ 4)))},
+ {MediaFormat.MIMETYPE_VIDEO_VP9, new ArrayList<>(Arrays.asList(
+ new MediaAndColorSpaceAttrib(
+ "bikes_qcif_color_bt2020_smpte2084_bt2020Ncl_lr_vp9.mkv",
+ MediaFormat.COLOR_RANGE_LIMITED,
+ MediaFormat.COLOR_STANDARD_BT2020,
+ MediaFormat.COLOR_TRANSFER_ST2084),
+ new MediaAndColorSpaceAttrib(
+ "bikes_qcif_color_bt2020_smpte2086Hlg_bt2020Ncl_fr_vp9.mkv",
+ MediaFormat.COLOR_RANGE_FULL,
+ MediaFormat.COLOR_STANDARD_BT2020,
+ MediaFormat.COLOR_TRANSFER_HLG)))},
+ });
+ return CodecTestBase.prepareParamList(exhaustiveArgsList, false, false, true, false);
+ }
+
+ @Before
+ public void setUp() throws IOException {
+ Assume.assumeTrue("codec: " + mCodecName + " does not support"
+ + " FEATURE_DynamicColorAspects", isFeatureSupported(mCodecName, mMediaType,
+ MediaCodecInfo.CodecCapabilities.FEATURE_DynamicColorAspects));
+ ArrayList<MediaFormat> formats = new ArrayList<>();
+ for (MediaAndColorSpaceAttrib mediaAndColorSpaceAttrib : mMediaAndColorSpaceAttribList) {
+ formats.add(setUpSource(MEDIA_DIR + mediaAndColorSpaceAttrib.mTestFile));
+ mExtractor.release();
+ }
+ checkFormatSupport(mCodecName, mMediaType, false, formats, null, CODEC_OPTIONAL);
+ }
+
+ private MediaFormat createInputList(MediaFormat format, ByteBuffer buffer,
+ ArrayList<MediaCodec.BufferInfo> list, int offset, long ptsOffset,
+ ArrayList<Long> inpPtsList) {
+ if (hasCSD(format)) {
+ MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
+ bufferInfo.offset = offset;
+ bufferInfo.size = 0;
+ // For some devices with VNDK versions up to Android U, sending a zero
+ // timestamp for CSD results in out-of-order timestamps at the output.
+ // For devices with VNDK versions newer than Android U, codecs are expected
+ // to handle CSD buffers with the timestamp set to zero.
+ bufferInfo.presentationTimeUs = VNDK_IS_AT_MOST_U ? ptsOffset : 0;
+ bufferInfo.flags = MediaCodec.BUFFER_FLAG_CODEC_CONFIG;
+ for (int i = 0; ; i++) {
+ String csdKey = "csd-" + i;
+ if (format.containsKey(csdKey)) {
+ ByteBuffer csdBuffer = format.getByteBuffer(csdKey);
+ bufferInfo.size += csdBuffer.limit();
+ buffer.put(csdBuffer);
+ format.removeKey(csdKey);
+ } else break;
+ }
+ list.add(bufferInfo);
+ offset += bufferInfo.size;
+ }
+ while (true) {
+ MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
+ bufferInfo.size = mExtractor.readSampleData(buffer, offset);
+ if (bufferInfo.size < 0) break;
+ bufferInfo.offset = offset;
+ bufferInfo.presentationTimeUs = ptsOffset + mExtractor.getSampleTime();
+ mMaxPts = Math.max(mMaxPts, bufferInfo.presentationTimeUs);
+ int flags = mExtractor.getSampleFlags();
+ bufferInfo.flags = 0;
+ if ((flags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
+ bufferInfo.flags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
+ }
+ if (((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0)
+ && ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_PARTIAL_FRAME) == 0)) {
+ if (!inpPtsList.contains(bufferInfo.presentationTimeUs)) {
+ inpPtsList.add(bufferInfo.presentationTimeUs);
+ }
+ }
+ list.add(bufferInfo);
+ mExtractor.advance();
+ offset += bufferInfo.size;
+ }
+ buffer.clear();
+ buffer.position(offset);
+ return format;
+ }
+
+ @Override
+ protected void enqueueInput(int bufferIndex, ByteBuffer buffer, MediaCodec.BufferInfo info) {
+ for (Range range : mRangeMediaColorSpaceMap.keySet()) {
+ if (mInputCount == (int) range.getLower()) {
+ final Bundle colorAspectUpdate = new Bundle();
+ MediaAndColorSpaceAttrib mediaAndColorSpaceAttrib;
+ if (mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_VP9)
+ || mMediaType.equals(MediaFormat.MIMETYPE_VIDEO_VP8)) {
+ mediaAndColorSpaceAttrib = mRangeMediaColorSpaceMap.get(range);
+ } else {
+ mediaAndColorSpaceAttrib = mIncorrectColorSpaceAttrib;
+ }
+ colorAspectUpdate.putInt(MediaFormat.KEY_COLOR_RANGE,
+ mediaAndColorSpaceAttrib.mColorRange);
+ colorAspectUpdate.putInt(MediaFormat.KEY_COLOR_STANDARD,
+ mediaAndColorSpaceAttrib.mColorStandard);
+ colorAspectUpdate.putInt(MediaFormat.KEY_COLOR_TRANSFER,
+ mediaAndColorSpaceAttrib.mColorTransfer);
+ mCodec.setParameters(colorAspectUpdate);
+ break;
+ }
+ }
+ super.enqueueInput(bufferIndex, buffer, info);
+ }
+
+ @Override
+ protected void dequeueOutput(int bufferIndex, MediaCodec.BufferInfo info) {
+ if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
+ mSawOutputEOS = true;
+ }
+ if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
+ mOutputBuff.saveOutPTS(info.presentationTimeUs);
+ boolean validateAttrib = false;
+ for (Range<Integer> range : mRangeMediaColorSpaceMap.keySet()) {
+ if (range.contains(mOutputCount)) {
+ MediaAndColorSpaceAttrib mediaAndColorSpaceAttrib =
+ mRangeMediaColorSpaceMap.get(range);
+ validateColorAspects(mCodec.getOutputFormat(bufferIndex),
+ mediaAndColorSpaceAttrib.mColorRange,
+ mediaAndColorSpaceAttrib.mColorStandard,
+ mediaAndColorSpaceAttrib.mColorTransfer);
+ validateAttrib = true;
+ break;
+ }
+ }
+ assertTrue("unable to validate color space attributes for output frame id : "
+ + mOutputCount + " \n" + mTestConfig + mTestEnv, validateAttrib);
+ mOutputCount++;
+ }
+ mCodec.releaseOutputBuffer(bufferIndex, mSurface != null);
+ }
+
+ /**
+ * Check description of class {@link DecoderDynamicColorAspectTest}
+ */
+ @SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM,
+ codeName = "VanillaIceCream")
+ @ApiTest(apis = {"android.media.MediaFormat#KEY_COLOR_RANGE",
+ "android.media.MediaFormat#KEY_COLOR_STANDARD",
+ "android.media.MediaFormat#KEY_COLOR_TRANSFER",
+ "android.media.MediaCodecInfo.CodecCapabilities#FEATURE_DynamicColorAspects"})
+ @LargeTest
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testDynamicColorAspect() throws IOException, InterruptedException {
+ int totalSize = 0;
+ for (MediaAndColorSpaceAttrib mediaAndColorSpaceAttrib : mMediaAndColorSpaceAttribList) {
+ File file = new File(MEDIA_DIR + mediaAndColorSpaceAttrib.mTestFile);
+ totalSize += (int) file.length();
+ }
+ long ptsOffset = 0;
+ int buffOffset = 0;
+ ArrayList<MediaFormat> formats = new ArrayList<>();
+ ArrayList<MediaCodec.BufferInfo> list = new ArrayList<>();
+ ArrayList<Long> inpPtsList = new ArrayList<>();
+ ByteBuffer buffer = ByteBuffer.allocate(totalSize);
+ for (MediaAndColorSpaceAttrib mediaAndColorSpaceAttrib : mMediaAndColorSpaceAttribList) {
+ int lower = inpPtsList.size();
+ formats.add(createInputList(setUpSource(MEDIA_DIR + mediaAndColorSpaceAttrib.mTestFile),
+ buffer, list, buffOffset, ptsOffset, inpPtsList));
+ mExtractor.release();
+ ptsOffset = mMaxPts + 1000000L;
+ buffOffset = (list.get(list.size() - 1).offset) + (list.get(list.size() - 1).size);
+ mRangeMediaColorSpaceMap.put(new Range<>(lower, inpPtsList.size() - 1),
+ mediaAndColorSpaceAttrib);
+ }
+ mSaveToMem = false;
+ mOutputBuff = new OutputManager();
+ mCodec = MediaCodec.createByCodecName(mCodecName);
+ configureCodec(formats.get(0), false, true, false);
+ mCodec.start();
+ doWork(buffer, list);
+ queueEOS();
+ waitForAllOutputs();
+ mCodec.stop();
+ mCodec.release();
+ }
+}
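The class comment above notes that for media types whose bitstream cannot carry color aspects,
the client pushes them with setParameters(Bundle), which is what the enqueueInput() override does
before the first access unit of each clip. A minimal sketch of that call follows; the class name
and the particular range/standard/transfer values are illustrative, not taken from the test clips.

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.os.Bundle;

final class ColorAspectUpdateSketch {
    static void pushColorAspects(MediaCodec decoder) {
        Bundle update = new Bundle();
        update.putInt(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
        update.putInt(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT709);
        update.putInt(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
        // A decoder advertising FEATURE_DynamicColorAspects is expected to reflect these
        // values in the output format of subsequent buffers.
        decoder.setParameters(update);
    }
}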
diff --git a/tests/media/src/android/mediav2/cts/DecoderPushBlankBufferOnStopTest.java b/tests/media/src/android/mediav2/cts/DecoderPushBlankBufferOnStopTest.java
index 0133136..cb259d2 100644
--- a/tests/media/src/android/mediav2/cts/DecoderPushBlankBufferOnStopTest.java
+++ b/tests/media/src/android/mediav2/cts/DecoderPushBlankBufferOnStopTest.java
@@ -97,9 +97,9 @@
ComponentClass.SOFTWARE);
}
- private boolean isBlankFrame(Image image) {
+ private boolean isBlankFrame(ImageSurface.ImageAndAttributes image) {
int threshold = 0;
- for (Image.Plane plane : image.getPlanes()) {
+ for (Image.Plane plane : image.mImage.getPlanes()) {
ByteBuffer buffer = plane.getBuffer();
while (buffer.hasRemaining()) {
int pixelValue = buffer.get() & 0xFF;
@@ -125,7 +125,7 @@
assertTrue("Codec: " + mCodecName + " doesn't support format: " + format,
areFormatsSupported(mCodecName, mMediaType, formatList));
mImageSurface = new ImageSurface();
- setUpSurface(getWidth(format), getHeight(format), PixelFormat.RGBX_8888, 1,
+ setUpSurface(getWidth(format), getHeight(format), PixelFormat.RGBX_8888, 1, 0,
this::isBlankFrame);
mSurface = mImageSurface.getSurface();
mCodec = MediaCodec.createByCodecName(mCodecName);
@@ -147,7 +147,7 @@
assertNotNull("Blank buffers are not received by image surface for format: "
+ format + "\n" + mTestConfig + mTestEnv, img);
assertTrue("received image is not a blank buffer \n" + mTestConfig + mTestEnv,
- isBlankFrame(img));
+ isBlankFrame(new ImageSurface.ImageAndAttributes(img, 0)));
}
}
mCodec.release();
diff --git a/tests/mediapc/common/src/android/mediapc/cts/common/CodecMetrics.java b/tests/mediapc/common/src/android/mediapc/cts/common/CodecMetrics.java
new file mode 100644
index 0000000..7000be5
--- /dev/null
+++ b/tests/mediapc/common/src/android/mediapc/cts/common/CodecMetrics.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediapc.cts.common;
+
+import com.google.auto.value.AutoValue;
+
+@AutoValue
+public abstract class CodecMetrics {
+
+ public static CodecMetrics getMetrics(double fps, double fdps) {
+ return new AutoValue_CodecMetrics(fps, fdps);
+ }
+
+ public abstract double fps();
+ public abstract double fdps();
+}
diff --git a/tests/mediapc/common/src/android/mediapc/cts/common/HdrDisplayRequirement.java b/tests/mediapc/common/src/android/mediapc/cts/common/HdrDisplayRequirement.java
new file mode 100644
index 0000000..6a67446
--- /dev/null
+++ b/tests/mediapc/common/src/android/mediapc/cts/common/HdrDisplayRequirement.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.mediapc.cts.common;
+
+import android.os.Build;
+
+public class HdrDisplayRequirement extends Requirement {
+ private static final String TAG = HdrDisplayRequirement.class.getSimpleName();
+
+ private HdrDisplayRequirement(String id, RequiredMeasurement<?>... reqs) {
+ super(id, reqs);
+ }
+
+ public void setIsHdr(boolean isHdr) {
+ this.setMeasuredValue(RequirementConstants.IS_HDR, isHdr);
+ }
+
+ /** Set the display luminance in nits. */
+ public void setDisplayLuminance(float luminance) {
+ this.setMeasuredValue(RequirementConstants.DISPLAY_LUMINANCE_NITS, luminance);
+ }
+
+ /**
+ * [7.1.1.3/H-3-1] MUST have an HDR display supporting at least 1000 nits
+ * average.
+ */
+ public static HdrDisplayRequirement createR7_1_1_3__H_3_1() {
+ RequiredMeasurement<Boolean> isHdr = RequiredMeasurement
+ .<Boolean>builder()
+ .setId(RequirementConstants.IS_HDR)
+ .setPredicate(RequirementConstants.BOOLEAN_EQ)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
+ .build();
+ RequiredMeasurement<Float> luminance = RequiredMeasurement
+ .<Float>builder()
+ .setId(RequirementConstants.DISPLAY_LUMINANCE_NITS)
+ .setPredicate(RequirementConstants.FLOAT_GTE)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 1000.0f)
+ .build();
+
+ return new HdrDisplayRequirement(
+ RequirementConstants.R7_1_1_3__H_3_1, isHdr, luminance);
+ }
+}
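A sketch, not part of this change, of how a display-side test could feed the new requirement. It relies on the addR7_1_1_3__H_3_1() hook and the Utils fields added later in this patch, and assumes mTestName is the test's JUnit TestName rule:

    PerformanceClassEvaluator pce = new PerformanceClassEvaluator(mTestName);
    HdrDisplayRequirement req = pce.addR7_1_1_3__H_3_1();
    req.setIsHdr(Utils.IS_HDR);                                   // from Display#isHdr()
    req.setDisplayLuminance(Utils.HDR_DISPLAY_AVERAGE_LUMINANCE); // desired max average luminance, nits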
diff --git a/tests/mediapc/common/src/android/mediapc/cts/common/PerformanceClassEvaluator.java b/tests/mediapc/common/src/android/mediapc/cts/common/PerformanceClassEvaluator.java
index 4b82d12..57091b4 100644
--- a/tests/mediapc/common/src/android/mediapc/cts/common/PerformanceClassEvaluator.java
+++ b/tests/mediapc/common/src/android/mediapc/cts/common/PerformanceClassEvaluator.java
@@ -82,19 +82,21 @@
.<Double>builder()
.setId(RequirementConstants.API_NATIVE_LATENCY)
.setPredicate(RequirementConstants.DOUBLE_LTE)
- .addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 80.0)
- .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 80.0)
- .addRequiredValue(Build.VERSION_CODES.S, 100.0)
.addRequiredValue(Build.VERSION_CODES.R, 100.0)
+ .addRequiredValue(Build.VERSION_CODES.S, 100.0)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 80.0)
+ .addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 80.0)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 80.0)
.build();
RequiredMeasurement<Double> apiJavaLatency = RequiredMeasurement
.<Double>builder()
.setId(RequirementConstants.API_JAVA_LATENCY)
.setPredicate(RequirementConstants.DOUBLE_LTE)
- .addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 80.0)
- .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 80.0)
- .addRequiredValue(Build.VERSION_CODES.S, 100.0)
.addRequiredValue(Build.VERSION_CODES.R, 100.0)
+ .addRequiredValue(Build.VERSION_CODES.S, 100.0)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 80.0)
+ .addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 80.0)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 80.0)
.build();
return new AudioTap2ToneLatencyRequirement(
@@ -127,6 +129,7 @@
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
return new Camera240FpsRequirement(RequirementConstants.R7_5__H_1_9, requirement);
@@ -174,12 +177,14 @@
.setId(RequirementConstants.REAR_CAMERA2_EXTENSION_NIGHT_SUPPORTED)
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
RequiredMeasurement<Boolean> frontCamera2NightRequirement = RequiredMeasurement
.<Boolean>builder()
.setId(RequirementConstants.FRONT_CAMERA2_EXTENSION_NIGHT_SUPPORTED)
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
RequiredMeasurement<Boolean> rearCameraXNightRequirement = RequiredMeasurement
@@ -187,12 +192,14 @@
.setId(RequirementConstants.REAR_CAMERAX_EXTENSION_NIGHT_SUPPORTED)
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
RequiredMeasurement<Boolean> frontCameraXNightRequirement = RequiredMeasurement
.<Boolean>builder()
.setId(RequirementConstants.FRONT_CAMERAX_EXTENSION_NIGHT_SUPPORTED)
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
return new CameraExtensionRequirement(RequirementConstants.R7_5__H_1_15,
@@ -230,6 +237,7 @@
.addRequiredValue(Build.VERSION_CODES.S, 1000.0f)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 1000.0f)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 1000.0f)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 1000.0f)
.build();
RequiredMeasurement<Float> frontJpegLatency = RequiredMeasurement
.<Float>builder()
@@ -239,6 +247,7 @@
.addRequiredValue(Build.VERSION_CODES.S, 1000.0f)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 1000.0f)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 1000.0f)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 1000.0f)
.build();
return new CameraLatencyRequirement(RequirementConstants.R7_5__H_1_5,
@@ -259,6 +268,7 @@
.addRequiredValue(Build.VERSION_CODES.S, 600.0f)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 500.0f)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 500.0f)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 500.0f)
.build();
RequiredMeasurement<Float> frontLaunchLatency = RequiredMeasurement
.<Float>builder()
@@ -268,6 +278,7 @@
.addRequiredValue(Build.VERSION_CODES.S, 600.0f)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 500.0f)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 500.0f)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 500.0f)
.build();
return new CameraLatencyRequirement(RequirementConstants.R7_5__H_1_6,
@@ -299,6 +310,7 @@
.addRequiredValue(Build.VERSION_CODES.S, true)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
return new CameraRawRequirement(RequirementConstants.R7_5__H_1_8, requirement);
@@ -336,6 +348,7 @@
.addRequiredValue(Build.VERSION_CODES.S, TIMESTAMP_REALTIME)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, TIMESTAMP_REALTIME)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, TIMESTAMP_REALTIME)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, TIMESTAMP_REALTIME)
.build();
RequiredMeasurement<Integer> frontTimestampSource = RequiredMeasurement
.<Integer>builder()
@@ -345,6 +358,7 @@
.addRequiredValue(Build.VERSION_CODES.S, TIMESTAMP_REALTIME)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, TIMESTAMP_REALTIME)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, TIMESTAMP_REALTIME)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, TIMESTAMP_REALTIME)
.build();
return new CameraTimestampSourceRequirement(RequirementConstants.R7_5__H_1_4,
@@ -475,6 +489,10 @@
this.setMeasuredValue(RequirementConstants.CONCURRENT_FPS, achievedFps);
}
+ public void setFrameDropsPerSecond(double fdps) {
+ this.setMeasuredValue(RequirementConstants.FRAMES_DROPPED_PER_SECOND, fdps);
+ }
+
// copied from android.mediapc.cts.getReqMinConcurrentInstances due to build issues on aosp
public static int getReqMinConcurrentInstances(int performanceClass, String mimeType1,
String mimeType2, int resolution) {
@@ -685,6 +703,26 @@
}
/**
+ * [2.2.7.1/5.1/H-1-2] MUST support 6 instances of hardware video decoder sessions (AVC,
+ * HEVC, VP9, AV1 or later) in any codec combination running concurrently with 3 sessions
+ * at 1080p resolution@30 fps and 3 sessions at 4k(U) resolution@30fps. For all sessions,
+ * there MUST NOT be more than 1 frame dropped per second. AV1 codecs are only required to
+ * support 1080p resolution, but are still required to support 6 instances at 1080p30fps.
+ */
+ public static ConcurrentCodecRequirement create5_1__H_1_2_4k_drop() {
+ RequiredMeasurement<Double> frameDropsPerSec = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.FRAMES_DROPPED_PER_SECOND)
+ .setPredicate(RequirementConstants.DOUBLE_LTE)
+ // MUST NOT drop more than 1 frame per second
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 1.0)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.FRAMES_DROPPED_PER_SECOND,
+ frameDropsPerSec);
+
+ }
+
+ /**
* [2.2.7.1/5.1/H-1-3] MUST advertise the maximum number of hardware video encoder
* sessions that can be run concurrently in any codec combination via the
* CodecCapabilities.getMaxSupportedInstances() and VideoCapabilities
@@ -790,6 +828,27 @@
}
/**
+ * [5.1/H-1-4] MUST support 6 instances of 8-bit (SDR) hardware video encoder sessions
+ * (AVC, HEVC, VP9, AV1 or later) in any codec combination running concurrently with 4
+ * sessions at 1080p resolution@30 fps and 2 sessions at 4k resolution@30fps, unless AV1.
+ * For all sessions, there MUST NOT be more than 1 frame dropped per second. AV1 codecs are
+ * only required to support 1080p resolution, but are still required to support 6 instances
+ * at 1080p30fps.
+ */
+ public static ConcurrentCodecRequirement create5_1__H_1_4_4k_drop() {
+ RequiredMeasurement<Double> frameDropsPerSec = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.FRAMES_DROPPED_PER_SECOND)
+ .setPredicate(RequirementConstants.DOUBLE_LTE)
+ // MUST NOT drop more than 1 frame per second
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 1.0)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.FRAMES_DROPPED_PER_SECOND,
+ frameDropsPerSec);
+
+ }
+
+ /**
* [2.2.7.1/5.1/H-1-5] MUST advertise the maximum number of hardware video encoder and
* decoder sessions that can be run concurrently in any codec combination via the
* CodecCapabilities.getMaxSupportedInstances() and VideoCapabilities
@@ -904,6 +963,24 @@
}
/**
+ * [2.2.7.1/5.1/H-1-6] Support 6 instances of hardware video decoder and hardware video
+ * encoder sessions (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently
+ * at 720p(R,S) / 1080p(T) / 4k(U) @30fps resolution. For all sessions, there MUST NOT be
+ * more than 1 frame dropped per second.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_6_4k_drop() {
+ RequiredMeasurement<Double> frameDropsPerSec = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.FRAMES_DROPPED_PER_SECOND)
+ .setPredicate(RequirementConstants.DOUBLE_LTE)
+ // MUST NOT drop more than 1 frame per second
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 1.0)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.FRAMES_DROPPED_PER_SECOND,
+ frameDropsPerSec);
+
+ }
+
+ /**
* [2.2.7.1/5.1/H-1-9] Support 2 instances of secure hardware video decoder sessions
* (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently at 1080p
* resolution@30fps.
@@ -935,6 +1012,25 @@
}
/**
+ * [2.2.7.1/5.1/H-1-9] Support 2 instances of secure hardware video decoder sessions
+ * (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently at 4k(U)
+ * resolution@30fps. For all sessions, there MUST NOT be more than 1 frame dropped per
+ * second.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_9_4k_drop() {
+ RequiredMeasurement<Double> frameDropsPerSec = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.FRAMES_DROPPED_PER_SECOND)
+ .setPredicate(RequirementConstants.DOUBLE_LTE)
+ // MUST NOT drop more than 1 frame per second
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 1.0)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.FRAMES_DROPPED_PER_SECOND,
+ frameDropsPerSec);
+ }
+
+ /**
* [2.2.7.1/5.1/H-1-10] Support 3 instances of non-secure hardware video decoder sessions
* together with 1 instance of secure hardware video decoder session (4 instances total)
* (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently at 1080p
@@ -969,6 +1065,25 @@
}
/**
+ * [2.2.7.1/5.1/H-1-10] Support 3 instances of non-secure hardware video decoder sessions
+ * together with 1 instance of secure hardware video decoder session (4 instances total)
+ * (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently at 4k(U)
+ * resolution@30fps. For all sessions, there MUST NOT be more than 1 frame dropped per
+ * second.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_10_4k_drop() {
+ RequiredMeasurement<Double> frameDropsPerSec = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.FRAMES_DROPPED_PER_SECOND)
+ .setPredicate(RequirementConstants.DOUBLE_LTE)
+ // MUST NOT drop more than 1 frame per second
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 1.0)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.FRAMES_DROPPED_PER_SECOND,
+ frameDropsPerSec);
+ }
+
+ /**
* [2.2.7.1/5.1/H-1-19] Support 3 instances of hardware video decoder and hardware video
* encoder sessions (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently
* at 4k(U) @30fps resolution for 10-bit with at most one encoder session.
@@ -986,6 +1101,23 @@
return create4k(RequirementConstants.R5_1__H_1_19, reqConcurrentFps);
}
+
+ /**
+ * [2.2.7.1/5.1/H-1-19] Support 3 instances of hardware video decoder and hardware video
+ * encoder sessions (AVC, HEVC, VP9 or AV1) in any codec combination running concurrently
+ * at 4k(U) @30fps resolution for 10-bit with at most one encoder session.
+ */
+ public static ConcurrentCodecRequirement createR5_1__H_1_19_4k_drop() {
+ RequiredMeasurement<Double> frameDropsPerSec = RequiredMeasurement.<Double>builder()
+ .setId(RequirementConstants.FRAMES_DROPPED_PER_SECOND)
+ .setPredicate(RequirementConstants.DOUBLE_LTE)
+ // MUST NOT drop more than 1 frame per second
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 1.0)
+ .build();
+
+ return new ConcurrentCodecRequirement(RequirementConstants.FRAMES_DROPPED_PER_SECOND,
+ frameDropsPerSec);
+ }
}
public static class ConcurrentRearFrontRequirement extends Requirement {
@@ -1010,6 +1142,7 @@
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
return new ConcurrentRearFrontRequirement(RequirementConstants.R7_5__H_1_11,
@@ -1092,12 +1225,14 @@
.setId(RequirementConstants.REAR_CAMERA_DYNAMIC_TENBITS_SUPPORTED)
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
RequiredMeasurement<Boolean> frontDynamicRangeTenBitsRequirement = RequiredMeasurement
.<Boolean>builder()
.setId(RequirementConstants.FRONT_CAMERA_DYNAMIC_TENBITS_SUPPORTED)
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
return new DynamicRangeTenBitsRequirement(RequirementConstants.R7_5__H_1_16,
rearDynamicRangeTenBitsRequirement, frontDynamicRangeTenBitsRequirement);
@@ -1163,12 +1298,14 @@
.setId(RequirementConstants.REAR_CAMERA_FACE_DETECTION_SUPPORTED)
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
RequiredMeasurement<Boolean> frontFaceDetectionRequirement = RequiredMeasurement
.<Boolean>builder()
.setId(RequirementConstants.FRONT_CAMERA_FACE_DETECTION_SUPPORTED)
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
return new FaceDetectionRequirement(RequirementConstants.R7_5__H_1_17,
rearFaceDetectionRequirement, frontFaceDetectionRequirement);
@@ -1201,6 +1338,7 @@
.addRequiredValue(Build.VERSION_CODES.R, 100.0)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 125.0)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 150.0)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 150.0)
.build();
return new FileSystemRequirement(RequirementConstants.R8_2__H_1_1, filesystem_io_rate);
@@ -1228,6 +1366,7 @@
.setPredicate(RequirementConstants.DOUBLE_GTE)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 10.0)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 10.0)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 10.0)
.build();
return new FileSystemRequirement(RequirementConstants.R8_2__H_1_2, filesystem_io_rate);
@@ -1257,6 +1396,7 @@
.addRequiredValue(Build.VERSION_CODES.R, 200.0)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 250.0)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 250.0)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 250.0)
.build();
return new FileSystemRequirement(RequirementConstants.R8_2__H_1_3, filesystem_io_rate);
@@ -1285,6 +1425,7 @@
.addRequiredValue(Build.VERSION_CODES.R, 25.0)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 40.0)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 100.0)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 100.0)
.build();
return new FileSystemRequirement(RequirementConstants.R8_2__H_1_4, filesystem_io_rate);
@@ -1551,6 +1692,7 @@
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
return new LogicalMultiCameraRequirement(RequirementConstants.R7_5__H_1_13,
@@ -1562,6 +1704,12 @@
public static class MemoryRequirement extends Requirement {
private static final String TAG = MemoryRequirement.class.getSimpleName();
+ // Media performance requires 6 GB (R/S) and 8 GB (T and later) of minimum RAM, but the
+ // constants below are kept a bit lower than the values stated in the Android CDD because
+ // activityManager.getMemoryInfo() typically reports around 5.4 GB on a 6 GB device.
+ private static final long RS_REQUIRED_MEMORY_MB = Utils.MIN_MEMORY_PERF_CLASS_CANDIDATE_MB;
+ private static final long TUV_REQUIRED_MEMORY_MB = Utils.MIN_MEMORY_PERF_CLASS_T_MB;
+
private MemoryRequirement(String id, RequiredMeasurement<?> ... reqs) {
super(id, reqs);
}
@@ -1578,10 +1726,7 @@
.<Long>builder()
.setId(RequirementConstants.PHYSICAL_MEMORY)
.setPredicate(RequirementConstants.LONG_GTE)
- // Media performance requires 6 GB minimum RAM, but keeping the following to
- // 5 GB as activityManager.getMemoryInfo() returns around 5.4 GB on a 6 GB
- // device.
- .addRequiredValue(Build.VERSION_CODES.R, 5L * 1024L)
+ .addRequiredValue(Build.VERSION_CODES.R, RS_REQUIRED_MEMORY_MB)
.build();
return new MemoryRequirement(RequirementConstants.R7_6_1__H_1_1, physical_memory);
@@ -1595,13 +1740,10 @@
.<Long>builder()
.setId(RequirementConstants.PHYSICAL_MEMORY)
.setPredicate(RequirementConstants.LONG_GTE)
- // Media performance requires 6/8 GB minimum RAM, but keeping the following to
- // 5/7 GB as activityManager.getMemoryInfo() returns around 5.4 GB on a 6 GB
- // device.
- .addRequiredValue(Build.VERSION_CODES.S, 5L * 1024L)
- .addRequiredValue(Build.VERSION_CODES.TIRAMISU, 7L * 1024L)
- .addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 7L * 1024L)
- .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 7L * 1024L)
+ .addRequiredValue(Build.VERSION_CODES.S, RS_REQUIRED_MEMORY_MB)
+ .addRequiredValue(Build.VERSION_CODES.TIRAMISU, TUV_REQUIRED_MEMORY_MB)
+ .addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, TUV_REQUIRED_MEMORY_MB)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, TUV_REQUIRED_MEMORY_MB)
.build();
return new MemoryRequirement(RequirementConstants.R7_6_1__H_2_1, physical_memory);
@@ -1632,6 +1774,7 @@
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
return new PreviewStabilizationRequirement(RequirementConstants.R7_5__H_1_12,
@@ -1734,6 +1877,7 @@
.addRequiredValue(Build.VERSION_CODES.S, true)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
RequiredMeasurement<Long> cameraResolution = RequiredMeasurement
@@ -1749,6 +1893,9 @@
.addRequiredValue(
Build.VERSION_CODES.UPSIDE_DOWN_CAKE,
MIN_FRONT_SENSOR_S_PERF_CLASS_RESOLUTION)
+ .addRequiredValue(
+ Build.VERSION_CODES.VANILLA_ICE_CREAM,
+ MIN_FRONT_SENSOR_S_PERF_CLASS_RESOLUTION)
.build();
RequiredMeasurement<Boolean> videoSizeReqSatisfied = RequiredMeasurement
@@ -1759,6 +1906,7 @@
.addRequiredValue(Build.VERSION_CODES.S, true)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
RequiredMeasurement<Double> videoFps = RequiredMeasurement
@@ -1769,6 +1917,7 @@
.addRequiredValue(Build.VERSION_CODES.S, 29.9)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, 29.9)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, 29.9)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, 29.9)
.build();
return new PrimaryCameraRequirement(RequirementConstants.R7_5__H_1_2,
@@ -1922,6 +2071,7 @@
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
RequiredMeasurement<Boolean> frontRequirement = RequiredMeasurement
.<Boolean>builder()
@@ -1929,6 +2079,7 @@
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
return new StreamUseCaseRequirement(RequirementConstants.R7_5__H_1_14,
@@ -1971,11 +2122,24 @@
this.setMeasuredValue(RequirementConstants.AV1_ENC_BITRATE, bitrate);
}
+ public void setHlgEditingSupportedReq(boolean hlgEditingSupported) {
+ this.setMeasuredValue(RequirementConstants.HLG_EDITING, hlgEditingSupported);
+ }
+
+ public void setPortraitResolutionSupportreq(boolean isPortraitSupported) {
+ this.setMeasuredValue(RequirementConstants.PORTRAIT_RESOLUTION, isPortraitSupported);
+ }
+
public void setColorFormatSupportReq(boolean colorFormatSupported) {
this.setMeasuredValue(RequirementConstants.RGBA_1010102_COLOR_FORMAT_REQ,
colorFormatSupported);
}
+ public void setDynamicColorAspectsSupportReq(boolean dynamicColorAspectsSupported) {
+ this.setMeasuredValue(RequirementConstants.DYNAMIC_COLOR_ASPECTS,
+ dynamicColorAspectsSupported);
+ }
+
/**
* [2.2.7.1/5.1/H-1-15] Must have at least 1 HW video decoder supporting 4K60
*/
@@ -2073,6 +2237,53 @@
}
/**
+ * [5.1/H-1-20] MUST support the FEATURE_HlgEditing feature for all hardware AV1 and HEVC
+ * encoders present on the device at 4K resolution or the largest camera-supported
+ * resolution, whichever is less.
+ */
+ public static VideoCodecRequirement createR5_1__H_1_20() {
+ RequiredMeasurement<Boolean> requirement = RequiredMeasurement
+ .<Boolean>builder()
+ .setId(RequirementConstants.HLG_EDITING)
+ .setPredicate(RequirementConstants.BOOLEAN_EQ)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
+ .build();
+
+ return new VideoCodecRequirement(RequirementConstants.R5_1__H_1_20, requirement);
+ }
+
+ /**
+ * [5.1/H-1-21] MUST support FEATURE_DynamicColorAspects for all hardware video decoders
+ * (AVC, HEVC, VP9, AV1 or later).
+ */
+ public static VideoCodecRequirement createR5_1__H_1_21() {
+ RequiredMeasurement<Boolean> requirement = RequiredMeasurement
+ .<Boolean>builder()
+ .setId(RequirementConstants.DYNAMIC_COLOR_ASPECTS)
+ .setPredicate(RequirementConstants.BOOLEAN_EQ)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
+ .build();
+
+ return new VideoCodecRequirement(RequirementConstants.R5_1__H_1_21, requirement);
+ }
+
+ /**
+ * [5.1/H-1-22] MUST support both landscape and portrait resolution for all hardware
+ * codecs. AV1 codecs are limited to 1080p resolution, while other codecs should support
+ * 4k or the camera-preferred resolution (whichever is less).
+ */
+ public static VideoCodecRequirement createR5_1__H_1_22() {
+ RequiredMeasurement<Boolean> requirement = RequiredMeasurement
+ .<Boolean>builder()
+ .setId(RequirementConstants.PORTRAIT_RESOLUTION)
+ .setPredicate(RequirementConstants.BOOLEAN_EQ)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
+ .build();
+
+ return new VideoCodecRequirement(RequirementConstants.R5_1__H_1_22, requirement);
+ }
+
+ /**
* [5.12/H-1-2] MUST support RGBA_1010102 color format for all hardware AV1 and HEVC
* encoders present on the device.
*/
@@ -2118,6 +2329,7 @@
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
RequiredMeasurement<Boolean> frontRequirement = RequiredMeasurement
.<Boolean>builder()
@@ -2125,6 +2337,7 @@
.setPredicate(RequirementConstants.BOOLEAN_EQ)
.addRequiredValue(Build.VERSION_CODES.TIRAMISU, true)
.addRequiredValue(Build.VERSION_CODES.UPSIDE_DOWN_CAKE, true)
+ .addRequiredValue(Build.VERSION_CODES.VANILLA_ICE_CREAM, true)
.build();
return new UltraWideZoomRatioRequirement(RequirementConstants.R7_5__H_1_10,
@@ -2167,6 +2380,14 @@
return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_2_4k());
}
+ public ConcurrentCodecRequirement addR5_1__H_1_2_4k_drop() {
+ return this.addRequirement(ConcurrentCodecRequirement.create5_1__H_1_2_4k_drop());
+ }
+
+ public ConcurrentCodecRequirement addR5_1__H_1_4_4k_drop() {
+ return this.addRequirement(ConcurrentCodecRequirement.create5_1__H_1_4_4k_drop());
+ }
+
public ConcurrentCodecRequirement addR5_1__H_1_3_720p(String mimeType1, String mimeType2,
int resolution) {
return this.addRequirement(
@@ -2221,6 +2442,10 @@
return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_6_4k());
}
+ public ConcurrentCodecRequirement addR5_1__H_1_6_4k_drop() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_6_4k_drop());
+ }
+
public CodecInitLatencyRequirement addR5_1__H_1_7(String mediaType) {
return this.addRequirement(CodecInitLatencyRequirement.createR5_1__H_1_7(mediaType));
}
@@ -2237,6 +2462,10 @@
return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_9_4k());
}
+ public ConcurrentCodecRequirement addR5_1__H_1_9_4k_drop() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_9_4k_drop());
+ }
+
public ConcurrentCodecRequirement addR5_1__H_1_10_1080p() {
return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_10_1080p());
}
@@ -2244,6 +2473,11 @@
public ConcurrentCodecRequirement addR5_1__H_1_10_4k() {
return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_10_4k());
}
+
+ public ConcurrentCodecRequirement addR5_1__H_1_10_4k_drop() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_10_4k_drop());
+ }
+
public SecureCodecRequirement addR5_1__H_1_11() {
return this.addRequirement(SecureCodecRequirement.createR5_1__H_1_11());
}
@@ -2285,6 +2519,24 @@
return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_19());
}
+ public ConcurrentCodecRequirement addR5_1__H_1_19_4k_drop() {
+ return this.addRequirement(ConcurrentCodecRequirement.createR5_1__H_1_19_4k_drop());
+ }
+
+ /* Adds requirement 5.1/H-1-20 */
+ public VideoCodecRequirement addR5_1__H_1_20() {
+ return this.addRequirement(VideoCodecRequirement.createR5_1__H_1_20());
+ }
+
+ /* Adds requirement 5.1/H-1-21 */
+ public VideoCodecRequirement addR5_1__H_1_21() {
+ return this.addRequirement(VideoCodecRequirement.createR5_1__H_1_21());
+ }
+
+ /* Adds requirement 5.1/H-1-22 */
+ public VideoCodecRequirement addR5_1__H_1_22() {
+ return this.addRequirement(VideoCodecRequirement.createR5_1__H_1_22());
+ }
public FrameDropRequirement addR5_3__H_1_1_R() {
return this.addRequirement(FrameDropRequirement.createR5_3__H_1_1_R());
@@ -2417,6 +2669,11 @@
return this.<DensityRequirement>addRequirement(DensityRequirement.createR7_1_1_3__H_2_1());
}
+ public HdrDisplayRequirement addR7_1_1_3__H_3_1() {
+ return this.<HdrDisplayRequirement>addRequirement(
+ HdrDisplayRequirement.createR7_1_1_3__H_3_1());
+ }
+
public MemoryRequirement addR7_6_1__H_2_1() {
return this.<MemoryRequirement>addRequirement(MemoryRequirement.createR7_6_1__H_2_1());
}
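A sketch of reporting one of the new Android V codec requirements through the evaluator; codecCaps (the codec's MediaCodecInfo.CodecCapabilities) and mTestName are assumptions, and FEATURE_DynamicColorAspects is the platform codec feature the requirement checks:

    PerformanceClassEvaluator pce = new PerformanceClassEvaluator(mTestName);
    PerformanceClassEvaluator.VideoCodecRequirement r5_1__H_1_21 = pce.addR5_1__H_1_21();
    r5_1__H_1_21.setDynamicColorAspectsSupportReq(codecCaps.isFeatureSupported(
            MediaCodecInfo.CodecCapabilities.FEATURE_DynamicColorAspects));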
diff --git a/tests/mediapc/common/src/android/mediapc/cts/common/RequirementConstants.java b/tests/mediapc/common/src/android/mediapc/cts/common/RequirementConstants.java
index 6c3e99a..0af31ac 100644
--- a/tests/mediapc/common/src/android/mediapc/cts/common/RequirementConstants.java
+++ b/tests/mediapc/common/src/android/mediapc/cts/common/RequirementConstants.java
@@ -49,6 +49,9 @@
public static final String R5_1__H_1_17 = "r5_1__h_1_17"; // 5.1/H-1-17
public static final String R5_1__H_1_18 = "r5_1__h_1_18"; // 5.1/H-1-18
public static final String R5_1__H_1_19 = "r5_1__h_1_19"; // 5.1/H-1-19
+ public static final String R5_1__H_1_20 = "r5_1__h_1_20"; // 5.1/H-1-20
+ public static final String R5_1__H_1_21 = "r5_1__h_1_21"; // 5.1/H-1-21
+ public static final String R5_1__H_1_22 = "r5_1__h_1_22"; // 5.1/H-1-22
public static final String R5_3__H_1_1 = "r5_3__h_1_1"; // 5.3/H-1-1
public static final String R5_3__H_1_2 = "r5_3__h_1_2"; // 5.3/H-1-2
@@ -86,6 +89,7 @@
public static final String R7_6_1__H_1_1 = "r7_6_1__h_1_1"; // 7.6.1/H-1-1
public static final String R7_1_1_1__H_2_1 = "r7_1_1_1__h_2_1"; // 7.1.1.1/H-2-1
public static final String R7_1_1_3__H_2_1 = "r7_1_1_3__h_2_1"; // 7.1.1.3/H-2-1
+ public static final String R7_1_1_3__H_3_1 = "r7_1_1_3__h_3_1"; // 7.1.1.3/H-3-1
public static final String R7_6_1__H_2_1 = "r7_6_1__h_2_1"; // 7.6.1/H-2-1
public static final String R8_2__H_1_1 = "r8_2__h_1_1"; // 8.2/H-1-1
@@ -114,12 +118,15 @@
public static final String CONCURRENT_SESSIONS = "concurrent_sessions";
public static final String DISPLAY_DENSITY = "display_density_dpi";
+ public static final String DISPLAY_LUMINANCE_NITS = "display_luminance_nits";
+ public static final String DYNAMIC_COLOR_ASPECTS = "dynamic_color_aspects";
public static final String EXT_YUV_EXTENSION = "ext_yuv_target_supported";
public static final String FILESYSTEM_IO_RATE = "filesystem_io_rate_mbps";
public static final String FRAME_RATE = "frame_rate";
public static final String FRAMES_DROPPED = "frame_drops_per_30sec";
+ public static final String FRAMES_DROPPED_PER_SECOND = "frame_drops_per_sec";
public static final String FRONT_CAMERA_DYNAMIC_TENBITS_SUPPORTED =
"front_camera_dynamic_tenbits_supported";
@@ -140,6 +147,10 @@
public static final String FRONT_CAMERA2_EXTENSION_NIGHT_SUPPORTED =
"front_camera2_extension_night_supported";
+ public static final String HLG_EDITING = "hlg_editing";
+
+ public static final String IS_HDR = "is_hdr";
+
public static final String LONG_RESOLUTION = "long_resolution_pixels";
public static final String NUM_4k_HW_DEC = "number_4k_hw_decoders";
@@ -149,6 +160,8 @@
public static final String PHYSICAL_MEMORY = "physical_memory_mb";
+ public static final String PORTRAIT_RESOLUTION = "portrait_resolution";
+
public static final String PRIMARY_CAMERA_AVAILABLE = "primary_camera_available";
public static final String PRIMARY_CAMERA_VIDEO_FPS =
"primary_camera_video_fps";
@@ -202,6 +215,7 @@
public static final BiPredicate<Double, Double> DOUBLE_LTE = RequirementConstants.lte();
public static final BiPredicate<Double, Double> DOUBLE_EQ = RequirementConstants.eq();
+ public static final BiPredicate<Float, Float> FLOAT_GTE = RequirementConstants.gte();
public static final BiPredicate<Float, Float> FLOAT_LTE = RequirementConstants.lte();
public static final BiPredicate<Boolean, Boolean> BOOLEAN_EQ = RequirementConstants.eq();
diff --git a/tests/mediapc/common/src/android/mediapc/cts/common/Utils.java b/tests/mediapc/common/src/android/mediapc/cts/common/Utils.java
index 3f4ec3b..d0cedd2 100644
--- a/tests/mediapc/common/src/android/mediapc/cts/common/Utils.java
+++ b/tests/mediapc/common/src/android/mediapc/cts/common/Utils.java
@@ -59,13 +59,15 @@
public static final int MIN_DISPLAY_LONG_CANDIDATE_PIXELS = 1920;
public static final int DISPLAY_SHORT_PIXELS;
public static final int MIN_DISPLAY_SHORT_CANDIDATE_PIXELS = 1080;
+ public static final boolean IS_HDR;
+ public static final float HDR_DISPLAY_AVERAGE_LUMINANCE;
public static final long TOTAL_MEMORY_MB;
// Media performance requires 6 GB minimum RAM, but keeping the following to 5 GB
// as activityManager.getMemoryInfo() returns around 5.4 GB on a 6 GB device.
public static final long MIN_MEMORY_PERF_CLASS_CANDIDATE_MB = 5 * 1024;
// Android T Media performance requires 8 GB min RAM, so setting lower as above
- public static final long MIN_MEMORY_PERF_CLASS_T_MB = 7 * 1024;
+ public static final long MIN_MEMORY_PERF_CLASS_T_MB = 6800;
private static final boolean MEETS_AVC_CODEC_PRECONDITIONS;
static {
@@ -127,6 +129,10 @@
DISPLAY_DPI = Math.max((int) (maxDiagonalPixels / diagonalInch),
context.getResources().getConfiguration().densityDpi);
+ IS_HDR = defaultDisplay.isHdr();
+ HDR_DISPLAY_AVERAGE_LUMINANCE =
+ defaultDisplay.getHdrCapabilities().getDesiredMaxAverageLuminance();
+
ActivityManager activityManager = context.getSystemService(ActivityManager.class);
ActivityManager.MemoryInfo memoryInfo = new ActivityManager.MemoryInfo();
activityManager.getMemoryInfo(memoryInfo);
@@ -136,6 +142,8 @@
DISPLAY_LONG_PIXELS = 0;
DISPLAY_SHORT_PIXELS = 0;
TOTAL_MEMORY_MB = 0;
+ IS_HDR = false;
+ HDR_DISPLAY_AVERAGE_LUMINANCE = 0;
}
MEETS_AVC_CODEC_PRECONDITIONS = meetsAvcCodecPreconditions();
}
diff --git a/tests/mediapc/src/android/mediapc/cts/CodecTestBase.java b/tests/mediapc/src/android/mediapc/cts/CodecTestBase.java
index 85d1a4e..812f6f0 100644
--- a/tests/mediapc/src/android/mediapc/cts/CodecTestBase.java
+++ b/tests/mediapc/src/android/mediapc/cts/CodecTestBase.java
@@ -18,6 +18,7 @@
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
+import static android.mediapc.cts.common.CodecMetrics.getMetrics;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
@@ -34,6 +35,7 @@
import android.media.MediaFormat;
import android.media.NotProvisionedException;
import android.media.ResourceBusyException;
+import android.mediapc.cts.common.CodecMetrics;
import android.os.Build;
import android.util.Log;
import android.util.Pair;
@@ -213,6 +215,8 @@
boolean mSignalEOSWithLastFrame;
int mInputCount;
int mOutputCount;
+ double mFrameDrops;
+ long mLastPresentationTimeUs = -1;
long mPrevOutputPts;
boolean mSignalledOutFormatChanged;
MediaFormat mOutFormat;
@@ -251,6 +255,16 @@
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
mSawOutputEOS = true;
}
+
+ long expectedFrameDurationUs = 1000000 / 30;
+ long presentationTimeUs = info.presentationTimeUs;
+ if (mLastPresentationTimeUs != -1) {
+ if (presentationTimeUs > mLastPresentationTimeUs + expectedFrameDurationUs) {
+ mFrameDrops++;
+ }
+ }
+ mLastPresentationTimeUs = presentationTimeUs;
+
int outputCount = mOutputCount;
// handle output count prior to releasing the buffer as that can take time
if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
@@ -300,6 +314,8 @@
mSignalEOSWithLastFrame = signalEOSWithLastFrame;
mInputCount = 0;
mOutputCount = 0;
+ mFrameDrops = 0;
+ mLastPresentationTimeUs = -1;
mPrevOutputPts = Long.MIN_VALUE;
mSignalledOutFormatChanged = false;
}
@@ -900,7 +916,7 @@
* The following class decodes the given testFile using decoder created by the given decoderName
* in surface mode(uses PersistentInputSurface) and returns the achieved fps for decoding.
*/
-class Decode extends CodecDecoderTestBase implements Callable<Double> {
+class Decode extends CodecDecoderTestBase implements Callable<CodecMetrics> {
private static final String LOG_TAG = Decode.class.getSimpleName();
final String mDecoderName;
@@ -941,13 +957,13 @@
}
}
- public Double doDecode() throws Exception {
+ public CodecMetrics doDecode() throws Exception {
MediaFormat format = setUpSource(mTestFile);
ArrayList<MediaFormat> formats = new ArrayList<>();
formats.add(format);
// If the decoder doesn't support the formats, then return 0 to indicate that decode failed
if (!areFormatsSupported(mDecoderName, formats)) {
- return (Double) 0.0;
+ return getMetrics(0.0, 0.0);
}
mCodec = MediaCodec.createByCodecName(mDecoderName);
mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
@@ -958,7 +974,7 @@
} catch (Exception e) {
Log.e(LOG_TAG, "Stopping the test because codec.start() failed.", e);
mCodec.release();
- return (Double) 0.0;
+ return getMetrics(0.0, 0.0);
}
// capture timestamps at receipt of output buffers
@@ -981,17 +997,17 @@
((mEndTimeMillis - mStartTimeMillis) / 1000.0);
Log.d(LOG_TAG, "Decode Mime: " + mMime + " Decoder: " + mDecoderName +
" Achieved fps: " + fps);
- return fps;
+ return getMetrics(fps, mFrameDrops / 30);
}
@Override
- public Double call() throws Exception {
+ public CodecMetrics call() throws Exception {
try {
return doDecode();
} catch (Exception e) {
Log.d(LOG_TAG, "Decode Mime: " + mMime + " Decoder: " + mDecoderName
+ " Failed due to: " + e);
- return -1.0;
+ return getMetrics(-1.0, 0.0);
}
}
}
@@ -1017,7 +1033,7 @@
* The following class encodes a YUV video file to a given mimeType using encoder created by the
* given encoderName and configuring to 30fps format.
*/
-class Encode extends CodecEncoderTestBase implements Callable<Double> {
+class Encode extends CodecEncoderTestBase implements Callable<CodecMetrics> {
private static final String LOG_TAG = Encode.class.getSimpleName();
private final String mEncoderName;
@@ -1069,7 +1085,7 @@
}
- public Double doEncode() throws Exception {
+ public CodecMetrics doEncode() throws Exception {
MediaFormat format = setUpFormat();
mWidth = format.getInteger(MediaFormat.KEY_WIDTH);
mHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
@@ -1091,11 +1107,11 @@
((mEndTimeMillis - mStartTimeMillis) / 1000.0);
Log.d(LOG_TAG, "Encode Mime: " + mMime + " Encoder: " + mEncoderName +
" Achieved fps: " + fps);
- return fps;
+ return getMetrics(fps, mFrameDrops / 30);
}
@Override
- public Double call() throws Exception {
+ public CodecMetrics call() throws Exception {
return doEncode();
}
}
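To make the frame-drop heuristic added above concrete: at a nominal 30 fps the expected spacing between output buffers is 1000000 / 30 = 33333 microseconds, so any larger gap between consecutive presentation timestamps counts as one drop. Illustrative numbers only:

    long expectedFrameDurationUs = 1000000 / 30;  // 33333 us at 30 fps
    long lastPresentationTimeUs = 100000;         // previous output buffer timestamp
    long presentationTimeUs = 170000;             // current output buffer, 70000 us later
    boolean dropped = presentationTimeUs > lastPresentationTimeUs + expectedFrameDurationUs; // true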
diff --git a/tests/mediapc/src/android/mediapc/cts/CodecTranscoderTestBase.java b/tests/mediapc/src/android/mediapc/cts/CodecTranscoderTestBase.java
index 00bb02e..5470c3b 100644
--- a/tests/mediapc/src/android/mediapc/cts/CodecTranscoderTestBase.java
+++ b/tests/mediapc/src/android/mediapc/cts/CodecTranscoderTestBase.java
@@ -16,6 +16,7 @@
package android.mediapc.cts;
+import static android.mediapc.cts.common.CodecMetrics.getMetrics;
import static android.mediav2.common.cts.CodecTestBase.PROFILE_HLG_MAP;
import static android.mediapc.cts.CodecTestBase.areFormatsSupported;
@@ -26,6 +27,7 @@
import android.media.MediaCodecInfo;
import android.media.MediaExtractor;
import android.media.MediaFormat;
+import android.mediapc.cts.common.CodecMetrics;
import android.util.Log;
import android.util.Pair;
import android.view.Surface;
@@ -44,6 +46,8 @@
String mTestFile;
int mBitrate;
int mFrameRate;
+ double mFrameDrops;
+ long mLastPresentationTimeUs = -1;
boolean mUseHighBitDepth;
MediaExtractor mExtractor;
int mMaxBFrames;
@@ -115,6 +119,8 @@
mDecInputCount = 0;
mDecOutputCount = 0;
mEncOutputCount = 0;
+ mFrameDrops = 0;
+ mLastPresentationTimeUs = -1;
}
void configureCodec(MediaFormat decFormat, MediaFormat encFormat, boolean isAsync,
@@ -172,6 +178,14 @@
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
mSawDecOutputEOS = true;
}
+ long expectedFrameDurationUs = 1000000 / mFrameRate;
+ long presentationTimeUs = info.presentationTimeUs;
+ if (mLastPresentationTimeUs != -1) {
+ if (presentationTimeUs > mLastPresentationTimeUs + expectedFrameDurationUs) {
+ mFrameDrops++;
+ }
+ }
+ mLastPresentationTimeUs = presentationTimeUs;
if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
mDecOutputCount++;
}
@@ -182,6 +196,14 @@
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
mSawEncOutputEOS = true;
}
+ long expectedFrameDurationUs = 1000000 / mFrameRate;
+ long presentationTimeUs = info.presentationTimeUs;
+ if (mLastPresentationTimeUs != -1) {
+ if (presentationTimeUs > mLastPresentationTimeUs + expectedFrameDurationUs) {
+ mFrameDrops++;
+ }
+ }
+ mLastPresentationTimeUs = presentationTimeUs;
if (info.size > 0 && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
mEncOutputCount++;
}
@@ -371,7 +393,7 @@
/**
* The following class transcodes the given testFile and returns the achieved fps for transcoding.
*/
-class Transcode extends CodecTranscoderTestBase implements Callable<Double> {
+class Transcode extends CodecTranscoderTestBase implements Callable<CodecMetrics> {
private static final String LOG_TAG = Transcode.class.getSimpleName();
final String mDecoderName;
@@ -386,13 +408,13 @@
mIsAsync = isAsync;
}
- public Double doTranscode() throws Exception {
+ public CodecMetrics doTranscode() throws Exception {
MediaFormat decoderFormat = setUpSource(mTestFile);
ArrayList<MediaFormat> formats = new ArrayList<>();
formats.add(decoderFormat);
// If the decoder doesn't support the formats, then return 0 to indicate that decode failed
if (!areFormatsSupported(mDecoderName, formats)) {
- return (Double) 0.0;
+ return getMetrics(0.0, 0.0);
}
mDecoder = MediaCodec.createByCodecName(mDecoderName);
@@ -416,17 +438,17 @@
double fps = mEncOutputCount / ((end - start) / 1000.0);
Log.d(LOG_TAG, "Mime: " + mMime + " Decoder: " + mDecoderName + " Encoder: " +
mEncoderName + " Achieved fps: " + fps);
- return fps;
+ return getMetrics(fps, mFrameDrops / 30);
}
@Override
- public Double call() throws Exception {
+ public CodecMetrics call() throws Exception {
try {
return doTranscode();
} catch (Exception e) {
Log.d(LOG_TAG, "Mime: " + mMime + " Decoder: " + mDecoderName + " Encoder: "
+ mEncoderName + " Failed due to: " + e);
- return -1.0;
+ return getMetrics(-1.0, 0.0);
}
}
}
@@ -501,7 +523,7 @@
}
@Override
- public Double doTranscode() throws Exception {
+ public CodecMetrics doTranscode() throws Exception {
MediaFormat decoderFormat = setUpSource(mTestFile);
mDecoder = MediaCodec.createByCodecName(mDecoderName);
MediaFormat encoderFormat = setUpEncoderFormat(decoderFormat);
@@ -525,7 +547,7 @@
Log.d(LOG_TAG,
"Mime: " + mMime + " Decoder: " + mDecoderName + " Encoder: " + mEncoderName
+ " Achieved fps: " + fps);
- return fps;
+ return getMetrics(fps, mFrameDrops / 30);
}
@Override
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiCodecPerfTestBase.java b/tests/mediapc/src/android/mediapc/cts/MultiCodecPerfTestBase.java
index 71e414f..9b1b89d 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiCodecPerfTestBase.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiCodecPerfTestBase.java
@@ -19,6 +19,7 @@
import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_SecurePlayback;
import static android.mediapc.cts.CodecDecoderTestBase.WIDEVINE_UUID;
import static android.mediapc.cts.CodecTestBase.selectHardwareCodecs;
+import static android.mediapc.cts.common.CodecMetrics.getMetrics;
import static org.junit.Assert.assertTrue;
@@ -29,6 +30,7 @@
import android.media.MediaDrm;
import android.media.MediaFormat;
import android.media.UnsupportedSchemeException;
+import android.mediapc.cts.common.CodecMetrics;
import android.mediapc.cts.common.Utils;
import android.net.ConnectivityManager;
import android.net.NetworkCapabilities;
@@ -42,9 +44,15 @@
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.Future;
public class MultiCodecPerfTestBase {
private static final String LOG_TAG = MultiCodecPerfTestBase.class.getSimpleName();
@@ -165,6 +173,24 @@
return selectHardwareCodecs(mime, formatsList, null, isEncoder, allCodecs);
}
+ protected CodecMetrics invokeWithThread(int maxInstances, Collection<? extends
+ Callable<CodecMetrics>> testList) throws ExecutionException, InterruptedException {
+ double measuredParams = 0;
+ double framesDroppedPerSecond = 0;
+ ExecutorService pool = Executors.newFixedThreadPool(maxInstances);
+ try {
+ List<Future<CodecMetrics>> resultList = pool.invokeAll(testList);
+ for (Future<CodecMetrics> result : resultList) {
+ CodecMetrics metrics = result.get();
+ measuredParams += metrics.fps();
+ framesDroppedPerSecond += metrics.fdps();
+ }
+ } finally {
+ pool.shutdown();
+ }
+ return getMetrics(measuredParams, framesDroppedPerSecond);
+ }
+
// Returns the max number of 30 fps instances that the given list of mimeCodecPairs
// supports. It also checks that the each codec supports a PerformancePoint that covers
// required number of 30 fps instances.
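A sketch of the call pattern the concurrency tests below switch to; the Decode constructor arguments follow MultiDecoderPerfTest, and maxInstances is assumed to have been computed already:

    List<Decode> testList = new ArrayList<>();
    for (int i = 0; i < maxInstances; i++) {
        testList.add(new Decode(mMime, mTestFile, mDecoderName, mIsAsync, false /* isSecure */));
    }
    CodecMetrics result = invokeWithThread(maxInstances, testList);
    double achievedFrameRate = result.fps();
    double frameDropsPerSec = result.fdps();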
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiDecoderPairPerfTest.java b/tests/mediapc/src/android/mediapc/cts/MultiDecoderPairPerfTest.java
index 6c80d5e..cec4e18 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiDecoderPairPerfTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiDecoderPairPerfTest.java
@@ -21,6 +21,7 @@
import static android.mediapc.cts.CodecTestBase.mediaTypePrefix;
import android.media.MediaFormat;
+import android.mediapc.cts.common.CodecMetrics;
import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
import android.util.Pair;
@@ -40,9 +41,6 @@
import java.util.Collection;
import java.util.List;
import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
/**
* The following test class calculates the maximum number of concurrent decode sessions that it can
@@ -230,6 +228,7 @@
int maxInstances = checkAndGetMaxSupportedInstancesForCodecCombinations(height, width,
mimeDecoderPairs, false, requiredMinInstances);
double achievedFrameRate = 0.0;
+ double frameDropsPerSec = 0.0;
boolean meetsPreconditions = (isFirstSecure || isSecondSecure) ?
meetsSecureDecodePreconditions() : true;
// secure test should not reach this point if secure codec doesn't support PP
@@ -278,27 +277,30 @@
testList.add(new Decode(mSecondPair.first, testFile, mSecondPair.second,
mIsAsync, isSecure));
}
- ExecutorService pool = Executors.newFixedThreadPool(maxInstances);
- List<Future<Double>> resultList = pool.invokeAll(testList);
- for (Future<Double> result : resultList) {
- achievedFrameRate += result.get();
- }
- pool.shutdown();
+ CodecMetrics result = invokeWithThread(maxInstances, testList);
+ achievedFrameRate = result.fps();
+ frameDropsPerSec = result.fdps();
}
PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
if (secureWithUnsecure) {
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_10;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_10_drop;
if (height > 1080) {
r5_1__H_1_10 = pce.addR5_1__H_1_10_4k();
+ r5_1__H_1_10_drop = pce.addR5_1__H_1_10_4k_drop();
+ r5_1__H_1_10_drop.setFrameDropsPerSecond(frameDropsPerSec);
} else {
r5_1__H_1_10 = pce.addR5_1__H_1_10_1080p();
}
r5_1__H_1_10.setConcurrentFps(achievedFrameRate);
} else if (bothSecure) {
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_9;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_9_drop;
if (height > 1080) {
r5_1__H_1_9 = pce.addR5_1__H_1_9_4k();
+ r5_1__H_1_9_drop = pce.addR5_1__H_1_9_4k_drop();
+ r5_1__H_1_9_drop.setFrameDropsPerSecond(frameDropsPerSec);
} else {
r5_1__H_1_9 = pce.addR5_1__H_1_9_1080p();
}
@@ -306,11 +308,14 @@
} else {
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_1;
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_2;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_2_drop;
if (height > 1080) {
r5_1__H_1_1 = pce.addR5_1__H_1_1_4k();
r5_1__H_1_2 = pce.addR5_1__H_1_2_4k();
+ r5_1__H_1_2_drop = pce.addR5_1__H_1_2_4k_drop();
r5_1__H_1_1.setConcurrentInstances(maxInstances);
r5_1__H_1_2.setConcurrentFps(achievedFrameRate);
+ r5_1__H_1_2_drop.setFrameDropsPerSecond(frameDropsPerSec);
} else if (height == 1080) {
r5_1__H_1_1 = pce.addR5_1__H_1_1_1080p();
r5_1__H_1_2 = pce.addR5_1__H_1_2_1080p();
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiDecoderPerfTest.java b/tests/mediapc/src/android/mediapc/cts/MultiDecoderPerfTest.java
index ecc499c..4ac59ac 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiDecoderPerfTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiDecoderPerfTest.java
@@ -21,6 +21,7 @@
import static android.mediapc.cts.CodecTestBase.mediaTypePrefix;
import android.media.MediaFormat;
+import android.mediapc.cts.common.CodecMetrics;
import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
import android.util.Pair;
@@ -40,9 +41,6 @@
import java.util.Collection;
import java.util.List;
import java.util.Map;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
/**
* The following test class validates the maximum number of concurrent decode sessions that it can
@@ -175,10 +173,10 @@
int maxInstances = checkAndGetMaxSupportedInstancesForCodecCombinations(height, width,
mimeDecoderPairs, false, requiredMinInstances);
double achievedFrameRate = 0.0;
+ double frameDropsPerSec = 0.0;
boolean meetsPreconditions = isSecure ? meetsSecureDecodePreconditions() : true;
if (meetsPreconditions && maxInstances >= requiredMinInstances) {
- ExecutorService pool = Executors.newFixedThreadPool(maxInstances);
List<Decode> testList = new ArrayList<>();
if (height > 1080 && !isSecure) {
int halfMaxInstances = maxInstances / 2;
@@ -193,19 +191,20 @@
testList.add(new Decode(mMime, mTestFile, mDecoderName, mIsAsync, isSecure));
}
}
- List<Future<Double>> resultList = pool.invokeAll(testList);
- for (Future<Double> result : resultList) {
- achievedFrameRate += result.get();
- }
- pool.shutdown();
+ CodecMetrics result = invokeWithThread(maxInstances, testList);
+ achievedFrameRate = result.fps();
+ frameDropsPerSec = result.fdps();
}
PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
if (isSecure) {
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_9;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_9_drop;
if(height > 1080){
r5_1__H_1_9 = pce.addR5_1__H_1_9_4k();
+ r5_1__H_1_9_drop = pce.addR5_1__H_1_9_4k_drop();
r5_1__H_1_9.setConcurrentFps(achievedFrameRate);
+ r5_1__H_1_9_drop.setFrameDropsPerSecond(frameDropsPerSec);
} else {
r5_1__H_1_9 = pce.addR5_1__H_1_9_1080p();
r5_1__H_1_9.setConcurrentFps(achievedFrameRate);
@@ -213,11 +212,14 @@
} else {
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_1;
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_2;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_2_drop;
if (height > 1080) {
r5_1__H_1_1 = pce.addR5_1__H_1_1_4k();
r5_1__H_1_2 = pce.addR5_1__H_1_2_4k();
+ r5_1__H_1_2_drop = pce.addR5_1__H_1_2_4k_drop();
r5_1__H_1_1.setConcurrentInstances(maxInstances);
r5_1__H_1_2.setConcurrentFps(achievedFrameRate);
+ r5_1__H_1_2_drop.setFrameDropsPerSecond(frameDropsPerSec);
} else if (height == 1080) {
r5_1__H_1_1 = pce.addR5_1__H_1_1_1080p();
r5_1__H_1_2 = pce.addR5_1__H_1_2_1080p();
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiEncoderPairPerfTest.java b/tests/mediapc/src/android/mediapc/cts/MultiEncoderPairPerfTest.java
index 715242b..8d289b9 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiEncoderPairPerfTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiEncoderPairPerfTest.java
@@ -21,6 +21,7 @@
import static android.mediapc.cts.CodecTestBase.mediaTypePrefix;
import android.media.MediaFormat;
+import android.mediapc.cts.common.CodecMetrics;
import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
import android.util.Pair;
@@ -39,9 +40,6 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
/**
* The following test class calculates the maximum number of concurrent encode sessions that it can
@@ -154,6 +152,7 @@
int maxInstances = checkAndGetMaxSupportedInstancesForCodecCombinations(height, width,
mimeEncoderPairs, true, requiredMinInstances);
double achievedFrameRate = 0.0;
+ double frameDropsPerSec = 0.0;
boolean firstPairAV1 = mFirstPair.first.equals(MediaFormat.MIMETYPE_VIDEO_AV1);
boolean secondPairAV1 = mSecondPair.first.equals(MediaFormat.MIMETYPE_VIDEO_AV1);
if (maxInstances >= requiredMinInstances) {
@@ -161,7 +160,6 @@
int firstPairInstances = maxInstances - secondPairInstances;
int secondPairInstances1080p = 2 * secondPairInstances / 3;
int firstPairInstances1080p = 2 * firstPairInstances / 3;
- ExecutorService pool = Executors.newFixedThreadPool(maxInstances);
List<Encode> testList = new ArrayList<>();
if (height > 1080) {
for (int i = 0; i < firstPairInstances1080p; i++) {
@@ -193,21 +191,22 @@
width, 30, bitrate));
}
}
- List<Future<Double>> resultList = pool.invokeAll(testList);
- for (Future<Double> result : resultList) {
- achievedFrameRate += result.get();
- }
- pool.shutdown();
+ CodecMetrics result = invokeWithThread(maxInstances, testList);
+ achievedFrameRate = result.fps();
+ frameDropsPerSec = result.fdps();
}
PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_3;
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_4;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_4_drop;
// Achieved frame rate is not compared as this test runs in byte buffer mode.
if (height > 1080) {
r5_1__H_1_3 = pce.addR5_1__H_1_3_4k();
r5_1__H_1_4 = pce.addR5_1__H_1_4_4k();
+ r5_1__H_1_4_drop = pce.addR5_1__H_1_4_4k_drop();
r5_1__H_1_3.setConcurrentInstances(maxInstances);
r5_1__H_1_4.setConcurrentFps(achievedFrameRate);
+ r5_1__H_1_4_drop.setFrameDropsPerSecond(frameDropsPerSec);
} else if (height == 1080) {
r5_1__H_1_3 = pce.addR5_1__H_1_3_1080p();
r5_1__H_1_4 = pce.addR5_1__H_1_4_1080p();
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiEncoderPerfTest.java b/tests/mediapc/src/android/mediapc/cts/MultiEncoderPerfTest.java
index 460cf9d..b7da39b 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiEncoderPerfTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiEncoderPerfTest.java
@@ -21,6 +21,7 @@
import static android.mediapc.cts.CodecTestBase.mediaTypePrefix;
import android.media.MediaFormat;
+import android.mediapc.cts.common.CodecMetrics;
import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
import android.util.Pair;
@@ -39,9 +40,6 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
/**
* The following test class validates the maximum number of concurrent encode sessions that it can
@@ -135,9 +133,9 @@
int maxInstances = checkAndGetMaxSupportedInstancesForCodecCombinations(height, width,
mimeEncoderPairs, true, requiredMinInstances);
double achievedFrameRate = 0.0;
+ double frameDropsPerSec = 0.0;
boolean hasAV1 = mMime.equals(MediaFormat.MIMETYPE_VIDEO_AV1);
if (maxInstances >= requiredMinInstances) {
- ExecutorService pool = Executors.newFixedThreadPool(maxInstances);
List<Encode> testList = new ArrayList<>();
if (height > 1080) {
int instances4k = maxInstances / 3;
@@ -161,21 +159,22 @@
new Encode(mMime, mEncoderName, mIsAsync, height, width, 30, bitrate));
}
}
- List<Future<Double>> resultList = pool.invokeAll(testList);
- for (Future<Double> result : resultList) {
- achievedFrameRate += result.get();
- }
- pool.shutdown();
+ CodecMetrics result = invokeWithThread(maxInstances, testList);
+ achievedFrameRate = result.fps();
+ frameDropsPerSec = result.fdps();
}
PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_3;
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_4;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_4_drop;
// Achieved frame rate is not compared as this test runs in byte buffer mode.
if (height > 1080) {
r5_1__H_1_3 = pce.addR5_1__H_1_3_4k();
r5_1__H_1_4 = pce.addR5_1__H_1_4_4k();
+ r5_1__H_1_4_drop = pce.addR5_1__H_1_4_4k_drop();
r5_1__H_1_3.setConcurrentInstances(maxInstances);
r5_1__H_1_4.setConcurrentFps(achievedFrameRate);
+ r5_1__H_1_4_drop.setFrameDropsPerSecond(frameDropsPerSec);
} else if (height == 1080) {
r5_1__H_1_3 = pce.addR5_1__H_1_3_1080p();
r5_1__H_1_4 = pce.addR5_1__H_1_4_1080p();
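Note on the refactor above: the per-test ExecutorService/Future plumbing is replaced by a shared invokeWithThread helper that returns a combined CodecMetrics (fps()/fdps()). That helper lives in the common test base and is not part of this change; the sketch below is only a minimal approximation of the assumed aggregation (summing per-instance frame rates and frame drops), with a hypothetical local CodecMetrics record standing in for android.mediapc.cts.common.CodecMetrics.

    // Illustrative sketch only -- not the actual CodecTestBase helper.
    import java.util.List;
    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;

    final class CodecMetricsSketch {
        // Hypothetical stand-in exposing the fps()/fdps() accessors used above.
        record CodecMetrics(double fps, double fdps) { }

        static CodecMetrics invokeWithThread(int maxInstances,
                List<? extends Callable<CodecMetrics>> testList) throws Exception {
            ExecutorService pool = Executors.newFixedThreadPool(maxInstances);
            try {
                double fps = 0.0;
                double fdps = 0.0;
                for (Future<CodecMetrics> result : pool.invokeAll(testList)) {
                    CodecMetrics m = result.get();
                    fps += m.fps();   // total achieved frame rate across instances
                    fdps += m.fdps(); // total frame drops per second across instances
                }
                return new CodecMetrics(fps, fdps);
            } finally {
                pool.shutdown();
            }
        }
    }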
diff --git a/tests/mediapc/src/android/mediapc/cts/MultiTranscoderPerfTest.java b/tests/mediapc/src/android/mediapc/cts/MultiTranscoderPerfTest.java
index ed13021..ee4cfb5 100644
--- a/tests/mediapc/src/android/mediapc/cts/MultiTranscoderPerfTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/MultiTranscoderPerfTest.java
@@ -23,6 +23,7 @@
import static org.junit.Assert.assertTrue;
import android.media.MediaFormat;
+import android.mediapc.cts.common.CodecMetrics;
import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
import android.util.Pair;
@@ -188,6 +189,7 @@
checkAndGetMaxSupportedInstancesForCodecCombinations(height, width, mimeCodecPairs,
false, requiredMinInstances);
double achievedFrameRate = 0.0;
+ double frameDropsPerSec = 0.0;
if (false) {
// if we had a reason not to even try running the tests, we would report 0s.
achievedFrameRate = 0.0;
@@ -215,7 +217,7 @@
useHighBitDepth));
}
}
- List<Future<Double>> decodeResultList = null;
+ List<Future<CodecMetrics>> decodeResultList = null;
if (maxInstances % 2 == 1) {
List<DecodeToSurface> decodeList = new ArrayList<>();
mActivityRule.getActivity().waitTillSurfaceIsCreated();
@@ -228,23 +230,25 @@
mIsAsync));
decodeResultList = pool.invokeAll(decodeList);
}
- List<Future<Double>> transcodeResultList = pool.invokeAll(transcodeList);
- for (Future<Double> result : transcodeResultList) {
- Double fps = result.get();
+ List<Future<CodecMetrics>> transcodeResultList = pool.invokeAll(transcodeList);
+ for (Future<CodecMetrics> result : transcodeResultList) {
+ Double fps = result.get().fps();
if (fps < 0) {
achievedFrameRate = -1;
} else if (achievedFrameRate >= 0) {
achievedFrameRate += fps;
}
+ frameDropsPerSec += result.get().fdps();
}
if (decodeResultList != null) {
- for (Future<Double> result : decodeResultList) {
- Double fps = result.get();
+ for (Future<CodecMetrics> result : decodeResultList) {
+ Double fps = result.get().fps();
if (fps < 0) {
achievedFrameRate = -1;
} else if (achievedFrameRate >= 0) {
achievedFrameRate += fps;
}
+ frameDropsPerSec += result.get().fdps();
}
}
}
@@ -257,16 +261,22 @@
PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_5;
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_6;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_6_drop;
PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_19;
+ PerformanceClassEvaluator.ConcurrentCodecRequirement r5_1__H_1_19_drop;
if (height > 1080) {
if (useHighBitDepth) {
r5_1__H_1_19 = pce.addR5_1__H_1_19();
+ r5_1__H_1_19_drop = pce.addR5_1__H_1_19_4k_drop();
r5_1__H_1_19.setConcurrentFps(achievedFrameRate);
+ r5_1__H_1_19_drop.setFrameDropsPerSecond(frameDropsPerSec);
} else {
r5_1__H_1_5 = pce.addR5_1__H_1_5_4k();
r5_1__H_1_6 = pce.addR5_1__H_1_6_4k();
+ r5_1__H_1_6_drop = pce.addR5_1__H_1_6_4k_drop();
r5_1__H_1_5.setConcurrentInstances(maxInstances);
r5_1__H_1_6.setConcurrentFps(achievedFrameRate);
+ r5_1__H_1_6_drop.setFrameDropsPerSecond(frameDropsPerSec);
}
} else if (height == 1080) {
r5_1__H_1_5 = pce.addR5_1__H_1_5_1080p();
diff --git a/tests/mediapc/src/android/mediapc/cts/PerformanceClassTest.java b/tests/mediapc/src/android/mediapc/cts/PerformanceClassTest.java
index 27685fb..9970b80 100644
--- a/tests/mediapc/src/android/mediapc/cts/PerformanceClassTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/PerformanceClassTest.java
@@ -27,6 +27,7 @@
import android.media.MediaDrm;
import android.media.MediaFormat;
import android.media.UnsupportedSchemeException;
+import android.mediapc.cts.common.HdrDisplayRequirement;
import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
import android.util.Log;
@@ -205,4 +206,16 @@
pce.submitAndCheck();
}
+
+ @Test
+ @CddTest(requirements = {"2.2.7.3/7.1.1.3/H-3-1"})
+ public void testDisplayHdr() {
+ PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
+ HdrDisplayRequirement r7_1_1_3__h_3_1 = pce.addR7_1_1_3__H_3_1();
+
+ r7_1_1_3__h_3_1.setIsHdr(Utils.IS_HDR);
+ r7_1_1_3__h_3_1.setDisplayLuminance(Utils.HDR_DISPLAY_AVERAGE_LUMINANCE);
+
+ pce.submitAndCheck();
+ }
}
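Note on testDisplayHdr above: Utils.IS_HDR and Utils.HDR_DISPLAY_AVERAGE_LUMINANCE come from the common mediapc Utils class and are not defined in this change. The sketch below only illustrates one plausible way such values could be probed from the default display using the public Display HDR APIs; it is an assumption, not the actual Utils implementation.

    // Illustrative sketch only -- not the actual mediapc Utils code.
    import android.content.Context;
    import android.hardware.display.DisplayManager;
    import android.view.Display;

    final class HdrDisplayProbeSketch {
        static boolean isHdr(Context context) {
            DisplayManager dm = context.getSystemService(DisplayManager.class);
            Display display = dm.getDisplay(Display.DEFAULT_DISPLAY);
            return display != null && display.isHdr();
        }

        static float desiredMaxAverageLuminance(Context context) {
            DisplayManager dm = context.getSystemService(DisplayManager.class);
            Display display = dm.getDisplay(Display.DEFAULT_DISPLAY);
            if (display == null) return 0f;
            Display.HdrCapabilities caps = display.getHdrCapabilities();
            // Nits the display is expected to sustain on average for HDR content.
            return caps != null ? caps.getDesiredMaxAverageLuminance() : 0f;
        }
    }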
diff --git a/tests/mediapc/src/android/mediapc/cts/VideoCodecRequirementsTest.java b/tests/mediapc/src/android/mediapc/cts/VideoCodecRequirementsTest.java
index fc5a7fa..e627ff1 100644
--- a/tests/mediapc/src/android/mediapc/cts/VideoCodecRequirementsTest.java
+++ b/tests/mediapc/src/android/mediapc/cts/VideoCodecRequirementsTest.java
@@ -17,6 +17,8 @@
package android.mediapc.cts;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_Format32bitABGR2101010;
+import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_DynamicColorAspects;
+import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_HlgEditing;
import static android.media.MediaCodecInfo.CodecProfileLevel.AV1Level51;
import static android.media.MediaCodecInfo.CodecProfileLevel.AV1ProfileMain10;
import static android.media.MediaCodecInfo.CodecProfileLevel.AV1ProfileMain8;
@@ -28,26 +30,39 @@
import static android.mediapc.cts.CodecTestBase.selectCodecs;
import static android.mediapc.cts.CodecTestBase.selectHardwareCodecs;
+import static androidx.test.platform.app.InstrumentationRegistry.getInstrumentation;
+
import static org.junit.Assert.assertTrue;
import static java.lang.Math.max;
+import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.ImageDecoder;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CameraManager;
+import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.VideoCapabilities.PerformancePoint;
import android.media.MediaFormat;
+import android.media.MediaRecorder;
+import android.media.codec.Flags;
import android.mediapc.cts.common.PerformanceClassEvaluator;
import android.mediapc.cts.common.Utils;
+import android.platform.test.annotations.RequiresFlagsEnabled;
import android.util.Log;
import android.util.Range;
+import android.util.Size;
+import androidx.annotation.Nullable;
import androidx.test.filters.LargeTest;
import androidx.test.filters.SmallTest;
import com.android.compatibility.common.util.CddTest;
+import com.android.compatibility.common.util.MediaUtils;
import org.junit.Before;
import org.junit.Rule;
@@ -57,6 +72,8 @@
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
@@ -110,6 +127,27 @@
return codecSet;
}
+ @Nullable
+ private static Size getMaxSupportedRecordingSize() throws CameraAccessException {
+ if (!MediaUtils.hasCamera()) return null;
+
+ Context context = getInstrumentation().getTargetContext();
+ CameraManager cm = context.getSystemService(CameraManager.class);
+ String[] cameraIdList = cm.getCameraIdList();
+
+ for (String cameraId : cameraIdList) {
+ CameraCharacteristics characteristics = cm.getCameraCharacteristics(cameraId);
+ StreamConfigurationMap map =
+ characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ if (map != null) {
+ return Arrays.stream(map.getOutputSizes(MediaRecorder.class))
+ .max(Comparator.comparingInt(size -> size.getWidth() * size.getHeight()))
+ .orElse(null);
+ }
+ }
+ return null;
+ }
+
/**
* Validates AV1 hardware decoder is present and supports: Main 10, Level 4.1, Film Grain
*/
@@ -126,7 +164,7 @@
boolean oneCodecDecoding = false;
for (String codec : av1HwDecoders) {
Decode decode = new Decode(MIMETYPE_VIDEO_AV1, FILE_AV1_REQ_SUPPORT, codec, true);
- double achievedRate = decode.doDecode();
+ double achievedRate = decode.doDecode().fps();
if (achievedRate > 0) {
oneCodecDecoding = true;
}
@@ -261,6 +299,136 @@
}
/**
+ * MUST support the FEATURE_HlgEditing feature for all hardware AV1 and HEVC
+ * encoders present on the device, at 4K resolution or the largest camera-supported
+ * resolution, whichever is less.
+ */
+ @SmallTest
+ @RequiresFlagsEnabled(Flags.FLAG_HLG_EDITING)
+ @Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_SMALL_TEST_MS)
+ @CddTest(requirement = "5.1/H-1-20")
+ public void testHlgEditingSupport() throws CameraAccessException {
+ final String[] mediaTypes =
+ {MediaFormat.MIMETYPE_VIDEO_HEVC, MIMETYPE_VIDEO_AV1};
+
+ boolean isFeatureSupported = true;
+ Size size4k = new Size(3840, 2160);
+ int frameSize4k = size4k.getWidth() * size4k.getHeight();
+ Size maxRecordingSize = getMaxSupportedRecordingSize();
+ if (maxRecordingSize == null) {
+ maxRecordingSize = size4k;
+ } else {
+ int frameSize = maxRecordingSize.getWidth() * maxRecordingSize.getHeight();
+ maxRecordingSize = frameSize < frameSize4k ? maxRecordingSize : size4k;
+ }
+
+ outerloop:
+ for (String mediaType : mediaTypes) {
+ ArrayList<String> hwEncoders = selectHardwareCodecs(mediaType, null, null, true);
+ for (String encoder : hwEncoders) {
+ MediaFormat format =
+ MediaFormat.createVideoFormat(mediaType, maxRecordingSize.getWidth(),
+ maxRecordingSize.getHeight());
+ format.setFeatureEnabled(FEATURE_HlgEditing, true);
+ if (!MediaUtils.supports(encoder, format)) {
+ isFeatureSupported = false;
+ break outerloop;
+ }
+ }
+ }
+
+ PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
+ PerformanceClassEvaluator.VideoCodecRequirement HlgEditingSupportReq =
+ pce.addR5_1__H_1_20();
+ HlgEditingSupportReq.setHlgEditingSupportedReq(isFeatureSupported);
+
+ pce.submitAndCheck();
+ }
+
+ /**
+ * [5.1/H-1-21] MUST support FEATURE_DynamicColorAspects for all hardware video decoders
+ * (AVC, HEVC, VP9, AV1 or later)
+ */
+ @SmallTest
+ @RequiresFlagsEnabled(Flags.FLAG_DYNAMIC_COLOR_ASPECTS)
+ @Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_SMALL_TEST_MS)
+ @CddTest(requirement = "5.1/H-1-21")
+ public void testDynamicColorAspectFeature() {
+ final String[] mediaTypes =
+ {MediaFormat.MIMETYPE_VIDEO_AVC, MediaFormat.MIMETYPE_VIDEO_HEVC,
+ MediaFormat.MIMETYPE_VIDEO_VP9, MediaFormat.MIMETYPE_VIDEO_AV1};
+
+ boolean isSupported = true;
+ for (String mediaType : mediaTypes) {
+ isSupported = selectHardwareCodecs(mediaType, null, null, false).stream()
+ .allMatch(decoder -> {
+ CodecCapabilities caps =
+ getCodecInfo(decoder).getCapabilitiesForType(mediaType);
+ return caps != null && caps.isFeatureSupported(FEATURE_DynamicColorAspects);
+ });
+ if (!isSupported) {
+ break;
+ }
+ }
+
+ PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
+ PerformanceClassEvaluator.VideoCodecRequirement DynamicColorAspectsReq =
+ pce.addR5_1__H_1_21();
+ DynamicColorAspectsReq.setDynamicColorAspectsSupportReq(isSupported);
+
+ pce.submitAndCheck();
+ }
+
+ /**
+ * MUST support portrait resolution for all hardware codecs. AV1 codecs are only required to
+ * support 1080p resolution, while other codecs must support 4K or the camera-preferred
+ * resolution, whichever is less.
+ */
+ @SmallTest
+ @Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_SMALL_TEST_MS)
+ @CddTest(requirement = "5.12/H-1-22")
+ public void testPortraitResolutionSupport() throws CameraAccessException {
+ final String[] mediaTypes =
+ {MediaFormat.MIMETYPE_VIDEO_AVC, MediaFormat.MIMETYPE_VIDEO_HEVC,
+ MediaFormat.MIMETYPE_VIDEO_AV1, MediaFormat.MIMETYPE_VIDEO_VP9};
+
+ boolean isSupported = true;
+ Size requiredSize, maxRequiredSize, maxRecordingSize;
+
+ outerloop:
+ for (String mediaType : mediaTypes) {
+ maxRequiredSize = mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AV1)
+ ? new Size(1920, 1080) : new Size(3840, 2160);
+ maxRecordingSize = getMaxSupportedRecordingSize();
+ if (maxRecordingSize == null) {
+ requiredSize = maxRequiredSize;
+ } else {
+ int maxRequiredFrameSize = maxRequiredSize.getWidth() * maxRequiredSize.getHeight();
+ int maxRecFrameSize = maxRecordingSize.getWidth() * maxRecordingSize.getHeight();
+ requiredSize = maxRequiredFrameSize < maxRecFrameSize
+ ? maxRequiredSize : maxRecordingSize;
+ }
+ for (boolean isEncoder : new boolean[] {true, false}) {
+ Size finalRequiredSize = requiredSize;
+ Size rotatedSize = new Size(requiredSize.getHeight(), requiredSize.getWidth());
+ isSupported = selectHardwareCodecs(mediaType, null, null, isEncoder).stream()
+ .allMatch(codec -> MediaUtils.supports(codec, mediaType, finalRequiredSize)
+ && MediaUtils.supports(codec, mediaType, rotatedSize));
+ if (!isSupported) {
+ break outerloop;
+ }
+ }
+ }
+
+ PerformanceClassEvaluator pce = new PerformanceClassEvaluator(this.mTestName);
+ PerformanceClassEvaluator.VideoCodecRequirement portraitResolutionSupportReq =
+ pce.addR5_1__H_1_22();
+ portraitResolutionSupportReq.setPortraitResolutionSupportreq(isSupported);
+
+ pce.submitAndCheck();
+ }
+
+ /**
* MUST support RGBA_1010102 color format for all hardware AV1 and HEVC encoders present on
* the device.
*/
diff --git a/tests/rollback/src/com/android/cts/rollback/RollbackManagerTest.java b/tests/rollback/src/com/android/cts/rollback/RollbackManagerTest.java
index e3ca9ea..da8c7d2 100644
--- a/tests/rollback/src/com/android/cts/rollback/RollbackManagerTest.java
+++ b/tests/rollback/src/com/android/cts/rollback/RollbackManagerTest.java
@@ -638,29 +638,6 @@
}
/**
- * Tests we fail to enable rollbacks if rollbackLifetime times out.
- */
- @Test
- @RequiresFlagsEnabled(Flags.FLAG_ROLLBACK_LIFETIME)
- public void testEnableRollbackLifetimeTimeoutFailsRollback() throws Exception {
-
- Install.single(TestApp.A1).commit();
- RollbackUtils.waitForUnavailableRollback(TestApp.A);
-
- RollbackManager rm = RollbackUtils.getRollbackManager();
- rm.blockRollbackManager(TimeUnit.SECONDS.toMillis(1));
- Install.single(TestApp.A2).setEnableRollback()
- .setRollbackLifetimeMillis(100).commit();
- assertThat(InstallUtils.getInstalledVersion(TestApp.A)).isEqualTo(2);
-
- // Give plenty of time for RollbackManager to unblock and attempt
- // to make the rollback available before asserting that the
- // rollback was not made available.
- Thread.sleep(TimeUnit.SECONDS.toMillis(2));
- assertThat(RollbackUtils.getAvailableRollback(TestApp.A)).isNull();
- }
-
- /**
* Tests we fail to enable rollbacks if enable-rollback times out for any child session.
*/
@Test
diff --git a/tests/tests/attributionsource/src/android/attributionsource/cts/RuntimePermissionsAppOpTrackingTest.kt b/tests/tests/attributionsource/src/android/attributionsource/cts/RuntimePermissionsAppOpTrackingTest.kt
index e2e7884..55055e2 100644
--- a/tests/tests/attributionsource/src/android/attributionsource/cts/RuntimePermissionsAppOpTrackingTest.kt
+++ b/tests/tests/attributionsource/src/android/attributionsource/cts/RuntimePermissionsAppOpTrackingTest.kt
@@ -394,9 +394,18 @@
val recognizerRef = AtomicReference<SpeechRecognizer>()
var currentOperationComplete = CountDownLatch(1)
+ // Makes sure that all runnables for setting up the temporary recognition service are done
+ // before moving on to start the recognizer.
instrumentation.runOnMainSync {
- val recognizer = SpeechRecognizer.createSpeechRecognizer(context,
- ComponentName(RECEIVER2_PACKAGE_NAME, RECOGNITION_SERVICE))
+ instrumentation.uiAutomation
+ .adoptShellPermissionIdentity("android.permission.MANAGE_SPEECH_RECOGNITION")
+ val recognizer = SpeechRecognizer.createOnDeviceTestingSpeechRecognizer(context)
+ recognizer.setTemporaryOnDeviceRecognizer(ComponentName(RECEIVER2_PACKAGE_NAME, RECOGNITION_SERVICE))
+ recognizerRef.set(recognizer)
+ }
+
+ instrumentation.runOnMainSync {
+ val recognizer = recognizerRef.get()
recognizer.setRecognitionListener(object : RecognitionListener {
override fun onReadyForSpeech(params: Bundle?) {}
@@ -415,8 +424,6 @@
val recoIntent = Intent()
recoIntent.putExtra(OPERATION, OPERATION_INJECT_RECO_WITHOUT_ATTRIBUTION)
recognizer.startListening(recoIntent)
-
- recognizerRef.set(recognizer)
}
try {
@@ -503,7 +510,11 @@
intThat(attributionChainIdMatcher))
} finally {
// Take down the recognition service
- instrumentation.runOnMainSync { recognizerRef.get().destroy() }
+ instrumentation.runOnMainSync {
+ recognizerRef.get().setTemporaryOnDeviceRecognizer(null)
+ recognizerRef.get().destroy()
+ instrumentation.uiAutomation.dropShellPermissionIdentity()
+ }
}
}
}
@@ -528,9 +539,18 @@
val recognizerRef = AtomicReference<SpeechRecognizer>()
var currentOperationComplete = CountDownLatch(1)
+ // Makes sure that all runnables for setting up the temporary recognition service are done
+ // before moving on to start the recognizer.
instrumentation.runOnMainSync {
- val recognizer = SpeechRecognizer.createSpeechRecognizer(context,
- ComponentName(RECEIVER2_PACKAGE_NAME, RECOGNITION_SERVICE))
+ instrumentation.uiAutomation
+ .adoptShellPermissionIdentity("android.permission.MANAGE_SPEECH_RECOGNITION")
+ val recognizer = SpeechRecognizer.createOnDeviceTestingSpeechRecognizer(context)
+ recognizer.setTemporaryOnDeviceRecognizer(ComponentName(RECEIVER2_PACKAGE_NAME, RECOGNITION_SERVICE))
+ recognizerRef.set(recognizer)
+ }
+
+ instrumentation.runOnMainSync {
+ val recognizer = recognizerRef.get()
recognizer.setRecognitionListener(object : RecognitionListener {
override fun onReadyForSpeech(params: Bundle?) {}
@@ -549,8 +569,6 @@
val recoIntent = Intent()
recoIntent.putExtra(OPERATION, OPERATION_MIC_RECO_WITH_ATTRIBUTION)
recognizer.startListening(recoIntent)
-
- recognizerRef.set(recognizer)
}
try {
@@ -637,7 +655,11 @@
ATTRIBUTION_FLAG_TRUSTED), intThat(attributionChainIdMatcher))
} finally {
// Take down the recognition service
- instrumentation.runOnMainSync { recognizerRef.get().destroy() }
+ instrumentation.runOnMainSync {
+ recognizerRef.get().setTemporaryOnDeviceRecognizer(null)
+ recognizerRef.get().destroy()
+ instrumentation.uiAutomation.dropShellPermissionIdentity()
+ }
}
}
}
diff --git a/tests/tests/bluetooth/src/android/bluetooth/cts/BluetoothDeviceTest.java b/tests/tests/bluetooth/src/android/bluetooth/cts/BluetoothDeviceTest.java
index 700093e6..28339f7 100644
--- a/tests/tests/bluetooth/src/android/bluetooth/cts/BluetoothDeviceTest.java
+++ b/tests/tests/bluetooth/src/android/bluetooth/cts/BluetoothDeviceTest.java
@@ -61,7 +61,6 @@
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
-import org.junit.function.ThrowingRunnable;
import org.junit.runner.RunWith;
import java.io.IOException;
@@ -630,15 +629,8 @@
BluetoothSocket rfcommSocket = mFakeDevice
.createInsecureRfcommSocketToServiceRecord(mFakeUuid);
- mUiAutomation.dropShellPermissionIdentity();
- // This should throw a SecurityException because one of BLUETOOTH_PRIVILEGED or
- // BLUETOOTH_CONNECT permission is missing.
- enforceConnectAndPrivileged(() -> l2capSocket.getL2capLocalChannelId());
- enforceConnectAndPrivileged(() -> l2capSocket.getL2capRemoteChannelId());
- enforceConnectAndPrivileged(() -> rfcommSocket.getL2capLocalChannelId());
- enforceConnectAndPrivileged(() -> rfcommSocket.getL2capRemoteChannelId());
-
mUiAutomation.adoptShellPermissionIdentity(BLUETOOTH_CONNECT, BLUETOOTH_PRIVILEGED);
+
// This should throw a BluetoothSocketException because it is not L2CAP socket
assertThrows("Unknown L2CAP socket", BluetoothSocketException.class,
() -> rfcommSocket.getL2capLocalChannelId());
@@ -651,14 +643,4 @@
assertThrows("Socket closed", BluetoothSocketException.class,
() -> l2capSocket.getL2capRemoteChannelId());
}
-
- private void enforceConnectAndPrivileged(ThrowingRunnable runnable) {
- // Verify throws SecurityException without permission.BLUETOOTH_PRIVILEGED
- mUiAutomation.adoptShellPermissionIdentity(BLUETOOTH_CONNECT);
- assertThrows(SecurityException.class, runnable);
-
- // Verify throws SecurityException without permission.BLUETOOTH_CONNECT
- mUiAutomation.adoptShellPermissionIdentity(BLUETOOTH_PRIVILEGED);
- assertThrows(SecurityException.class, runnable);
- }
}
diff --git a/tests/tests/database/src/android/database/sqlite/cts/SQLiteDatabaseTest.java b/tests/tests/database/src/android/database/sqlite/cts/SQLiteDatabaseTest.java
index b694320..dc382d0 100644
--- a/tests/tests/database/src/android/database/sqlite/cts/SQLiteDatabaseTest.java
+++ b/tests/tests/database/src/android/database/sqlite/cts/SQLiteDatabaseTest.java
@@ -2315,7 +2315,7 @@
// Compare the actual version to the permitted SQLite release. The test can compare to
// multiple releases here, if multiple releases are permitted.
final int[] expectedVersion342 = { 3, 42, 0 };
- final int[] expectedVersion344 = { 3, 44, 2 };
+ final int[] expectedVersion344 = { 3, 44, 3 };
if (versionIsOkay(expectedVersion342, actual)
|| versionIsOkay(expectedVersion344, actual)) {
return;
diff --git a/tests/tests/graphics/src/android/graphics/cts/BitmapTest.java b/tests/tests/graphics/src/android/graphics/cts/BitmapTest.java
index 8c47359..f1e8aa0 100644
--- a/tests/tests/graphics/src/android/graphics/cts/BitmapTest.java
+++ b/tests/tests/graphics/src/android/graphics/cts/BitmapTest.java
@@ -2516,16 +2516,8 @@
}
int nativeFormat = nGetFormat(bm);
- if (pair.config == Bitmap.Config.RGBA_F16) {
- // It is possible the system does not support RGBA_F16 in HARDWARE.
- // In that case, it will fall back to ARGB_8888.
- assertTrue(nativeFormat == ANDROID_BITMAP_FORMAT_RGBA_8888
- || nativeFormat == ANDROID_BITMAP_FORMAT_RGBA_F16);
- } else if (pair.config == Bitmap.Config.RGBA_1010102) {
- // Devices not supporting RGBA_1010102 in hardware should fallback to ARGB_8888
- assertTrue(nativeFormat == ANDROID_BITMAP_FORMAT_RGBA_8888
- || nativeFormat == ANDROID_BITMAP_FORMAT_RGBA_1010102);
- } else {
+ // We allow everything to fall back to 8888
+ if (nativeFormat != ANDROID_BITMAP_FORMAT_RGBA_8888) {
assertEquals("Config: " + pair.config, pair.format, nativeFormat);
}
}
diff --git a/tests/tests/hardware/src/android/hardware/input/cts/tests/SonyDualshock4BluetoothTest.java b/tests/tests/hardware/src/android/hardware/input/cts/tests/SonyDualshock4BluetoothTest.java
index d8f339a..b3027ed 100644
--- a/tests/tests/hardware/src/android/hardware/input/cts/tests/SonyDualshock4BluetoothTest.java
+++ b/tests/tests/hardware/src/android/hardware/input/cts/tests/SonyDualshock4BluetoothTest.java
@@ -25,6 +25,7 @@
import com.android.cts.kernelinfo.KernelInfo;
+import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -58,6 +59,7 @@
testInputBatteryEvents(R.raw.sony_dualshock4_bluetooth_batteryeventtests);
}
+ @Ignore("b/329585708: Disabled on Android 14 due to changes in touchpad processing")
@Test
public void testAllTouch() throws Throwable {
try (PointerCaptureSession session = new PointerCaptureSession()) {
diff --git a/tests/tests/media/audio/Android.bp b/tests/tests/media/audio/Android.bp
index 76013d1..9e65fda 100644
--- a/tests/tests/media/audio/Android.bp
+++ b/tests/tests/media/audio/Android.bp
@@ -81,6 +81,7 @@
"androidx.test.ext.junit",
"compatibility-device-util-axt",
"cts-media-common",
+ "cts-wm-util",
"ctstestrunner-axt",
"flag-junit",
"guava-android-testlib",
diff --git a/tests/tests/media/audio/src/android/media/audio/cts/AudioHalVersionInfoTest.java b/tests/tests/media/audio/src/android/media/audio/cts/AudioHalVersionInfoTest.java
index 0580a21..873295a 100644
--- a/tests/tests/media/audio/src/android/media/audio/cts/AudioHalVersionInfoTest.java
+++ b/tests/tests/media/audio/src/android/media/audio/cts/AudioHalVersionInfoTest.java
@@ -39,6 +39,7 @@
*
* @throws Exception
*/
+ @SuppressWarnings("SelfComparison")
@Test
public void testComparator() throws Exception {
int listSize = AudioHalVersionInfo.VERSIONS.size();
diff --git a/tests/tests/media/audio/src/android/media/audio/cts/RemoteSubmixTest.java b/tests/tests/media/audio/src/android/media/audio/cts/RemoteSubmixTest.java
new file mode 100644
index 0000000..9a30b67
--- /dev/null
+++ b/tests/tests/media/audio/src/android/media/audio/cts/RemoteSubmixTest.java
@@ -0,0 +1,243 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.audio.cts;
+
+import static android.media.AudioAttributes.ALLOW_CAPTURE_BY_ALL;
+
+import static org.hamcrest.Matchers.greaterThan;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import android.media.AudioAttributes;
+import android.media.AudioAttributes.AttributeUsage;
+import android.media.AudioAttributes.CapturePolicy;
+import android.media.AudioFormat;
+import android.media.AudioManager;
+import android.media.AudioPlaybackCaptureConfiguration;
+import android.media.AudioRecord;
+import android.media.MediaPlayer;
+import android.media.cts.MediaProjectionActivity;
+import android.media.projection.MediaProjection;
+import android.platform.test.annotations.Presubmit;
+import android.view.KeyEvent;
+
+import androidx.test.platform.app.InstrumentationRegistry;
+import androidx.test.rule.ActivityTestRule;
+import androidx.test.uiautomator.UiDevice;
+
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+
+import java.nio.ByteBuffer;
+
+/**
+ * Validate that there is no discontinuity in the AudioRecord data from Remote Submix.
+ *
+ * The tests do the following:
+ * - Start AudioRecord and MediaPlayer.
+ * - Play sine wav audio and read the recorded audio in rawBuffer.
+ * - Add screen lock during playback.
+ * - Stop MediaPlayer and AudioRecord, and then unlock the device.
+ * - Verify that the recorded audio doesn't have any discontinuity.
+ *
+ * Verifies at the sample level that audio playback and capture do not alter the input
+ * signal.
+ */
+
+@Presubmit
+public class RemoteSubmixTest {
+ private static final String TAG = "RemoteSubmixTest";
+ private static final int SAMPLE_RATE = 44100;
+ private static final int DURATION_IN_SEC = 1;
+ private static final int ENCODING_FORMAT = AudioFormat.ENCODING_PCM_16BIT;
+ private static final int CHANNEL_MASK = AudioFormat.CHANNEL_IN_MONO;
+ private static final int BUFFER_SIZE = SAMPLE_RATE * DURATION_IN_SEC
+ * Integer.bitCount(CHANNEL_MASK)
+ * Short.BYTES; // Size in bytes for 16-bit mono at 44.1 kHz
+ private static final int TEST_ITERATIONS = 10; // Repeat to catch intermittent regressions
+ private static final int RETRY_RECORD_READ = 3;
+
+ private AudioManager mAudioManager;
+ private MediaProjectionActivity mActivity;
+ private MediaProjection mMediaProjection;
+ @Rule
+ public ActivityTestRule<MediaProjectionActivity> mActivityRule =
+ new ActivityTestRule<>(MediaProjectionActivity.class);
+
+ @Before
+ public void setup() throws Exception {
+ mActivity = mActivityRule.getActivity();
+ mAudioManager = mActivity.getSystemService(AudioManager.class);
+ mMediaProjection = mActivity.waitForMediaProjection();
+ }
+
+ private AudioRecord createPlaybackCaptureRecord() throws Exception {
+ AudioPlaybackCaptureConfiguration apcConfig =
+ new AudioPlaybackCaptureConfiguration.Builder(mMediaProjection)
+ .addMatchingUsage(AudioAttributes.USAGE_MEDIA)
+ .build();
+
+ AudioFormat audioFormat = new AudioFormat.Builder()
+ .setEncoding(ENCODING_FORMAT)
+ .setSampleRate(SAMPLE_RATE)
+ .setChannelMask(CHANNEL_MASK)
+ .build();
+
+ assertEquals(
+ "matchingUsages", AudioAttributes.USAGE_MEDIA, apcConfig.getMatchingUsages()[0]);
+
+ AudioRecord audioRecord = new AudioRecord.Builder()
+ .setAudioPlaybackCaptureConfig(apcConfig)
+ .setAudioFormat(audioFormat)
+ .build();
+
+ assertEquals("AudioRecord failed to initialized", AudioRecord.STATE_INITIALIZED,
+ audioRecord.getState());
+
+ return audioRecord;
+ }
+
+ private MediaPlayer createMediaPlayer(
+ @CapturePolicy int capturePolicy, int resid, @AttributeUsage int usage) {
+ MediaPlayer mediaPlayer = MediaPlayer.create(mActivity, resid,
+ new AudioAttributes.Builder()
+ .setContentType(AudioAttributes.CONTENT_TYPE_MUSIC)
+ .setUsage(usage)
+ .setAllowedCapturePolicy(capturePolicy)
+ .build(),
+ mAudioManager.generateAudioSessionId());
+ return mediaPlayer;
+ }
+
+ private static ByteBuffer readToBuffer(AudioRecord audioRecord, int bufferSize)
+ throws Exception {
+ assertEquals("AudioRecord is not recording", AudioRecord.RECORDSTATE_RECORDING,
+ audioRecord.getRecordingState());
+ ByteBuffer buffer = ByteBuffer.allocateDirect(bufferSize);
+ int retry = RETRY_RECORD_READ;
+ boolean silence = true;
+ while (silence && buffer.hasRemaining()) {
+ assertNotSame(buffer.remaining() + "/" + bufferSize + " remaining", 0, retry--);
+ int written = audioRecord.read(buffer, buffer.remaining());
+ assertThat("audioRecord did not read frames", written, greaterThan(0));
+ for (int i = 0; i < written; i++) {
+ if (buffer.get() != 0) {
+ silence = false;
+ break;
+ }
+ }
+ }
+ buffer.rewind();
+ return buffer;
+ }
+
+ public void testPlaybackCapture(boolean testWithScreenLock) throws Exception {
+ MediaPlayer mediaPlayer = createMediaPlayer(
+ ALLOW_CAPTURE_BY_ALL, R.raw.sine1320hz5sec, AudioAttributes.USAGE_MEDIA);
+ AudioRecord audioRecord = createPlaybackCaptureRecord();
+ ByteBuffer rawBuffer = null;
+
+ try {
+ audioRecord.startRecording();
+ mediaPlayer.start();
+
+ assertEquals(AudioRecord.RECORDSTATE_RECORDING, audioRecord.getRecordingState());
+ assertTrue(mediaPlayer.isPlaying());
+
+ if (testWithScreenLock) {
+ UiDevice.getInstance(InstrumentationRegistry.getInstrumentation())
+ .pressKeyCode(KeyEvent.KEYCODE_POWER);
+ }
+
+ rawBuffer = readToBuffer(audioRecord, BUFFER_SIZE);
+
+ audioRecord.stop();
+ mediaPlayer.stop();
+
+ assertEquals(AudioRecord.RECORDSTATE_STOPPED, audioRecord.getRecordingState());
+ assertFalse(mediaPlayer.isPlaying());
+
+ } catch (Exception e) {
+ throw e;
+ } finally {
+ if (testWithScreenLock) {
+ UiDevice.getInstance(InstrumentationRegistry.getInstrumentation())
+ .pressKeyCode(KeyEvent.KEYCODE_WAKEUP);
+ UiDevice.getInstance(InstrumentationRegistry.getInstrumentation())
+ .executeShellCommand("wm dismiss-keyguard");
+ }
+
+ audioRecord.release();
+ mediaPlayer.release();
+ }
+
+ assertNotNull("Recorded data is null ", rawBuffer);
+
+ short[] recordArray = new short[BUFFER_SIZE / Short.BYTES];
+
+ for (int i = 0; i < recordArray.length; i++) {
+ recordArray[i] = rawBuffer.getShort();
+ }
+
+ int recordingStartIndex = -1;
+
+ // Skip leading silence of the Recorded Audio
+ for (int i = 0; i < recordArray.length; i++) {
+ if (recordArray[i] != 0) {
+ recordingStartIndex = i;
+ break;
+ }
+ }
+
+ assertFalse("No audio recorded", recordingStartIndex == -1);
+ // Validate that there are no two consecutive zero samples in the recorded sine audio
+ for (int i = recordingStartIndex; i < recordArray.length - 1; i++) {
+ assertFalse("Discontinuity found in the recorded audio\n",
+ recordArray[i] == 0 && recordArray[i + 1] == 0);
+ }
+ }
+
+ @Test
+ public void testRemoteSubmixRecordingContinuity() {
+ for (int i = 0; i < TEST_ITERATIONS; i++) {
+ try {
+ testPlaybackCapture(/* testWithScreenLock */ false);
+ } catch (Exception e) {
+ fail("testPlaybackCapture throws exception: " + e + " at the " + i
+ + "th iteration");
+ }
+ }
+ }
+
+ @Test
+ public void testRemoteSubmixRecordingContinuityWithScreenLock() {
+ for (int i = 0; i < TEST_ITERATIONS; i++) {
+ try {
+ testPlaybackCapture(/* testWithScreenLock */ true);
+ } catch (Exception e) {
+ fail("testPlaybackCapture with screen lock throws exception: " + e + " at the " + i
+ + "th iteration");
+ }
+ }
+ }
+}
diff --git a/tests/tests/media/common/Android.bp b/tests/tests/media/common/Android.bp
index be689bc..597eec9 100644
--- a/tests/tests/media/common/Android.bp
+++ b/tests/tests/media/common/Android.bp
@@ -109,6 +109,7 @@
"compatibility-device-util-axt",
"junit",
"platform-test-annotations",
+ "ctsmediav2common",
],
platform_apis: true,
libs: [
diff --git a/tests/tests/media/common/OWNERS b/tests/tests/media/common/OWNERS
index f6cd87d..642e4b3 100644
--- a/tests/tests/media/common/OWNERS
+++ b/tests/tests/media/common/OWNERS
@@ -1,5 +1,5 @@
# Bug component: 1344
include platform/frameworks/av:/media/OWNERS
+# Bug component: 1345447 = per-file *MediaProjection*
per-file *MediaProjection* = file:platform/frameworks/base:/media/java/android/media/projection/OWNERS
-per-file *MediaProjection* = nmusgrave@google.com
diff --git a/tests/tests/media/drmframework/AndroidTest.xml b/tests/tests/media/drmframework/AndroidTest.xml
index b8d6a9b..2bf04ec 100644
--- a/tests/tests/media/drmframework/AndroidTest.xml
+++ b/tests/tests/media/drmframework/AndroidTest.xml
@@ -42,7 +42,7 @@
</target_preparer>
<target_preparer class="com.android.compatibility.common.tradefed.targetprep.MediaPreparer">
<option name="push-all" value="true" />
- <option name="media-folder-name" value="CtsMediaDrmFrameworkTestCases-2.1" />
+ <option name="media-folder-name" value="CtsMediaDrmFrameworkTestCases-3.0" />
<option name="dynamic-config-module" value="CtsMediaDrmFrameworkTestCases" />
</target_preparer>
<target_preparer class="com.android.tradefed.targetprep.suite.SuiteApkInstaller">
diff --git a/tests/tests/media/drmframework/DynamicConfig.xml b/tests/tests/media/drmframework/DynamicConfig.xml
index 69c08ba..4628f16 100644
--- a/tests/tests/media/drmframework/DynamicConfig.xml
+++ b/tests/tests/media/drmframework/DynamicConfig.xml
@@ -45,6 +45,6 @@
<value>http://redirector.gvt1.com/videoplayback?id=c80658495af60617&itag=17&source=youtube&ip=0.0.0.0&ipbits=0&expire=19000000000&sparams=ip,ipbits,expire,id,itag,source&signature=70E979A621001201BC18622BDBF914FA870BDA40.6E78890B80F4A33A18835F775B1FF64F0A4D0003&key=ik0&user=android-device-test</value>
</entry>
<entry key="media_files_url">
- <value>https://dl.google.com/android/xts/cts/tests/tests/media/drmframework/CtsMediaDrmFrameworkTestCases-2.1.zip</value>
+ <value>https://dl.google.com/android/xts/cts/tests/tests/media/drmframework/CtsMediaDrmFrameworkTestCases-3.0.zip</value>
</entry>
</dynamicConfig>
diff --git a/tests/tests/media/drmframework/OWNERS b/tests/tests/media/drmframework/OWNERS
index a44657f..1344266 100644
--- a/tests/tests/media/drmframework/OWNERS
+++ b/tests/tests/media/drmframework/OWNERS
@@ -1,7 +1,6 @@
# Bug component: 49079
# android-drm-team
conglin@google.com
-edwinwong@google.com
fredgc@google.com
juce@google.com
kylealexander@google.com
@@ -10,3 +9,6 @@
robertshih@google.com
sigquit@google.com
vickymin@google.com
+per-file src/android/media/drmframework/cts/CodecDecoderDrmTest.java = file:platform/frameworks/av:/media/janitors/codec_OWNERS
+per-file src/android/media/drmframework/cts/CodecDecoderMultiAccessUnitDrmTest.java = file:platform/frameworks/av:/media/janitors/codec_OWNERS
+per-file src/android/media/drmframework/cts/CodecDecoderBlockModelMultiAccessUnitDrmTest.java = file:platform/frameworks/av:/media/janitors/codec_OWNERS
diff --git a/tests/tests/media/drmframework/copy_media.sh b/tests/tests/media/drmframework/copy_media.sh
index 80ba35f..c5f8bdc 100755
--- a/tests/tests/media/drmframework/copy_media.sh
+++ b/tests/tests/media/drmframework/copy_media.sh
@@ -17,4 +17,4 @@
[ -z "$MEDIA_ROOT_DIR" ] && MEDIA_ROOT_DIR=$(dirname $0)/..
source $MEDIA_ROOT_DIR/common/copy_media_utils.sh
get_adb_options "$@"
-copy_media "drmframework" "CtsMediaDrmFrameworkTestCases-2.1"
+copy_media "drmframework" "CtsMediaDrmFrameworkTestCases-3.0"
diff --git a/tests/tests/media/drmframework/src/android/media/drmframework/cts/CodecDecoderBlockModelMultiAccessUnitDrmTest.java b/tests/tests/media/drmframework/src/android/media/drmframework/cts/CodecDecoderBlockModelMultiAccessUnitDrmTest.java
new file mode 100644
index 0000000..4c6062c
--- /dev/null
+++ b/tests/tests/media/drmframework/src/android/media/drmframework/cts/CodecDecoderBlockModelMultiAccessUnitDrmTest.java
@@ -0,0 +1,174 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.drmframework.cts;
+
+import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_MultipleFrames;
+import static android.media.drmframework.cts.CodecDecoderDrmTest.convert;
+
+import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeTrue;
+
+import android.media.MediaCodec;
+import android.media.MediaCryptoException;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.NotProvisionedException;
+import android.media.ResourceBusyException;
+import android.media.UnsupportedSchemeException;
+import android.mediav2.common.cts.CodecDecoderBlockModelDrmTestBase;
+import android.mediav2.common.cts.CodecDecoderBlockModelMultiAccessUnitDrmTestBase;
+import android.mediav2.common.cts.OutputManager;
+import android.os.Build;
+import android.platform.test.annotations.AppModeFull;
+import android.platform.test.annotations.RequiresFlagsEnabled;
+
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.SdkSuppress;
+
+import com.android.compatibility.common.util.ApiTest;
+import com.android.media.codec.flags.Flags;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Test secure mediacodec api, decoders and their interactions in byte buffer mode
+ * <p>
+ * The test decodes a clear key scheme encrypted clip and stores the result in ByteBuffer in
+ * block model mode and block model large audio buffer mode. The test expects consistent output
+ * in both scenarios.
+ * <p>
+ */
+@SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM, codeName = "VanillaIceCream")
+@AppModeFull(reason = "Instant apps cannot access the SD card")
+@RequiresFlagsEnabled(Flags.FLAG_LARGE_AUDIO_FRAME)
+@LargeTest
+@RunWith(Parameterized.class)
+public class CodecDecoderBlockModelMultiAccessUnitDrmTest
+ extends CodecDecoderBlockModelMultiAccessUnitDrmTestBase {
+ private static final String MEDIA_DIR = WorkDir.getMediaDirString();
+ private static final UUID CLEAR_KEY_IDENTIFIER =
+ new UUID(0x1077efecc0b24d02L, 0xace33c1e52e2fb4bL);
+ private static final byte[] DRM_INIT_DATA = convert(new int[]{
+ // BMFF box header (4 bytes size + 'pssh')
+ 0x00, 0x00, 0x00, 0x34, 0x70, 0x73, 0x73, 0x68,
+ // Full box header (version = 1 flags = 0)
+ 0x01, 0x00, 0x00, 0x00,
+ // W3C Common PSSH box SystemID
+ 0x10, 0x77, 0xef, 0xec, 0xc0, 0xb2, 0x4d, 0x02, 0xac, 0xe3, 0x3c,
+ 0x1e, 0x52, 0xe2, 0xfb, 0x4b,
+ // Number of key ids
+ 0x00, 0x00, 0x00, 0x01,
+ // Key id
+ 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30,
+ 0x30, 0x30, 0x30, 0x30, 0x30,
+ // size of data, must be zero
+ 0x00, 0x00, 0x00, 0x00});
+ private static final byte[] CLEAR_KEY_CENC = convert(new int[]{
+ // Content key
+ 0x3f, 0x0a, 0x33, 0xf3, 0x40, 0x98, 0xb9, 0xe2,
+ 0x2b, 0xc0, 0x78, 0xe0, 0xa1, 0xb5, 0xe8, 0x54});
+ private static final int[][] OUT_SIZE_IN_MS = {
+ {1000, 250}, // max out size, threshold batch out size
+ {1000, 100},
+ {500, 20},
+ {100, 100},
+ {40, 100}
+ };
+
+ public CodecDecoderBlockModelMultiAccessUnitDrmTest(String decoder, String mediaType,
+ String testFile, String allTestParams) {
+ super(decoder, mediaType, MEDIA_DIR + testFile, allTestParams);
+ }
+
+ @Parameterized.Parameters(name = "{index}_{0}_{1}")
+ public static Collection<Object[]> input() {
+ final boolean isEncoder = false;
+ final boolean needAudio = true;
+ final boolean needVideo = false;
+ final List<Object[]> exhaustiveArgsList = new ArrayList<>(Arrays.asList(new Object[][]{
+ {MediaFormat.MIMETYPE_AUDIO_AAC, "bbb_stereo_48kHz_192kbps_aac_cenc.mp4"},
+ {MediaFormat.MIMETYPE_AUDIO_FLAC, "bbb_stereo_48kHz_flac_cenc.mp4"},
+ {MediaFormat.MIMETYPE_AUDIO_MPEG, "bbb_stereo_48kHz_192kbps_mp3_cenc.mp4"},
+ {MediaFormat.MIMETYPE_AUDIO_OPUS, "bbb_stereo_48kHz_192kbps_opus_cenc.mp4"},
+ }));
+ return prepareParamList(exhaustiveArgsList, isEncoder, needAudio, needVideo, false);
+ }
+
+ /**
+ * Check description of class {@link CodecDecoderBlockModelMultiAccessUnitDrmTest}
+ */
+ @ApiTest(apis = {"android.media.MediaCodec#configure",
+ "android.media.MediaCodec.Request#setEncryptedMultiFrameLinearBlock",
+ "android.media.MediaCodec#CONFIGURE_FLAG_USE_BLOCK_MODEL",
+ "android.media.MediaCodec.Request#setEncryptedLinearBlock",
+ "android.media.MediaFormat#KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE",
+ "android.media.MediaFormat#KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE",
+ "android.media.MediaCodec.Callback#onOutputBuffersAvailable"})
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testSimpleDecode() throws IOException, InterruptedException,
+ UnsupportedSchemeException, NotProvisionedException, ResourceBusyException,
+ MediaCryptoException {
+ assumeTrue(mCodecName + " does not support FEATURE_MultipleFrames",
+ isFeatureSupported(mCodecName, mMediaType, FEATURE_MultipleFrames));
+
+ CodecDecoderBlockModelDrmTestBase codecDecoderBlockModelDrmtb =
+ new CodecDecoderBlockModelDrmTestBase(mCodecName, mMediaType, null, mAllTestParams);
+ codecDecoderBlockModelDrmtb.setUpCrypto(CLEAR_KEY_IDENTIFIER, DRM_INIT_DATA,
+ new byte[][]{CLEAR_KEY_CENC});
+ codecDecoderBlockModelDrmtb.decodeToMemory(mTestFile, mCodecName, 0,
+ MediaExtractor.SEEK_TO_CLOSEST_SYNC, Integer.MAX_VALUE);
+ codecDecoderBlockModelDrmtb.tearDownCrypto();
+ OutputManager ref = codecDecoderBlockModelDrmtb.getOutputManager();
+
+ mSaveToMem = true;
+ mOutputBuff = new OutputManager(ref.getSharedErrorLogs());
+ MediaFormat format = setUpSource(mTestFile);
+ int maxSampleSize = getMaxSampleSizeForMediaType(mTestFile, mMediaType);
+ mCodec = MediaCodec.createByCodecName(mCodecName);
+ for (int[] outSizeInMs : OUT_SIZE_IN_MS) {
+ configureKeysForLargeAudioBlockModelFrameMode(format, maxSampleSize, outSizeInMs[0],
+ outSizeInMs[1]);
+ mOutputBuff.reset();
+ setUpCrypto(CLEAR_KEY_IDENTIFIER, DRM_INIT_DATA, new byte[][]{CLEAR_KEY_CENC});
+ configureCodec(format, true, true, false);
+ mMaxInputLimitMs = outSizeInMs[0];
+ mCodec.start();
+ mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ doWork(Integer.MAX_VALUE);
+ queueEOS();
+ waitForAllOutputs();
+ mCodec.reset();
+ tearDownCrypto();
+ if (!ref.equalsByteOutput(mOutputBuff)) {
+ fail("Output of decoder component when fed with multiple access units in single"
+ + " enqueue call differs from output received when each access unit is fed"
+ + " separately. \n" + mTestConfig + mTestEnv + mOutputBuff.getErrMsg());
+ }
+ }
+ mCodec.release();
+ mExtractor.release();
+ }
+}
diff --git a/tests/tests/media/drmframework/src/android/media/drmframework/cts/CodecDecoderDrmTest.java b/tests/tests/media/drmframework/src/android/media/drmframework/cts/CodecDecoderDrmTest.java
new file mode 100644
index 0000000..9cc9328
--- /dev/null
+++ b/tests/tests/media/drmframework/src/android/media/drmframework/cts/CodecDecoderDrmTest.java
@@ -0,0 +1,140 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.drmframework.cts;
+
+import static org.junit.Assert.fail;
+
+import android.media.MediaCryptoException;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.NotProvisionedException;
+import android.media.ResourceBusyException;
+import android.media.UnsupportedSchemeException;
+import android.mediav2.common.cts.CodecDecoderBlockModelDrmTestBase;
+import android.mediav2.common.cts.CodecDecoderDrmTestBase;
+import android.mediav2.common.cts.OutputManager;
+
+import androidx.test.filters.LargeTest;
+
+import com.android.compatibility.common.util.ApiTest;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Test secure mediacodec api, decoders and their interactions in byte buffer mode
+ * <p>
+ * The test decodes a clear key scheme encrypted clip and stores the result in ByteBuffer in
+ * normal mode and block model mode. The test expects consistent output in both scenarios.
+ * <p>
+ */
+@RunWith(Parameterized.class)
+public class CodecDecoderDrmTest extends CodecDecoderDrmTestBase {
+ private static final String MEDIA_DIR = WorkDir.getMediaDirString();
+ private static final UUID CLEAR_KEY_IDENTIFIER =
+ new UUID(0x1077efecc0b24d02L, 0xace33c1e52e2fb4bL);
+ private static final byte[] DRM_INIT_DATA = convert(new int[]{
+ // BMFF box header (4 bytes size + 'pssh')
+ 0x00, 0x00, 0x00, 0x34, 0x70, 0x73, 0x73, 0x68,
+ // Full box header (version = 1 flags = 0)
+ 0x01, 0x00, 0x00, 0x00,
+ // W3C Common PSSH box SystemID
+ 0x10, 0x77, 0xef, 0xec, 0xc0, 0xb2, 0x4d, 0x02, 0xac, 0xe3, 0x3c,
+ 0x1e, 0x52, 0xe2, 0xfb, 0x4b,
+ // Number of key ids
+ 0x00, 0x00, 0x00, 0x01,
+ // Key id
+ 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30,
+ 0x30, 0x30, 0x30, 0x30, 0x30,
+ // size of data, must be zero
+ 0x00, 0x00, 0x00, 0x00});
+ private static final byte[] CLEAR_KEY_CENC = convert(new int[]{
+ // Content key
+ 0x3f, 0x0a, 0x33, 0xf3, 0x40, 0x98, 0xb9, 0xe2,
+ 0x2b, 0xc0, 0x78, 0xe0, 0xa1, 0xb5, 0xe8, 0x54});
+
+ public CodecDecoderDrmTest(String decoder, String mediaType, String testFile,
+ String allTestParams) {
+ super(decoder, mediaType, MEDIA_DIR + testFile, allTestParams);
+ }
+
+ @Parameterized.Parameters(name = "{index}_{0}_{1}")
+ public static Collection<Object[]> input() {
+ final boolean isEncoder = false;
+ final boolean needAudio = true;
+ final boolean needVideo = false;
+ final List<Object[]> exhaustiveArgsList = new ArrayList<>(Arrays.asList(new Object[][]{
+ {MediaFormat.MIMETYPE_AUDIO_AAC, "bbb_stereo_48kHz_192kbps_aac_cenc.mp4"},
+ {MediaFormat.MIMETYPE_AUDIO_FLAC, "bbb_stereo_48kHz_flac_cenc.mp4"},
+ {MediaFormat.MIMETYPE_AUDIO_MPEG, "bbb_stereo_48kHz_192kbps_mp3_cenc.mp4"},
+ {MediaFormat.MIMETYPE_AUDIO_OPUS, "bbb_stereo_48kHz_192kbps_opus_cenc.mp4"},
+ }));
+ return prepareParamList(exhaustiveArgsList, isEncoder, needAudio, needVideo, false);
+ }
+
+ static byte[] convert(int[] intArray) {
+ byte[] byteArray = new byte[intArray.length];
+ for (int i = 0; i < intArray.length; ++i) {
+ byteArray[i] = (byte) intArray[i];
+ }
+ return byteArray;
+ }
+
+ /**
+ * Check description of class {@link CodecDecoderDrmTest}
+ */
+ @ApiTest(apis = {"android.media.MediaCodec#configure",
+ "android.media.MediaCodec#queueSecureInputBuffer",
+ "android.media.MediaCodec#CONFIGURE_FLAG_USE_BLOCK_MODEL",
+ "android.media.MediaCodec.Request#setEncryptedLinearBlock"})
+ @LargeTest
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testSimpleDecode() throws IOException, InterruptedException,
+ UnsupportedSchemeException, NotProvisionedException, ResourceBusyException,
+ MediaCryptoException {
+ CodecDecoderDrmTestBase cddrmtb =
+ new CodecDecoderDrmTestBase(mCodecName, mMediaType, null, mAllTestParams);
+ cddrmtb.setUpCrypto(CLEAR_KEY_IDENTIFIER, DRM_INIT_DATA, new byte[][]{CLEAR_KEY_CENC});
+ cddrmtb.decodeToMemory(mTestFile, mCodecName, 0, MediaExtractor.SEEK_TO_CLOSEST_SYNC,
+ Integer.MAX_VALUE);
+ cddrmtb.tearDownCrypto();
+
+ if (IS_AT_LEAST_R) {
+ OutputManager ref = cddrmtb.getOutputManager();
+ CodecDecoderBlockModelDrmTestBase cdbmdrmtb = new CodecDecoderBlockModelDrmTestBase(
+ mCodecName, mMediaType, null, mAllTestParams);
+ OutputManager test = new OutputManager(ref.getSharedErrorLogs());
+ cdbmdrmtb.setUpCrypto(CLEAR_KEY_IDENTIFIER, DRM_INIT_DATA,
+ new byte[][]{CLEAR_KEY_CENC});
+ cdbmdrmtb.decodeToMemory(mTestFile, mCodecName, test, 0,
+ MediaExtractor.SEEK_TO_CLOSEST_SYNC, Integer.MAX_VALUE);
+ cdbmdrmtb.tearDownCrypto();
+ if (!ref.equals(test)) {
+ fail("Output in block model mode is not same as output in normal mode. \n"
+ + mTestConfig + mTestEnv + test.getErrMsg());
+ }
+ }
+ }
+}
diff --git a/tests/tests/media/drmframework/src/android/media/drmframework/cts/CodecDecoderMultiAccessUnitDrmTest.java b/tests/tests/media/drmframework/src/android/media/drmframework/cts/CodecDecoderMultiAccessUnitDrmTest.java
new file mode 100644
index 0000000..8e71892
--- /dev/null
+++ b/tests/tests/media/drmframework/src/android/media/drmframework/cts/CodecDecoderMultiAccessUnitDrmTest.java
@@ -0,0 +1,180 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.media.drmframework.cts;
+
+import static android.media.drmframework.cts.CodecDecoderDrmTest.convert;
+import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_MultipleFrames;
+
+import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeTrue;
+
+import android.media.MediaCodec;
+import android.media.MediaCryptoException;
+import android.media.MediaExtractor;
+import android.media.MediaFormat;
+import android.media.NotProvisionedException;
+import android.media.ResourceBusyException;
+import android.media.UnsupportedSchemeException;
+import android.mediav2.common.cts.CodecDecoderDrmTestBase;
+import android.mediav2.common.cts.CodecDecoderMultiAccessUnitDrmTestBase;
+import android.mediav2.common.cts.OutputManager;
+import android.os.Build;
+import android.platform.test.annotations.AppModeFull;
+import android.platform.test.annotations.RequiresFlagsEnabled;
+
+import androidx.test.filters.LargeTest;
+import androidx.test.filters.SdkSuppress;
+
+import com.android.compatibility.common.util.ApiTest;
+import com.android.media.codec.flags.Flags;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+import java.util.UUID;
+
+/**
+ * Test secure mediacodec api, decoders and their interactions in byte buffer mode
+ * <p>
+ * The test decodes a clear key scheme encrypted clip and stores the result in ByteBuffer in
+ * normal mode and large audio buffer mode. The test expects consistent output in both scenarios.
+ * <p>
+ */
+@SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM, codeName = "VanillaIceCream")
+@AppModeFull(reason = "Instant apps cannot access the SD card")
+@RequiresFlagsEnabled(Flags.FLAG_LARGE_AUDIO_FRAME)
+@LargeTest
+@RunWith(Parameterized.class)
+public class CodecDecoderMultiAccessUnitDrmTest extends CodecDecoderMultiAccessUnitDrmTestBase {
+ private static final String MEDIA_DIR = WorkDir.getMediaDirString();
+ private static final UUID CLEAR_KEY_IDENTIFIER =
+ new UUID(0x1077efecc0b24d02L, 0xace33c1e52e2fb4bL);
+ private static final byte[] DRM_INIT_DATA = convert(new int[]{
+ // BMFF box header (4 bytes size + 'pssh')
+ 0x00, 0x00, 0x00, 0x34, 0x70, 0x73, 0x73, 0x68,
+ // Full box header (version = 1 flags = 0)
+ 0x01, 0x00, 0x00, 0x00,
+ // W3C Common PSSH box SystemID
+ 0x10, 0x77, 0xef, 0xec, 0xc0, 0xb2, 0x4d, 0x02, 0xac, 0xe3, 0x3c,
+ 0x1e, 0x52, 0xe2, 0xfb, 0x4b,
+ // Number of key ids
+ 0x00, 0x00, 0x00, 0x01,
+ // Key id
+ 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30, 0x30,
+ 0x30, 0x30, 0x30, 0x30, 0x30,
+ // size of data, must be zero
+ 0x00, 0x00, 0x00, 0x00});
+ private static final byte[] CLEAR_KEY_CENC = convert(new int[]{
+ // Content key
+ 0x3f, 0x0a, 0x33, 0xf3, 0x40, 0x98, 0xb9, 0xe2,
+ 0x2b, 0xc0, 0x78, 0xe0, 0xa1, 0xb5, 0xe8, 0x54});
+ private static final int[][] OUT_SIZE_IN_MS = {
+ {1000, 250}, // max out size, threshold batch out size
+ {1000, 100},
+ {500, 20},
+ {100, 100},
+ {40, 100}
+ };
+
+ public CodecDecoderMultiAccessUnitDrmTest(String decoder, String mediaType, String testFile,
+ String allTestParams) {
+ super(decoder, mediaType, MEDIA_DIR + testFile, allTestParams);
+ }
+
+ @Parameterized.Parameters(name = "{index}_{0}_{1}")
+ public static Collection<Object[]> input() {
+ final boolean isEncoder = false;
+ final boolean needAudio = true;
+ final boolean needVideo = false;
+ final List<Object[]> exhaustiveArgsList = new ArrayList<>(Arrays.asList(new Object[][]{
+ {MediaFormat.MIMETYPE_AUDIO_AAC, "bbb_stereo_48kHz_192kbps_aac_cenc.mp4"},
+ {MediaFormat.MIMETYPE_AUDIO_FLAC, "bbb_stereo_48kHz_flac_cenc.mp4"},
+ {MediaFormat.MIMETYPE_AUDIO_MPEG, "bbb_stereo_48kHz_192kbps_mp3_cenc.mp4"},
+ {MediaFormat.MIMETYPE_AUDIO_OPUS, "bbb_stereo_48kHz_192kbps_opus_cenc.mp4"},
+ }));
+ return prepareParamList(exhaustiveArgsList, isEncoder, needAudio, needVideo, false);
+ }
+
+ /**
+ * Check description of class {@link CodecDecoderMultiAccessUnitDrmTest}
+ */
+ @ApiTest(apis = {"android.media.MediaCodec#configure",
+ "android.media.MediaCodec#queueSecureInputBuffer",
+ "android.media.MediaFormat#KEY_BUFFER_BATCH_MAX_OUTPUT_SIZE",
+ "android.media.MediaFormat#KEY_BUFFER_BATCH_THRESHOLD_OUTPUT_SIZE",
+ "android.media.MediaCodec#queueSecureInputBuffers",
+ "android.media.MediaCodec.Callback#onOutputBuffersAvailable"})
+ @Test(timeout = PER_TEST_TIMEOUT_LARGE_TEST_MS)
+ public void testSimpleDecode() throws IOException, InterruptedException,
+ UnsupportedSchemeException, NotProvisionedException, ResourceBusyException,
+ MediaCryptoException {
+ assumeTrue(mCodecName + " does not support FEATURE_MultipleFrames",
+ isFeatureSupported(mCodecName, mMediaType, FEATURE_MultipleFrames));
+
+ CodecDecoderDrmTestBase cddrmtb =
+ new CodecDecoderDrmTestBase(mCodecName, mMediaType, null, mAllTestParams);
+ cddrmtb.setUpCrypto(CLEAR_KEY_IDENTIFIER, DRM_INIT_DATA, new byte[][]{CLEAR_KEY_CENC});
+ cddrmtb.decodeToMemory(mTestFile, mCodecName, 0, MediaExtractor.SEEK_TO_CLOSEST_SYNC,
+ Integer.MAX_VALUE);
+ cddrmtb.tearDownCrypto();
+ OutputManager ref = cddrmtb.getOutputManager();
+
+ boolean[] boolStates = {true, false};
+ mSaveToMem = true;
+ OutputManager testA = new OutputManager(ref.getSharedErrorLogs());
+ OutputManager testB = new OutputManager(ref.getSharedErrorLogs());
+ MediaFormat format = setUpSource(mTestFile);
+ int maxSampleSize = getMaxSampleSizeForMediaType(mTestFile, mMediaType);
+ mCodec = MediaCodec.createByCodecName(mCodecName);
+ for (int[] outSizeInMs : OUT_SIZE_IN_MS) {
+ configureKeysForLargeAudioFrameMode(format, maxSampleSize, outSizeInMs[0],
+ outSizeInMs[1]);
+ for (boolean eosType : boolStates) {
+ mOutputBuff = eosType ? testA : testB;
+ mOutputBuff.reset();
+ setUpCrypto(CLEAR_KEY_IDENTIFIER, DRM_INIT_DATA, new byte[][]{CLEAR_KEY_CENC});
+ configureCodec(format, true, eosType, false);
+ mMaxInputLimitMs = outSizeInMs[0];
+ mCodec.start();
+ mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
+ doWork(Integer.MAX_VALUE);
+ queueEOS();
+ waitForAllOutputs();
+ mCodec.reset();
+ tearDownCrypto();
+ if (!ref.equalsByteOutput(mOutputBuff)) {
+ fail("Output of decoder component when fed with multiple access units in "
+ + "single enqueue call differs from output received when each access "
+ + "unit is fed separately. \n"
+ + mTestConfig + mTestEnv + mOutputBuff.getErrMsg());
+ }
+ }
+ if (!testA.equals(testB)) {
+ fail("Output of decoder component is not consistent across runs. \n" + mTestConfig
+ + mTestEnv + testB.getErrMsg());
+ }
+ }
+ mCodec.release();
+ mExtractor.release();
+ }
+}
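Note: setUpCrypto() used by the test above is defined outside this hunk. As a rough, hypothetical sketch of what ClearKey session setup against the PSSH init data typically looks like (helper and class names are illustrative, only standard android.media APIs are used):

    import android.media.MediaCrypto;
    import android.media.MediaDrm;
    import java.nio.charset.StandardCharsets;
    import java.util.Base64;
    import java.util.UUID;

    // Hypothetical illustration only; not part of this change. Opens a ClearKey DRM
    // session, issues a key request for the PSSH init data, and answers it with a
    // JSON Web Key Set built from a known key id / key pair.
    final class ClearKeySessionSketch {
        static final UUID CLEAR_KEY_UUID = new UUID(0x1077efecc0b24d02L, 0xace33c1e52e2fb4bL);

        static MediaCrypto open(byte[] psshInitData, byte[] keyId, byte[] key) throws Exception {
            MediaDrm drm = new MediaDrm(CLEAR_KEY_UUID);
            byte[] sessionId = drm.openSession();
            // The request payload is not needed here; the JWK response is built directly
            // from the known key id / key pair.
            drm.getKeyRequest(sessionId, psshInitData, "cenc", MediaDrm.KEY_TYPE_STREAMING, null);
            String jwks = "{\"keys\":[{\"kty\":\"oct\",\"kid\":\"" + base64Url(keyId)
                    + "\",\"k\":\"" + base64Url(key) + "\"}]}";
            drm.provideKeyResponse(sessionId, jwks.getBytes(StandardCharsets.UTF_8));
            return new MediaCrypto(CLEAR_KEY_UUID, sessionId);
        }

        private static String base64Url(byte[] data) {
            return Base64.getUrlEncoder().withoutPadding().encodeToString(data);
        }
    }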
diff --git a/tests/tests/media/drmframework/src/android/media/drmframework/cts/WorkDir.java b/tests/tests/media/drmframework/src/android/media/drmframework/cts/WorkDir.java
index dedc223..293e798 100644
--- a/tests/tests/media/drmframework/src/android/media/drmframework/cts/WorkDir.java
+++ b/tests/tests/media/drmframework/src/android/media/drmframework/cts/WorkDir.java
@@ -20,6 +20,6 @@
class WorkDir extends WorkDirBase {
public static final String getMediaDirString() {
- return getMediaDirString("CtsMediaDrmFrameworkTestCases-2.1");
+ return getMediaDirString("CtsMediaDrmFrameworkTestCases-3.0");
}
}
diff --git a/tests/tests/media/misc/src/android/media/misc/cts/CamcorderProfileTest.java b/tests/tests/media/misc/src/android/media/misc/cts/CamcorderProfileTest.java
index 5e0dd0f..53e4e48 100644
--- a/tests/tests/media/misc/src/android/media/misc/cts/CamcorderProfileTest.java
+++ b/tests/tests/media/misc/src/android/media/misc/cts/CamcorderProfileTest.java
@@ -21,6 +21,7 @@
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import android.content.Context;
@@ -31,6 +32,8 @@
import android.media.CamcorderProfile;
import android.media.EncoderProfiles;
import android.media.MediaCodecInfo;
+import android.media.MediaCodecInfo.CodecProfileLevel;
+import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.MediaRecorder;
import android.util.Log;
@@ -561,6 +564,192 @@
}
}
+ MediaCodecList mCodecList = new MediaCodecList(MediaCodecList.ALL_CODECS);
+
+ private void checkSupportedEncoder(
+ String mediaType, int width, int height, int frameRate, int profile) {
+ MediaFormat format = MediaFormat.createVideoFormat(mediaType, width, height);
+ format.setInteger(MediaFormat.KEY_FRAME_RATE, frameRate);
+ format.setInteger(MediaFormat.KEY_PROFILE, profile);
+ format.setInteger(MediaFormat.KEY_LEVEL, 0 /* unknown */);
+ for (MediaCodecInfo info : mCodecList.getCodecInfos()) {
+ if (!info.isEncoder() || !info.isHardwareAccelerated()) {
+ continue;
+ }
+ MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(mediaType);
+ if (caps == null) {
+ continue;
+ }
+ if (caps.isFormatSupported(format)) {
+ return;
+ }
+ }
+ fail(
+ "No supported encoder found: "
+ + width
+ + "x"
+ + height
+ + "@"
+ + frameRate
+ + " for "
+ + mediaType
+ + " at profile "
+ + profile);
+ }
+
+ private boolean checkHdrProfile(
+ int cameraId, String mediaType, int camHdrFormat, List<Integer> mediaHdrProfiles) {
+ boolean hasSupportedProfiles = false;
+ for (Integer quality : ALL_SUPPORTED_QUALITIES) {
+ if (!CamcorderProfile.hasProfile(cameraId, quality)) {
+ continue;
+ }
+ CamcorderProfile profile = getWithOptionalId(quality, cameraId);
+ if (profile == null) {
+ continue;
+ }
+ EncoderProfiles allProfiles =
+ CamcorderProfile.getAll(String.valueOf(cameraId), quality);
+ for (EncoderProfiles.VideoProfile videoProfile : allProfiles.getVideoProfiles()) {
+ Log.i(
+ TAG,
+ "Video encoder profile: cameraId="
+ + cameraId
+ + " mediaType="
+ + videoProfile.getMediaType()
+ + " hdrFormat="
+ + videoProfile.getHdrFormat()
+ + " profile="
+ + videoProfile.getProfile()
+ + " "
+ + videoProfile.getWidth()
+ + "x"
+ + videoProfile.getHeight()
+ + "@"
+ + videoProfile.getFrameRate());
+ if (!videoProfile.getMediaType().equals(mediaType)
+ || videoProfile.getHdrFormat() != camHdrFormat) {
+ continue;
+ }
+ assertTrue(
+ "Unexpected video profile: "
+ + videoProfile.getProfile()
+ + " expected to be one in "
+ + mediaHdrProfiles,
+ mediaHdrProfiles.contains(videoProfile.getProfile()));
+ checkSupportedEncoder(
+ mediaType,
+ videoProfile.getWidth(),
+ videoProfile.getHeight(),
+ videoProfile.getFrameRate(),
+ videoProfile.getProfile());
+ hasSupportedProfiles = true;
+ }
+ }
+ return hasSupportedProfiles;
+ }
+
+ private void checkAllHdrProfile(
+ String mediaType, int camHdrFormat, List<Integer> mediaHdrProfiles) {
+ int nCamera = Camera.getNumberOfCameras();
+ Context context = InstrumentationRegistry.getContext();
+ assertNotNull("did not find context", context);
+ boolean hasSupportedProfiles = false;
+ for (int cameraId = 0; cameraId < nCamera; cameraId++) {
+ boolean isExternal = false;
+ try {
+ isExternal = CameraUtils.isExternal(context, cameraId);
+ } catch (Exception e) {
+ Log.e(TAG, "Unable to query external camera: " + e);
+ }
+
+ if (!isExternal) {
+ if (checkHdrProfile(cameraId, mediaType, camHdrFormat, mediaHdrProfiles)) {
+ hasSupportedProfiles = true;
+ }
+ }
+ }
+ assumeTrue(
+ "No profile detected for mediaType="
+ + mediaType
+ + " hdrFormat="
+ + camHdrFormat,
+ hasSupportedProfiles);
+ }
+
+ @Test
+ public void testHevcHlgEncoderSupport() {
+ checkAllHdrProfile(
+ MediaFormat.MIMETYPE_VIDEO_HEVC,
+ EncoderProfiles.VideoProfile.HDR_HLG,
+ List.of(CodecProfileLevel.HEVCProfileMain10));
+ }
+
+ @Test
+ public void testHevcHdr10EncoderSupport() {
+ checkAllHdrProfile(
+ MediaFormat.MIMETYPE_VIDEO_HEVC,
+ EncoderProfiles.VideoProfile.HDR_HDR10,
+ List.of(CodecProfileLevel.HEVCProfileMain10HDR10));
+ }
+
+ @Test
+ public void testHevcHdr10PlusEncoderSupport() {
+ checkAllHdrProfile(
+ MediaFormat.MIMETYPE_VIDEO_HEVC,
+ EncoderProfiles.VideoProfile.HDR_HDR10PLUS,
+ List.of(CodecProfileLevel.HEVCProfileMain10HDR10Plus));
+ }
+
+ @Test
+ public void testVp9HlgEncoderSupport() {
+ checkAllHdrProfile(
+ MediaFormat.MIMETYPE_VIDEO_VP9,
+ EncoderProfiles.VideoProfile.HDR_HLG,
+ List.of(CodecProfileLevel.VP9Profile2, CodecProfileLevel.VP9Profile3));
+ }
+
+ @Test
+ public void testVp9Hdr10EncoderSupport() {
+ checkAllHdrProfile(
+ MediaFormat.MIMETYPE_VIDEO_VP9,
+ EncoderProfiles.VideoProfile.HDR_HDR10,
+ List.of(CodecProfileLevel.VP9Profile2HDR, CodecProfileLevel.VP9Profile3HDR));
+ }
+
+ @Test
+ public void testVp9Hdr10PlusEncoderSupport() {
+ checkAllHdrProfile(
+ MediaFormat.MIMETYPE_VIDEO_VP9,
+ EncoderProfiles.VideoProfile.HDR_HDR10PLUS,
+ List.of(CodecProfileLevel.VP9Profile2HDR10Plus,
+ CodecProfileLevel.VP9Profile3HDR10Plus));
+ }
+
+ @Test
+ public void testAv1HlgEncoderSupport() {
+ checkAllHdrProfile(
+ MediaFormat.MIMETYPE_VIDEO_AV1,
+ EncoderProfiles.VideoProfile.HDR_HLG,
+ List.of(CodecProfileLevel.AV1ProfileMain10));
+ }
+
+ @Test
+ public void testAv1Hdr10EncoderSupport() {
+ checkAllHdrProfile(
+ MediaFormat.MIMETYPE_VIDEO_AV1,
+ EncoderProfiles.VideoProfile.HDR_HDR10,
+ List.of(CodecProfileLevel.AV1ProfileMain10HDR10));
+ }
+
+ @Test
+ public void testAv1Hdr10PlusEncoderSupport() {
+ checkAllHdrProfile(
+ MediaFormat.MIMETYPE_VIDEO_AV1,
+ EncoderProfiles.VideoProfile.HDR_HDR10PLUS,
+ List.of(CodecProfileLevel.AV1ProfileMain10HDR10Plus));
+ }
+
private boolean isSizeSupported(int width, int height, List<Size> sizes) {
if (sizes == null) return true;
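The checkHdrProfile() walk added above mirrors what an application would do to discover HDR-capable recording profiles. A condensed usage sketch of the same EncoderProfiles API surface (hypothetical helper, not taken from this change):

    import android.media.CamcorderProfile;
    import android.media.EncoderProfiles;
    import android.media.MediaFormat;

    // Illustrative helper: returns the first HEVC video profile advertised with HLG
    // for the given camera and quality, or null if none is advertised.
    final class HdrProfileLookup {
        static EncoderProfiles.VideoProfile findHevcHlgProfile(String cameraId, int quality) {
            EncoderProfiles all = CamcorderProfile.getAll(cameraId, quality);
            if (all == null) {
                return null;
            }
            for (EncoderProfiles.VideoProfile p : all.getVideoProfiles()) {
                if (MediaFormat.MIMETYPE_VIDEO_HEVC.equals(p.getMediaType())
                        && p.getHdrFormat() == EncoderProfiles.VideoProfile.HDR_HLG) {
                    return p;
                }
            }
            return null;
        }
    }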
diff --git a/tests/tests/media/projection/OWNERS b/tests/tests/media/projection/OWNERS
index d963c71..e59b3e2 100644
--- a/tests/tests/media/projection/OWNERS
+++ b/tests/tests/media/projection/OWNERS
@@ -1,3 +1,2 @@
-# Bug component: 970984
+# Bug component: 1345447
include platform/frameworks/base:/media/java/android/media/projection/OWNERS
-nmusgrave@google.com
diff --git a/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/CujTestBase.java b/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/CujTestBase.java
index ead087a..3fb5397 100644
--- a/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/CujTestBase.java
+++ b/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/CujTestBase.java
@@ -112,12 +112,10 @@
if (!mIsScrollTest) {
mActivity.runOnUiThread(() -> {
mActivity.prepareMediaItems(mediaUrls);
- mActivity.run();
});
} else {
mScrollActivity.runOnUiThread(() -> {
mScrollActivity.prepareMediaItems(mediaUrls);
- mScrollActivity.run();
});
}
diff --git a/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/MainActivity.java b/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/MainActivity.java
index 838c1aa..d364922 100644
--- a/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/MainActivity.java
+++ b/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/MainActivity.java
@@ -77,19 +77,12 @@
}
/**
- * Prepare the player and play the list
- */
- public void run() {
- mPlayer.prepare();
- mPlayer.play();
- }
-
- /**
- * Resume the player.
+ * Prepare and play the player.
*/
@Override
- protected void onResume() {
- super.onResume();
+ protected void onStart() {
+ super.onStart();
+ mPlayer.prepare();
mPlayer.play();
}
@@ -98,8 +91,8 @@
*/
@Override
protected void onStop() {
+ mPlayer.stop();
super.onStop();
- mPlayer.pause();
}
/**
diff --git a/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/ScrollTestActivity.java b/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/ScrollTestActivity.java
index 72c711f..d5c152f 100644
--- a/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/ScrollTestActivity.java
+++ b/tests/tests/mediacujtest/common/src/android/media/cujcommon/cts/ScrollTestActivity.java
@@ -95,21 +95,13 @@
}
/**
- * Prepare the player and play the first player.
- */
- public void run() {
- mFirstPlayer.prepare();
- mSecondPlayer.prepare();
- mIsFirstSurfaceActive = true;
- mFirstPlayer.play();
- }
-
- /**
- * Resume the first player.
+ * Prepare both players, and start the first player.
*/
@Override
- protected void onResume() {
- super.onResume();
+ protected void onStart() {
+ super.onStart();
+ mFirstPlayer.prepare();
+ mSecondPlayer.prepare();
mIsFirstSurfaceActive = true;
mFirstPlayer.play();
}
@@ -119,9 +111,9 @@
*/
@Override
protected void onStop() {
+ mFirstPlayer.stop();
+ mSecondPlayer.stop();
super.onStop();
- mFirstPlayer.pause();
- mSecondPlayer.pause();
}
/**
diff --git a/tests/tests/nfc/src/android/nfc/cts/NfcAdapterTest.java b/tests/tests/nfc/src/android/nfc/cts/NfcAdapterTest.java
index 0632677..2717821 100644
--- a/tests/tests/nfc/src/android/nfc/cts/NfcAdapterTest.java
+++ b/tests/tests/nfc/src/android/nfc/cts/NfcAdapterTest.java
@@ -401,6 +401,39 @@
}
}
+ @Test
+ @RequiresFlagsEnabled(Flags.FLAG_NFC_OEM_EXTENSION)
+ public void testOemExtension() throws InterruptedException {
+ CountDownLatch tagDetectedCountDownLatch = new CountDownLatch(1);
+ NfcAdapter nfcAdapter = NfcAdapter.getDefaultAdapter(mContext);
+ Assert.assertNotNull(nfcAdapter);
+ NfcOemExtension nfcOemExtension = nfcAdapter.getNfcOemExtension();
+ NfcOemExtensionCallback cb =
+ new NfcOemExtensionCallback(tagDetectedCountDownLatch);
+ try {
+ nfcOemExtension.registerCallback(
+ Executors.newSingleThreadExecutor(), cb);
+
+ // TODO: Fix these tests as we add more functionality to this API surface.
+ nfcOemExtension.clearPreference();
+ } finally {
+ nfcOemExtension.unregisterCallback(cb);
+ }
+ }
+
+ private class NfcOemExtensionCallback implements NfcOemExtension.Callback {
+ private final CountDownLatch mTagDetectedCountDownLatch;
+
+ NfcOemExtensionCallback(CountDownLatch countDownLatch) {
+ mTagDetectedCountDownLatch = countDownLatch;
+ }
+
+ @Override
+ public void onTagConnected(boolean connected, Tag tag) {
+ mTagDetectedCountDownLatch.countDown();
+ }
+ }
+
private class NfcVendorNciCallback implements NfcAdapter.NfcVendorNciCallback {
private final CountDownLatch mRspCountDownLatch;
private final CountDownLatch mNtfCountDownLatch;
diff --git a/tests/tests/notification/src/android/app/notification/current/cts/NotificationManagerZenTest.java b/tests/tests/notification/src/android/app/notification/current/cts/NotificationManagerZenTest.java
index 47a9ac1..22ae40d 100644
--- a/tests/tests/notification/src/android/app/notification/current/cts/NotificationManagerZenTest.java
+++ b/tests/tests/notification/src/android/app/notification/current/cts/NotificationManagerZenTest.java
@@ -2037,105 +2037,6 @@
assertTrue(mListener.mIntercepted.get(sbn.getKey()));
}
- @CddTest(requirements = {"2.2.3/3.8.4/H-1-1"})
- @Test
- public void testContactAffinityByPhoneOrder() throws Exception {
- insertSingleContact(ALICE, ALICE_PHONE, ALICE_EMAIL, true);
- insertSingleContact(BOB, BOB_PHONE, BOB_EMAIL, false);
- // Not Charlie
-
- mNotificationManager.setInterruptionFilter(INTERRUPTION_FILTER_ALL);
- sendNotifications(MODE_PHONE, false, false);
-
- int rankA= 0, rankB = 0, rankC = 0;
- for (int i = 0; i < 6; i++) {
- List<String> orderedKeys = new ArrayList<>(
- Arrays.asList(mListener.mRankingMap.getOrderedKeys()));
- rankA = findTagInKeys(ALICE, orderedKeys);
- rankB = findTagInKeys(BOB, orderedKeys);
- rankC = findTagInKeys(CHARLIE, orderedKeys);
- // ordered by contact affinity: A, B, C
- if (rankA < rankB && rankB < rankC) {
- // yay
- break;
- }
- sleep();
- }
- // ordered by contact affinity: A, B, C
- if (rankA < rankB && rankB < rankC) {
- // yay
- } else {
- fail("Notifications out of order. Actual order: Alice: " + rankA + " Bob: " + rankB
- + " Charlie: " + rankC);
- }
- }
-
- @CddTest(requirements = {"2.2.3/3.8.4/H-1-1"})
- @Test
- public void testContactUriByUriOrder() throws Exception {
- insertSingleContact(ALICE, ALICE_PHONE, ALICE_EMAIL, true);
- insertSingleContact(BOB, BOB_PHONE, BOB_EMAIL, false);
- // Not Charlie
-
- mNotificationManager.setInterruptionFilter(INTERRUPTION_FILTER_ALL);
- sendNotifications(MODE_URI, false, false);
-
- int rankA= 0, rankB = 0, rankC = 0;
- for (int i = 0; i < 6; i++) {
- List<String> orderedKeys = new ArrayList<>(
- Arrays.asList(mListener.mRankingMap.getOrderedKeys()));
- rankA = findTagInKeys(ALICE, orderedKeys);
- rankB = findTagInKeys(BOB, orderedKeys);
- rankC = findTagInKeys(CHARLIE, orderedKeys);
- // ordered by contact affinity: A, B, C
- if (rankA < rankB && rankB < rankC) {
- // yay
- break;
- }
- sleep();
- }
- // ordered by contact affinity: A, B, C
- if (rankA < rankB && rankB < rankC) {
- // yay
- } else {
- fail("Notifications out of order. Actual order: Alice: " + rankA + " Bob: " + rankB
- + " Charlie: " + rankC);
- }
- }
-
- @CddTest(requirements = {"2.2.3/3.8.4/H-1-1"})
- @Test
- public void testContactUriByEmailOrder() throws Exception {
- insertSingleContact(ALICE, ALICE_PHONE, ALICE_EMAIL, true);
- insertSingleContact(BOB, BOB_PHONE, BOB_EMAIL, false);
- // Not Charlie
-
- mNotificationManager.setInterruptionFilter(INTERRUPTION_FILTER_ALL);
- sendNotifications(MODE_EMAIL, false, false);
-
- int rankA= 0, rankB = 0, rankC = 0;
- for (int i = 0; i < 6; i++) {
- List<String> orderedKeys = new ArrayList<>(
- Arrays.asList(mListener.mRankingMap.getOrderedKeys()));
- rankA = findTagInKeys(ALICE, orderedKeys);
- rankB = findTagInKeys(BOB, orderedKeys);
- rankC = findTagInKeys(CHARLIE, orderedKeys);
- // ordered by contact affinity: A, B, C
- if (rankA < rankB && rankB < rankC) {
- // yay
- break;
- }
- sleep();
- }
- // ordered by contact affinity: A, B, C
- if (rankA < rankB && rankB < rankC) {
- // yay
- } else {
- fail("Notifications out of order. Actual order: Alice: " + rankA + " Bob: " + rankB
- + " Charlie: " + rankC);
- }
- }
-
@Test
@RequiresFlagsEnabled(Flags.FLAG_MODES_API)
public void testAddAutomaticZenRule_mergesAllowChannels() throws Exception {
diff --git a/tests/tests/os/TEST_MAPPING b/tests/tests/os/TEST_MAPPING
index 4dcc471..1d94ebd 100644
--- a/tests/tests/os/TEST_MAPPING
+++ b/tests/tests/os/TEST_MAPPING
@@ -92,10 +92,6 @@
"exclude-filter": "android.os.cts.FileObserverTest#testFileObserver_multipleFilesFull"
},
{
- // TODO(b/180726379)
- "exclude-filter": "android.os.cts.SeccompTest#testKernelPrecedenceTests"
- },
- {
// TODO(b/214271070)
"exclude-filter": "android.os.storage.cts.StorageManagerTest#testAttemptMountNonObb"
},
diff --git a/tests/tests/os/src/android/os/cts/HwBinderTest.java b/tests/tests/os/src/android/os/cts/HwBinderTest.java
index 9bc6a73..d6f5c29 100644
--- a/tests/tests/os/src/android/os/cts/HwBinderTest.java
+++ b/tests/tests/os/src/android/os/cts/HwBinderTest.java
@@ -21,6 +21,7 @@
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeNoException;
import android.hidl.manager.V1_0.IServiceManager;
import android.hidl.manager.V1_0.IServiceNotification;
@@ -215,7 +216,14 @@
ServiceNotification notification = new ServiceNotification();
IServiceManager manager = IServiceManager.getService();
- manager.registerForNotifications(IServiceManager.kInterfaceName, "default", notification);
+ try {
+ manager.registerForNotifications(
+ IServiceManager.kInterfaceName, "default", notification);
+ } catch (android.os.RemoteException e) {
+ assumeNoException("HIDL is not installed on this device", e);
+ return;
+ }
+
Calendar deadline = Calendar.getInstance();
deadline.add(Calendar.SECOND, 10);
diff --git a/tests/tests/permissionpolicy/res/raw/android_manifest_q2.xml b/tests/tests/permissionpolicy/res/raw/android_manifest_q2.xml
index 710b5f8..92fee51 100644
--- a/tests/tests/permissionpolicy/res/raw/android_manifest_q2.xml
+++ b/tests/tests/permissionpolicy/res/raw/android_manifest_q2.xml
@@ -2281,7 +2281,7 @@
<!-- @SystemApi @hide Allows changing Thread network state and access to Thread network
credentials such as Network Key and PSKc.
<p>Not for use by third-party applications.
- @FlaggedApi("com.android.net.thread.flags.thread_enabled_platform") -->
+ @FlaggedApi("com.android.net.thread.platform.flags.thread_enabled_platform") -->
<permission android:name="android.permission.THREAD_NETWORK_PRIVILEGED"
android:protectionLevel="signature|privileged" />
@@ -2331,12 +2331,12 @@
<!-- Allows system apps to call methods to register itself as a mDNS offload engine.
<p>Not for use by third-party or privileged applications.
@SystemApi
- @FlaggedApi("com.android.net.flags.register_nsd_offload_engine")
+ @FlaggedApi("android.net.platform.flags.register_nsd_offload_engine")
@hide This should only be used by system apps.
-->
<permission android:name="android.permission.REGISTER_NSD_OFFLOAD_ENGINE"
android:protectionLevel="signature"
- android:featureFlag="com.android.net.flags.register_nsd_offload_engine" />
+ android:featureFlag="android.net.platform.flags.register_nsd_offload_engine" />
<!-- ======================================= -->
<!-- Permissions for short range, peripheral networks -->
@@ -3629,7 +3629,7 @@
<!-- Allows an application to set policy related to <a
href="https://www.threadgroup.org">Thread</a> network.
- @FlaggedApi("com.android.net.thread.flags.thread_user_restriction_enabled")
+ @FlaggedApi("com.android.net.thread.platform.flags.thread_user_restriction_enabled")
-->
<permission android:name="android.permission.MANAGE_DEVICE_POLICY_THREAD_NETWORK"
android:protectionLevel="internal|role" />
@@ -6772,13 +6772,6 @@
<permission android:name="android.permission.USE_BIOMETRIC_INTERNAL"
android:protectionLevel="signature" />
- <!-- Allows privileged apps to access the background face authentication.
- @SystemApi
- @FlaggedApi("android.hardware.biometrics.face_background_authentication")
- @hide -->
- <permission android:name="android.permission.USE_BACKGROUND_FACE_AUTHENTICATION"
- android:protectionLevel="signature|privileged" />
-
<!-- Allows the system to control the BiometricDialog (SystemUI). Reserved for the system. @hide -->
<permission android:name="android.permission.MANAGE_BIOMETRIC_DIALOG"
android:protectionLevel="signature" />
@@ -7986,7 +7979,7 @@
@hide
-->
<permission android:name="android.permission.GET_APP_METADATA"
- android:protectionLevel="signature|installer" />
+ android:protectionLevel="signature|installer|verifier" />
<!-- @hide @SystemApi Allows an application to stage HealthConnect's remote data so that
HealthConnect can later integrate it. -->
@@ -8370,6 +8363,12 @@
android:process=":ui">
</activity>
+ <activity android:name="com.android.internal.app.SetScreenLockDialogActivity"
+ android:theme="@style/Theme.Dialog.Confirmation"
+ android:excludeFromRecents="true"
+ android:process=":ui">
+ </activity>
+
<activity android:name="com.android.internal.app.BlockedAppActivity"
android:theme="@style/Theme.Dialog.Confirmation"
android:excludeFromRecents="true"
diff --git a/tests/tests/sharesheet/Android.bp b/tests/tests/sharesheet/Android.bp
index 4971071..b459328 100644
--- a/tests/tests/sharesheet/Android.bp
+++ b/tests/tests/sharesheet/Android.bp
@@ -13,7 +13,7 @@
// limitations under the License.
package {
- default_team: "trendy_team_system_ui_please_use_a_more_specific_subteam_if_possible_",
+ default_team: "trendy_team_capture_and_share",
default_applicable_licenses: ["Android-Apache-2.0"],
}
diff --git a/tests/tests/speech/OWNERS b/tests/tests/speech/OWNERS
index e72b790..33a9b78 100644
--- a/tests/tests/speech/OWNERS
+++ b/tests/tests/speech/OWNERS
@@ -1,5 +1,4 @@
# Bug component: 63521
volnov@google.com
-eugeniom@google.com
schfan@google.com
-andreaambu@google.com
\ No newline at end of file
+andreaambu@google.com
diff --git a/tests/tests/tv/Android.bp b/tests/tests/tv/Android.bp
index 438393f..68d997d 100644
--- a/tests/tests/tv/Android.bp
+++ b/tests/tests/tv/Android.bp
@@ -18,7 +18,11 @@
android_library {
name: "CtsTvTestCases_lib",
- srcs: ["src/**/*.java", "src/**/*.aidl"],
+ srcs: [
+ "src/**/*.java",
+ "src/**/*.aidl",
+ ":tuner_cts_configuration_v1",
+ ],
libs: [
"platform-test-annotations",
"android.test.runner",
diff --git a/tests/tests/tv/config/tuner/Android.bp b/tests/tests/tv/config/tuner/Android.bp
new file mode 100644
index 0000000..87cf029
--- /dev/null
+++ b/tests/tests/tv/config/tuner/Android.bp
@@ -0,0 +1,26 @@
+// Copyright (C) 2024 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+xsd_config {
+ name: "tuner_cts_configuration_v1",
+ srcs: ["tuner_cts_config.xsd"],
+ package_name: "android.media.tv.tuner.cts.configuration.v1",
+ nullability: true,
+ gen_has: true,
+ api_dir: "tuner-cts-config-api",
+}
diff --git a/tests/tests/tv/config/tuner/tuner-cts-config-api/current.txt b/tests/tests/tv/config/tuner/tuner-cts-config-api/current.txt
new file mode 100644
index 0000000..30936c9
--- /dev/null
+++ b/tests/tests/tv/config/tuner/tuner-cts-config-api/current.txt
@@ -0,0 +1,65 @@
+// Signature format: 2.0
+package android.media.tv.tuner.cts.configuration.v1 {
+
+ public class DvbsCapability {
+ ctor public DvbsCapability();
+ method @Nullable public java.math.BigInteger getTargetSymbolRate();
+ method public boolean hasTargetSymbolRate();
+ method public void setTargetSymbolRate(@Nullable java.math.BigInteger);
+ }
+
+ public class LnbCapability {
+ ctor public LnbCapability();
+ method @Nullable public android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltages getSupportedVoltages();
+ method public boolean hasSupportedVoltages();
+ method public void setSupportedVoltages(@Nullable android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltages);
+ }
+
+ public enum LnbSupportedVoltage {
+ method @NonNull public String getRawName();
+ enum_constant public static final android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage VOLTAGE_11V;
+ enum_constant public static final android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage VOLTAGE_12V;
+ enum_constant public static final android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage VOLTAGE_13V;
+ enum_constant public static final android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage VOLTAGE_14V;
+ enum_constant public static final android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage VOLTAGE_15V;
+ enum_constant public static final android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage VOLTAGE_18V;
+ enum_constant public static final android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage VOLTAGE_19V;
+ enum_constant public static final android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage VOLTAGE_5V;
+ enum_constant public static final android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage VOLTAGE_NONE;
+ }
+
+ public class LnbSupportedVoltages {
+ ctor public LnbSupportedVoltages();
+ method @Nullable public java.util.List<android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage> getVoltage();
+ }
+
+ public class TunerCtsConfiguration {
+ ctor public TunerCtsConfiguration();
+ method @Nullable public android.media.tv.tuner.cts.configuration.v1.DvbsCapability getDvbsCapability();
+ method @Nullable public android.media.tv.tuner.cts.configuration.v1.LnbCapability getLnbCapability();
+ method @Nullable public java.math.BigInteger getTargetFrontendId();
+ method @Nullable public android.media.tv.tuner.cts.configuration.v1.Version getVersion();
+ method public boolean hasDvbsCapability();
+ method public boolean hasLnbCapability();
+ method public boolean hasTargetFrontendId();
+ method public boolean hasVersion();
+ method public void setDvbsCapability(@Nullable android.media.tv.tuner.cts.configuration.v1.DvbsCapability);
+ method public void setLnbCapability(@Nullable android.media.tv.tuner.cts.configuration.v1.LnbCapability);
+ method public void setTargetFrontendId(@Nullable java.math.BigInteger);
+ method public void setVersion(@Nullable android.media.tv.tuner.cts.configuration.v1.Version);
+ }
+
+ public enum Version {
+ method @NonNull public String getRawName();
+ enum_constant public static final android.media.tv.tuner.cts.configuration.v1.Version _1_0;
+ }
+
+ public class XmlParser {
+ ctor public XmlParser();
+ method @Nullable public static android.media.tv.tuner.cts.configuration.v1.TunerCtsConfiguration read(@NonNull java.io.InputStream) throws javax.xml.datatype.DatatypeConfigurationException, java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+ method @Nullable public static String readText(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+ method public static void skip(@NonNull org.xmlpull.v1.XmlPullParser) throws java.io.IOException, org.xmlpull.v1.XmlPullParserException;
+ }
+
+}
+
diff --git a/tests/tests/tv/config/tuner/tuner-cts-config-api/last_current.txt b/tests/tests/tv/config/tuner/tuner-cts-config-api/last_current.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/tests/tv/config/tuner/tuner-cts-config-api/last_current.txt
diff --git a/tests/tests/tv/config/tuner/tuner-cts-config-api/last_removed.txt b/tests/tests/tv/config/tuner/tuner-cts-config-api/last_removed.txt
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tests/tests/tv/config/tuner/tuner-cts-config-api/last_removed.txt
diff --git a/tests/tests/tv/config/tuner/tuner-cts-config-api/removed.txt b/tests/tests/tv/config/tuner/tuner-cts-config-api/removed.txt
new file mode 100644
index 0000000..d802177
--- /dev/null
+++ b/tests/tests/tv/config/tuner/tuner-cts-config-api/removed.txt
@@ -0,0 +1 @@
+// Signature format: 2.0
diff --git a/tests/tests/tv/config/tuner/tuner_cts_config.xsd b/tests/tests/tv/config/tuner/tuner_cts_config.xsd
new file mode 100644
index 0000000..78d3d0e
--- /dev/null
+++ b/tests/tests/tv/config/tuner/tuner_cts_config.xsd
@@ -0,0 +1,83 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!-- Copyright (C) 2021 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- Please check cts/tests/tests/tv/res/raw/tuner_cts_config_v1_default.xml as an example. -->
+<xs:schema version="2.0"
+ elementFormDefault="qualified"
+ attributeFormDefault="unqualified"
+ xmlns:xs="http://www.w3.org/2001/XMLSchema">
+ <!-- List the dynamic config versions supported by tuner testing. -->
+ <xs:simpleType name="Version">
+ <xs:restriction base="xs:decimal">
+ <xs:enumeration value="1.0"/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <!-- FRONTEND SESSION -->
+ <xs:simpleType name="FrontendId">
+ <xs:restriction base="xs:integer">
+ <xs:minInclusive value="0"/>
+ </xs:restriction>
+ </xs:simpleType>
+
+ <!-- LNB SESSION -->
+ <xs:simpleType name="LnbSupportedVoltage">
+ <xs:restriction base="xs:string">
+ <xs:enumeration value="VOLTAGE_NONE"/>
+ <xs:enumeration value="VOLTAGE_5V"/>
+ <xs:enumeration value="VOLTAGE_11V"/>
+ <xs:enumeration value="VOLTAGE_12V"/>
+ <xs:enumeration value="VOLTAGE_13V"/>
+ <xs:enumeration value="VOLTAGE_14V"/>
+ <xs:enumeration value="VOLTAGE_15V"/>
+ <xs:enumeration value="VOLTAGE_18V"/>
+ <xs:enumeration value="VOLTAGE_19V"/>
+ </xs:restriction>
+ </xs:simpleType>
+ <xs:complexType name="LnbSupportedVoltages">
+ <xs:sequence>
+ <xs:element name="voltage" type="LnbSupportedVoltage" minOccurs="2" maxOccurs="unbounded"/>
+ </xs:sequence>
+ </xs:complexType>
+ <xs:complexType name="LnbCapability">
+ <xs:sequence>
+ <xs:element name="supportedVoltages" type="LnbSupportedVoltages" minOccurs="1" maxOccurs="1"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <!-- DVBS SESSION -->
+ <xs:simpleType name="SymbolRate">
+ <xs:restriction base="xs:integer"/>
+ </xs:simpleType>
+ <xs:complexType name="DvbsCapability">
+ <xs:sequence>
+ <xs:element name="targetSymbolRate" type="SymbolRate" minOccurs="1" maxOccurs="1"/>
+ </xs:sequence>
+ </xs:complexType>
+
+ <!-- TUNER CTS CONFIGURATION -->
+ <xs:element name="tunerCtsConfiguration">
+ <xs:complexType>
+ <xs:sequence>
+ <xs:element name="targetFrontendId" type="FrontendId" minOccurs="1" maxOccurs="1"/>
+ <xs:element name="lnbCapability" type="LnbCapability" minOccurs="0" maxOccurs="1"/>
+ <xs:element name="dvbsCapability" type="DvbsCapability" minOccurs="0" maxOccurs="1"/>
+ </xs:sequence>
+ <xs:attribute name="version" type="Version"/>
+ </xs:complexType>
+ </xs:element>
+
+</xs:schema>
diff --git a/tests/tests/tv/res/raw/tuner_cts_config_v1_default.xml b/tests/tests/tv/res/raw/tuner_cts_config_v1_default.xml
new file mode 100644
index 0000000..a2063b1
--- /dev/null
+++ b/tests/tests/tv/res/raw/tuner_cts_config_v1_default.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
+<!-- Copyright (C) 2024 The Android Open Source Project
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+ http://www.apache.org/licenses/LICENSE-2.0
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<tunerCtsConfiguration>
+ <!-- The index of the frontend in available frontends return from HAL level-->
+ <targetFrontendId>0</targetFrontendId>
+
+ <!-- The capability of lnb. (Optional) -->
+ <lnbCapability>
+ <supportedVoltages>
+ <voltage>VOLTAGE_NONE</voltage>
+ <voltage>VOLTAGE_5V</voltage>
+ </supportedVoltages>
+ </lnbCapability>
+
+ <!-- The capability of dvbs. (Optional) -->
+ <dvbsCapability>
+ <targetSymbolRate>25000000</targetSymbolRate>
+ </dvbsCapability>
+</tunerCtsConfiguration>
diff --git a/tests/tests/tv/src/android/media/tv/tuner/cts/SharedFilterTestService.java b/tests/tests/tv/src/android/media/tv/tuner/cts/SharedFilterTestService.java
index d8381fd..6ed80ce 100644
--- a/tests/tests/tv/src/android/media/tv/tuner/cts/SharedFilterTestService.java
+++ b/tests/tests/tv/src/android/media/tv/tuner/cts/SharedFilterTestService.java
@@ -19,14 +19,15 @@
import android.app.Service;
import android.content.Context;
import android.content.Intent;
+import android.media.tv.tuner.dvr.DvrPlayback;
import android.media.tv.tuner.Tuner;
import android.media.tv.tuner.cts.ISharedFilterTestServer;
+import android.media.tv.tuner.dvr.OnPlaybackStatusChangedListener;
import android.media.tv.tuner.filter.Filter;
import android.media.tv.tuner.filter.FilterCallback;
import android.media.tv.tuner.filter.FilterEvent;
import android.media.tv.tuner.filter.SharedFilter;
import android.media.tv.tuner.filter.SharedFilterCallback;
-import android.media.tv.tuner.frontend.FrontendInfo;
import android.os.IBinder;
import android.util.Log;
@@ -37,8 +38,8 @@
private static final String TAG = "SharedFilterTestService";
private Context mContext = null;
private Tuner mTuner = null;
+ private DvrPlayback mDvrPlayback = null;
private Filter mFilter = null;
- private boolean mTuning = false;
@Override
public IBinder onBind(Intent intent) {
@@ -60,19 +61,17 @@
mFilter = TunerTest.createTsSectionFilter(
mTuner, getExecutor(), getFilterCallback());
- // Tune a frontend before start the filter
- List<FrontendInfo> infos = mTuner.getAvailableFrontendInfos();
- mTuner.tune(TunerTest.createFrontendSettings(infos.get(0)));
- mTuning = true;
+ // Open DVR playback as the data source
+ mDvrPlayback = mTuner.openDvrPlayback(100, getExecutor(), getPlaybackListener());
return mFilter.acquireSharedFilterToken();
}
@Override
public void closeFilter() {
- if (mTuning) {
- mTuner.cancelTuning();
- mTuning = false;
+ if (mDvrPlayback != null) {
+ mDvrPlayback.close();
+ mDvrPlayback = null;
}
mFilter.close();
mFilter = null;
@@ -80,10 +79,6 @@
@Override
public void freeSharedFilterToken(String token) {
- if (mTuning) {
- mTuner.cancelTuning();
- mTuning = false;
- }
mFilter.freeSharedFilterToken(token);
}
@@ -144,4 +139,11 @@
}
private Executor getExecutor() { return Runnable::run; }
+
+ private OnPlaybackStatusChangedListener getPlaybackListener() {
+ return new OnPlaybackStatusChangedListener() {
+ @Override
+ public void onPlaybackStatusChanged(int status) {}
+ };
+ }
}
diff --git a/tests/tests/tv/src/android/media/tv/tuner/cts/TunerFrontendTest.java b/tests/tests/tv/src/android/media/tv/tuner/cts/TunerFrontendTest.java
index 6644d1f..74054a5 100644
--- a/tests/tests/tv/src/android/media/tv/tuner/cts/TunerFrontendTest.java
+++ b/tests/tests/tv/src/android/media/tv/tuner/cts/TunerFrontendTest.java
@@ -23,6 +23,7 @@
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assume.assumeNotNull;
import android.content.Context;
import android.content.pm.PackageManager;
@@ -1121,6 +1122,7 @@
@Test
public void testFrontendInfoWithLongFrequency() throws Exception {
List<Integer> ids = mTuner.getFrontendIds();
+ assumeNotNull(ids);
List<FrontendInfo> infos = mTuner.getAvailableFrontendInfos();
Map<Integer, FrontendInfo> infoMap = new HashMap<>();
for (FrontendInfo info : infos) {
diff --git a/tests/tests/tv/src/android/media/tv/tuner/cts/TunerTest.java b/tests/tests/tv/src/android/media/tv/tuner/cts/TunerTest.java
index 6f9a7e3..41de820 100644
--- a/tests/tests/tv/src/android/media/tv/tuner/cts/TunerTest.java
+++ b/tests/tests/tv/src/android/media/tv/tuner/cts/TunerTest.java
@@ -23,6 +23,7 @@
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
+import static org.junit.Assume.assumeNotNull;
import android.content.ComponentName;
import android.content.Context;
@@ -37,8 +38,12 @@
import android.media.tv.tuner.LnbCallback;
import android.media.tv.tuner.Tuner;
import android.media.tv.tuner.TunerVersionChecker;
+import android.media.tv.tuner.cts.configuration.v1.LnbSupportedVoltage;
+import android.media.tv.tuner.cts.configuration.v1.TunerCtsConfiguration;
+import android.media.tv.tuner.cts.configuration.v1.XmlParser;
import android.media.tv.tuner.dvr.DvrPlayback;
import android.media.tv.tuner.dvr.DvrRecorder;
+import android.media.tv.tuner.dvr.DvrSettings;
import android.media.tv.tuner.dvr.OnPlaybackStatusChangedListener;
import android.media.tv.tuner.dvr.OnRecordStatusChangedListener;
import android.media.tv.tuner.filter.AlpFilterConfiguration;
@@ -83,6 +88,7 @@
import android.media.tv.tuner.frontend.DtmbFrontendSettings;
import android.media.tv.tuner.frontend.DvbcFrontendCapabilities;
import android.media.tv.tuner.frontend.DvbcFrontendSettings;
+import android.media.tv.tuner.frontend.DvbsCodeRate;
import android.media.tv.tuner.frontend.DvbsFrontendCapabilities;
import android.media.tv.tuner.frontend.DvbsFrontendSettings;
import android.media.tv.tuner.frontend.DvbtFrontendCapabilities;
@@ -111,6 +117,7 @@
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
+import android.tv.cts.R;
import android.util.SparseIntArray;
import androidx.test.InstrumentationRegistry;
@@ -121,13 +128,19 @@
import org.junit.After;
import org.junit.Before;
+import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStream;
import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.List;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.CountDownLatch;
@@ -148,6 +161,12 @@
private static final int TIMEOUT_MS = 10 * 1000; // 10 seconds
private static final int SCAN_TIMEOUT_MS = 2 * 60 * 1000; // 2 minutes
private static final long TIMEOUT_BINDER_SERVICE_SEC = 2;
+ private static final String DEFAULT_TUNER_CTS_CONFIGURATION_FILE =
+ "/product/etc/tuner_cts_config_V1.xml";
+ private static final String VENDOR_TUNER_CTS_CONFIGURATION_FILE =
+ "/vendor/etc/tuner_cts_config_V1.xml";
+
+ private static TunerCtsConfiguration sTunerCtsConfiguration;
private Context mContext;
private Tuner mTuner;
@@ -440,6 +459,38 @@
}
}
+ private static TunerCtsConfiguration getDefaultConfiguration() throws Exception {
+ Context context = InstrumentationRegistry.getTargetContext();
+ InputStream inputStream = context
+ .getResources().openRawResource(R.raw.tuner_cts_config_v1_default);
+ TunerCtsConfiguration tunerCtsConfiguration = XmlParser.read(inputStream);
+ return tunerCtsConfiguration;
+ }
+
+ private static TunerCtsConfiguration readTunerCtsConfiguration(File config) throws Exception {
+ try (InputStream inputStream = new BufferedInputStream(new FileInputStream(config))) {
+ TunerCtsConfiguration tunerCtsConfiguration = XmlParser.read(inputStream);
+ return tunerCtsConfiguration;
+ }
+ }
+
+ private static void loadTunerCtsConfiguration() throws Exception {
+ File config = new File(DEFAULT_TUNER_CTS_CONFIGURATION_FILE);
+ if (!config.isFile()) {
+ config = new File(VENDOR_TUNER_CTS_CONFIGURATION_FILE);
+ }
+ if (!config.isFile()) {
+ sTunerCtsConfiguration = getDefaultConfiguration();
+ } else {
+ sTunerCtsConfiguration = readTunerCtsConfiguration(config);
+ }
+ }
+
+ @BeforeClass
+ public static void setUpClass() throws Exception {
+ loadTunerCtsConfiguration();
+ }
+
@Before
public void setUp() throws Exception {
mContext = InstrumentationRegistry.getTargetContext();
@@ -488,7 +539,8 @@
if (ids == null) return;
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
int res = mTuner.tune(createFrontendSettings(info));
hwInfo = mTuner.getCurrentFrontendHardwareInfo();
if (TunerVersionChecker.isHigherOrEqualVersionTo(TunerVersionChecker.TUNER_VERSION_2_0)) {
@@ -507,7 +559,8 @@
if (ids == null) return;
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
int res = mTuner.tune(createFrontendSettings(info));
assertEquals(Tuner.RESULT_SUCCESS, res);
if (TunerVersionChecker.isHigherOrEqualVersionTo(TunerVersionChecker.TUNER_VERSION_3_0)) {
@@ -547,7 +600,8 @@
if (ids == null) return;
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
int res = mTuner.tune(createFrontendSettings(info));
assertEquals(Tuner.RESULT_SUCCESS, res);
res = mTuner.cancelTuning();
@@ -559,7 +613,10 @@
res = mTuner.cancelTuning();
assertEquals(Tuner.RESULT_SUCCESS, res);
- for (int i = 1; i < ids.size(); i++) {
+ for (int i = 0; i < ids.size(); i++) {
+ if (i == targetFrontendId) {
+ continue;
+ }
FrontendInfo info2 = mTuner.getFrontendInfoById(ids.get(i));
if (info2.getType() != info.getType()) {
res = mTuner.tune(createFrontendSettings(info2));
@@ -574,7 +631,8 @@
if (frontendInfos == null) return;
assertFalse(frontendInfos.isEmpty());
- FrontendInfo frontendInfo = frontendInfos.get(0);
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo frontendInfo = frontendInfos.get(targetFrontendId);
int result = mTuner.applyFrontend(frontendInfo);
assertEquals(Tuner.RESULT_SUCCESS, result);
@@ -601,7 +659,8 @@
if (ids == null) return;
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
int res = mTuner.scan(
createFrontendSettings(info),
Tuner.SCAN_TYPE_AUTO,
@@ -1035,7 +1094,8 @@
public void testLnb() throws Exception {
Lnb lnb = mTuner.openLnb(getExecutor(), getLnbCallback());
if (lnb == null) return;
- assertEquals(lnb.setVoltage(Lnb.VOLTAGE_5V), Tuner.RESULT_SUCCESS);
+ int targetLnbVoltage = getTargetLnbVoltage();
+ assertEquals(lnb.setVoltage(targetLnbVoltage), Tuner.RESULT_SUCCESS);
assertEquals(lnb.setTone(Lnb.TONE_NONE), Tuner.RESULT_SUCCESS);
assertEquals(
lnb.setSatellitePosition(Lnb.POSITION_A), Tuner.RESULT_SUCCESS);
@@ -1060,8 +1120,11 @@
lnbCB1.resetOnDiseqcMessageCalled();
List<Integer> ids = mTuner.getFrontendIds();
+ // We don't accept a device that connects an LNB but has no frontend.
+ assertNotNull(ids);
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings = createFrontendSettings(info);
int res = mTuner.tune(feSettings);
assertEquals(Tuner.RESULT_SUCCESS, res);
@@ -1118,7 +1181,8 @@
List<Integer> ids = mTuner.getFrontendIds();
if (ids == null) return;
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
int res = mTuner.tune(createFrontendSettings(info));
assertEquals(Tuner.RESULT_SUCCESS, res);
@@ -1151,7 +1215,8 @@
if (ids == null)
return;
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
int res = mTuner.tune(createFrontendSettings(info));
assertEquals(Tuner.RESULT_SUCCESS, res);
@@ -1240,7 +1305,8 @@
if (ids == null) return;
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
int res = mTuner.tune(createFrontendSettings(info));
assertEquals(Tuner.RESULT_SUCCESS, res);
@@ -1279,7 +1345,8 @@
if (ids == null) return;
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
int res = mTuner.tune(createFrontendSettings(info));
assertEquals(Tuner.RESULT_SUCCESS, res);
@@ -1328,7 +1395,8 @@
if (ids == null) return;
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
int res = mTuner.tune(createFrontendSettings(info));
assertEquals(Tuner.RESULT_SUCCESS, res);
@@ -1590,8 +1658,10 @@
@Test
public void testResourceReclaimed() throws Exception {
List<Integer> ids = mTuner.getFrontendIds();
+ assumeNotNull(ids);
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings = createFrontendSettings(info);
// first tune with mTuner to acquire resource
@@ -1612,8 +1682,10 @@
@Test
public void testResourceReclaimedDifferentThread() throws Exception {
List<Integer> ids = mTuner.getFrontendIds();
+ assumeNotNull(ids);
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings = createFrontendSettings(info);
// first tune with mTuner to acquire resource
@@ -1661,9 +1733,10 @@
@Test
public void testResourceReclaimedDifferentProcess() throws Exception {
List<Integer> ids = mTuner.getFrontendIds();
- int frontendIndex = 0;
+ assumeNotNull(ids);
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(frontendIndex));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings = createFrontendSettings(info);
// set up the test server
@@ -1689,7 +1762,7 @@
tunerResourceTestServer.createTuner(200);
// now tune on higher priority tuner to get mTuner reclaimed
- res = tunerResourceTestServer.tune(frontendIndex);
+ res = tunerResourceTestServer.tune(targetFrontendId);
assertEquals(Tuner.RESULT_SUCCESS, res);
try {
@@ -1716,7 +1789,7 @@
assertNotNull(mTuner.getFrontendInfo());
tunerResourceTestServer.createTuner(200);
- tunerResourceTestServer.tuneAsync(frontendIndex);
+ tunerResourceTestServer.tuneAsync(targetFrontendId);
// adjust timing to induce race/deadlock
int sleepMS = 4;
@@ -1739,7 +1812,8 @@
if (frontendInfos == null) return;
assertFalse(frontendInfos.isEmpty());
- FrontendInfo frontendInfo = frontendInfos.get(0);
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo frontendInfo = frontendInfos.get(targetFrontendId);
int result = mTuner.applyFrontend(frontendInfo);
assertEquals(Tuner.RESULT_SUCCESS, result);
@@ -1755,8 +1829,10 @@
public void testShareFrontendFromTuner() throws Exception {
Tuner tuner100 = new Tuner(mContext, null, 100);
List<Integer> ids = tuner100.getFrontendIds();
+ assumeNotNull(ids);
assertFalse(ids.isEmpty());
- FrontendInfo info = tuner100.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = tuner100.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings = createFrontendSettings(info);
int[] statusTypes = {1};
boolean exceptionThrown = false;
@@ -1768,7 +1844,7 @@
res = tuner200.tune(feSettings);
assertEquals(Tuner.RESULT_SUCCESS, res);
- info = tuner200.getFrontendInfoById(ids.get(0));
+ info = tuner200.getFrontendInfoById(ids.get(targetFrontendId));
res = tuner200.tune(feSettings);
assertEquals(Tuner.RESULT_SUCCESS, res);
@@ -1846,11 +1922,10 @@
private void testTransferFeOwnershipSingleTuner() {
List<Integer> ids = mTuner.getFrontendIds();
- if (ids == null) {
- return;
- }
+ assumeNotNull(ids);
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings = createFrontendSettings(info);
// SCENARIO 1 - transfer and close the previous owner
@@ -1921,9 +1996,10 @@
private void testTransferFeAndCiCamOwnership() {
List<Integer> ids = mTuner.getFrontendIds();
- assertNotNull(ids);
+ assumeNotNull(ids);
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings = createFrontendSettings(info);
// Create tuner and tune to get frontend resource
@@ -1982,9 +2058,10 @@
private void testTransferFeAndLnbOwnership() {
List<Integer> ids = mTuner.getFrontendIds();
- assertNotNull(ids);
+ assumeNotNull(ids);
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings = createFrontendSettings(info);
// Create tuner and tune to acquire frontend resource
@@ -2057,9 +2134,10 @@
Tuner other = new Tuner(mContext, null, 100);
List<Integer> ids = other.getFrontendIds();
- if (ids == null) return;
+ assumeNotNull(ids);
assertFalse(ids.isEmpty());
- FrontendInfo info = other.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = other.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings = createFrontendSettings(info);
int res = other.tune(feSettings);
@@ -2114,13 +2192,12 @@
@Test
public void testCloseFrontend() throws Exception {
List<Integer> ids = mTuner.getFrontendIds();
- if (ids == null) {
- return;
- }
+ assumeNotNull(ids);
// SCENARIO 1 - without Lnb
assertFalse(ids.isEmpty());
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings = createFrontendSettings(info);
int res = mTuner.tune(feSettings);
assertEquals(Tuner.RESULT_SUCCESS, res);
@@ -2140,8 +2217,8 @@
}
assertTrue(frontendClosed);
- // now tune to a different setting
- info = mTuner.getFrontendInfoById(ids.get(1));
+ // SCENARIO 2 - with Lnb
+ info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
feSettings = createFrontendSettings(info);
mTuner.tune(feSettings);
assertEquals(Tuner.RESULT_SUCCESS, res);
@@ -2149,8 +2226,7 @@
FrontendStatus status = mTuner.getFrontendStatus(statusCapabilities);
assertNotNull(status);
- // SCENARIO 2 - with Lnb
-
+ // open lnb
TunerTestLnbCallback lnbCB1 = new TunerTestLnbCallback();
Lnb lnb = mTuner.openLnb(getExecutor(), lnbCB1);
if (lnb == null) {
@@ -2169,7 +2245,7 @@
}
assertTrue(frontendClosed);
- info = mTuner.getFrontendInfoById(ids.get(0));
+ info = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
feSettings = createFrontendSettings(info);
mTuner.tune(feSettings);
assertEquals(Tuner.RESULT_SUCCESS, res);
@@ -2629,13 +2705,9 @@
String token2 = f.acquireSharedFilterToken();
assertTrue(token2 == null);
- // Tune a frontend before start the filter
- List<Integer> ids = mTuner.getFrontendIds();
- assertFalse(ids.isEmpty());
-
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
- int res = mTuner.tune(createFrontendSettings(info));
- assertEquals(Tuner.RESULT_SUCCESS, res);
+ // Use DvrPlayback as data source
+ DvrPlayback d = mTuner.openDvrPlayback(100, getExecutor(), getPlaybackListener());
+ assertNotNull(d);
Settings settings = SectionSettingsWithTableInfo
.builder(Filter.TYPE_TS)
@@ -2660,9 +2732,7 @@
assertEquals(f.read(new byte[3], 0, 3), 0);
assertEquals(f.stop(), Tuner.RESULT_INVALID_STATE);
- res = mTuner.cancelTuning();
- assertEquals(Tuner.RESULT_SUCCESS, res);
-
+ d.close();
f.freeSharedFilterToken(token1);
f.close();
f = null;
@@ -2777,18 +2847,14 @@
String token = f.acquireSharedFilterToken();
assertTrue(token != null);
- // Tune a frontend before start the shared filter
- List<Integer> ids = mTuner.getFrontendIds();
- assertFalse(ids.isEmpty());
+ // Use DvrPlayback as the data source
+ DvrPlayback d = mTuner.openDvrPlayback(100, getExecutor(), getPlaybackListener());
+ assertNotNull(d);
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
- int res = mTuner.tune(createFrontendSettings(info));
- assertEquals(Tuner.RESULT_SUCCESS, res);
assertTrue(mSharedFilterTestServer.verifySharedFilter(token));
- res = mTuner.cancelTuning();
- assertEquals(Tuner.RESULT_SUCCESS, res);
+ d.close();
f.freeSharedFilterToken(token);
f.close();
f = null;
@@ -2821,19 +2887,18 @@
f.configure(config);
- // Tune a frontend before starting the filter
- List<Integer> ids = mTuner.getFrontendIds();
- assertFalse(ids.isEmpty());
-
- FrontendInfo info = mTuner.getFrontendInfoById(ids.get(0));
- int res = mTuner.tune(createFrontendSettings(info));
- assertEquals(Tuner.RESULT_SUCCESS, res);
+ DvrPlayback d = mTuner.openDvrPlayback(100, getExecutor(), getPlaybackListener());
+ assertNotNull(d);
+ d.configure(getDvrSettings());
// start / stop prevents initial race condition after first setting the time delay.
f.start();
f.stop();
mLockLatch = new CountDownLatch(1);
+ d.start();
+ byte[] buffer = getTestDataBuffer();
+ d.read(buffer, 0, buffer.length);
f.start();
assertTrue(mLockLatch.await(TIMEOUT_MS, TimeUnit.MILLISECONDS));
@@ -2841,8 +2906,8 @@
Duration timeElapsed = Duration.between(start, finish);
assertTrue(timeElapsed.toMillis() >= timeDelayInMs);
- res = mTuner.cancelTuning();
- assertEquals(Tuner.RESULT_SUCCESS, res);
+ d.stop();
+ d.close();
} else {
assertEquals(Tuner.RESULT_UNAVAILABLE, status);
}
@@ -2865,6 +2930,7 @@
@Test
public void testMaxNumberOfFrontends() throws Exception {
List<Integer> ids = mTuner.getFrontendIds();
+ assumeNotNull(ids);
assertFalse(ids.isEmpty());
for (int i = 0; i < ids.size(); i++) {
int type = mTuner.getFrontendInfoById(ids.get(i)).getType();
@@ -2901,7 +2967,8 @@
}
}
// validate the behavior of tune
- FrontendInfo info1 = mTuner.getFrontendInfoById(ids.get(0));
+ int targetFrontendId = sTunerCtsConfiguration.getTargetFrontendId().intValueExact();
+ FrontendInfo info1 = mTuner.getFrontendInfoById(ids.get(targetFrontendId));
FrontendSettings feSettings1 = createFrontendSettings(info1);
int type1 = info1.getType();
if (TunerVersionChecker.isHigherOrEqualVersionTo(
@@ -3233,9 +3300,12 @@
DvbcFrontendSettings
.builder()
.setFrequencyLong(490000000)
+ .setBandwidth(DvbcFrontendSettings.BANDWIDTH_8MHZ)
.setModulation(modulation)
.setInnerFec(fec)
.setAnnex(annex)
+ .setSpectralInversion(
+ FrontendSettings.FRONTEND_SPECTRAL_INVERSION_NORMAL)
.build();
settings.setEndFrequencyLong(maxFreq);
return settings;
@@ -3244,12 +3314,22 @@
DvbsFrontendCapabilities dvbsCaps = (DvbsFrontendCapabilities) caps;
int modulation = getFirstCapable(dvbsCaps.getModulationCapability());
int standard = getFirstCapable(dvbsCaps.getStandardCapability());
+ long innerFec = getFirstCapable(dvbsCaps.getInnerFecCapability());
+ DvbsCodeRate codeRate = DvbsCodeRate
+ .builder()
+ .setInnerFec(innerFec)
+ .build();
+ int symbolRate = sTunerCtsConfiguration.getDvbsCapability()
+ .getTargetSymbolRate().intValueExact();
DvbsFrontendSettings settings =
DvbsFrontendSettings
.builder()
.setFrequencyLong(950000000) //950Mhz
.setModulation(modulation)
+ .setCodeRate(codeRate)
+ .setRolloff(DvbsFrontendSettings.ROLLOFF_0_20)
.setStandard(standard)
+ .setSymbolRate(symbolRate)
.build();
settings.setEndFrequencyLong(maxFreq);
return settings;
@@ -3269,6 +3349,7 @@
.setBandwidth(bandwidth)
.setConstellation(constellation)
.setHierarchy(hierarchy)
+ .setHighPriority(true)
.setHighPriorityCodeRate(codeRate)
.setLowPriorityCodeRate(codeRate)
.setGuardInterval(guardInterval)
@@ -3391,6 +3472,27 @@
return null;
}
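+ /** DVR playback settings used as the filter data source: TS format, 188-byte packets, data-ready status events. */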
+ private DvrSettings getDvrSettings() {
+ return DvrSettings
+ .builder()
+ .setStatusMask(Filter.STATUS_DATA_READY)
+ .setLowThreshold(200L)
+ .setHighThreshold(800L)
+ .setPacketSize(188L)
+ .setDataFormat(DvrSettings.DATA_FORMAT_TS)
+ .build();
+ }
+
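+ /**
+ * Returns a single 188-byte MPEG-TS packet (sync byte 0x47 carrying a minimal PAT section),
+ * zero-padded to the standard packet size by Arrays.copyOf.
+ */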
+ private byte[] getTestDataBuffer() {
+ byte[] data = new byte[]{
+ 0x47, 0x40, 0x00, 0x10, 0x00, 0x00, (byte) 0xB0, 0x0D, 0x00, 0x01,
+ (byte) 0xC1, 0x00, 0x00, 0x00, 0x01, (byte) 0xF0, 0x00, 0x2A, (byte) 0xB1,
+ 0x04, (byte) 0xB2
+ };
+
+ return Arrays.copyOf(data, 188);
+ }
+
static public int getFirstCapable(int caps) {
if (caps == 0) return 0;
int mask = 1;
@@ -3567,4 +3669,39 @@
mTunerHandlerTaskComplete.open();
}
}
+
+ private int getTargetLnbVoltage() {
+ if (!sTunerCtsConfiguration.hasLnbCapability()) {
+ return Lnb.VOLTAGE_NONE;
+ }
+ List<LnbSupportedVoltage> lnbSupportedVoltages =
+ sTunerCtsConfiguration.getLnbCapability().getSupportedVoltages().getVoltage();
+ int targetLnbVoltage = lnbSupportedVoltages.stream()
+ .filter(v -> v != LnbSupportedVoltage.VOLTAGE_NONE)
+ .findFirst()
+ .map(v -> {
+ switch (v) {
+ case VOLTAGE_5V:
+ return Lnb.VOLTAGE_5V;
+ case VOLTAGE_11V:
+ return Lnb.VOLTAGE_11V;
+ case VOLTAGE_12V:
+ return Lnb.VOLTAGE_12V;
+ case VOLTAGE_13V:
+ return Lnb.VOLTAGE_13V;
+ case VOLTAGE_14V:
+ return Lnb.VOLTAGE_14V;
+ case VOLTAGE_15V:
+ return Lnb.VOLTAGE_15V;
+ case VOLTAGE_18V:
+ return Lnb.VOLTAGE_18V;
+ case VOLTAGE_19V:
+ return Lnb.VOLTAGE_19V;
+ default:
+ return Lnb.VOLTAGE_NONE;
+ }
+ })
+ .orElse(Lnb.VOLTAGE_NONE);
+ return targetLnbVoltage;
+ }
}
diff --git a/tests/tests/vcn/AndroidManifest.xml b/tests/tests/vcn/AndroidManifest.xml
index 87525a1..b37e174 100644
--- a/tests/tests/vcn/AndroidManifest.xml
+++ b/tests/tests/vcn/AndroidManifest.xml
@@ -27,6 +27,8 @@
<uses-permission android:name="android.permission.INTERNET"/>
<!--Allow tests to read telephony configurations -->
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
+ <!--Allow tests to set global settings -->
+ <uses-permission android:name="android.permission.WRITE_SECURE_SETTINGS" />
<application android:label="CtsVcnTestCases">
<uses-library android:name="android.test.runner" />
diff --git a/tests/tests/vcn/src/android/net/vcn/cts/VcnManagerTest.java b/tests/tests/vcn/src/android/net/vcn/cts/VcnManagerTest.java
index 08709f6..9c30d05 100644
--- a/tests/tests/vcn/src/android/net/vcn/cts/VcnManagerTest.java
+++ b/tests/tests/vcn/src/android/net/vcn/cts/VcnManagerTest.java
@@ -19,6 +19,9 @@
import static android.content.pm.PackageManager.FEATURE_TELEPHONY;
import static android.ipsec.ike.cts.IkeTunUtils.PortPair;
import static android.net.ConnectivityDiagnosticsManager.DataStallReport.DETECTION_METHOD_DNS_EVENTS;
+import static android.net.ConnectivitySettingsManager.CAPTIVE_PORTAL_MODE_PROMPT;
+import static android.net.ConnectivitySettingsManager.getCaptivePortalMode;
+import static android.net.ConnectivitySettingsManager.setCaptivePortalMode;
import static android.net.NetworkCapabilities.NET_CAPABILITY_CBS;
import static android.net.NetworkCapabilities.NET_CAPABILITY_NOT_METERED;
import static android.net.NetworkCapabilities.NET_CAPABILITY_NOT_RESTRICTED;
@@ -136,6 +139,7 @@
private final TelephonyManager mTelephonyManager;
private final ConnectivityManager mConnectivityManager;
private final CarrierConfigManager mCarrierConfigManager;
+ private final int mOldCaptivePortalMode;
public VcnManagerTest() {
mContext = InstrumentationRegistry.getContext();
@@ -144,6 +148,8 @@
mTelephonyManager = mContext.getSystemService(TelephonyManager.class);
mConnectivityManager = mContext.getSystemService(ConnectivityManager.class);
mCarrierConfigManager = mContext.getSystemService(CarrierConfigManager.class);
+
+ mOldCaptivePortalMode = getCaptivePortalMode(mContext, CAPTIVE_PORTAL_MODE_PROMPT);
}
@Before
@@ -151,10 +157,14 @@
assumeTrue(mContext.getPackageManager().hasSystemFeature(FEATURE_TELEPHONY));
getInstrumentation().getUiAutomation().adoptShellPermissionIdentity();
+
+ // Ensure Internet probing check will be performed on VCN networks
+ setCaptivePortalMode(mContext, CAPTIVE_PORTAL_MODE_PROMPT);
}
@After
public void tearDown() throws Exception {
+ setCaptivePortalMode(mContext, mOldCaptivePortalMode);
getInstrumentation().getUiAutomation().dropShellPermissionIdentity();
}
diff --git a/tests/tests/wifi/AndroidManifest.xml b/tests/tests/wifi/AndroidManifest.xml
index c618fc5..412841a 100644
--- a/tests/tests/wifi/AndroidManifest.xml
+++ b/tests/tests/wifi/AndroidManifest.xml
@@ -42,7 +42,8 @@
(as opposed to HTTPS). -->
<application android:usesCleartextTraffic="true">
<uses-library android:name="android.test.runner"/>
- <activity android:name=".WaitForResultActivity" android:screenOrientation="portrait"/>
+ <activity android:name=".WaitForResultActivity"
+ android:configChanges="orientation|screenSize|screenLayout|keyboardHidden"/>
<service
android:name="android.net.wifi.sharedconnectivity.service.cts.TestSharedConnectivityService"
android:exported="true">
diff --git a/tests/video/src/android/video/cts/HevcVp9ClaimsPerformanceTest.java b/tests/video/src/android/video/cts/HevcVp9ClaimsPerformanceTest.java
new file mode 100644
index 0000000..046b2c5
--- /dev/null
+++ b/tests/video/src/android/video/cts/HevcVp9ClaimsPerformanceTest.java
@@ -0,0 +1,116 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.video.cts;
+
+import static org.junit.Assert.assertTrue;
+
+import android.media.MediaFormat;
+import android.mediav2.common.cts.CodecTestBase;
+import android.mediav2.common.cts.CodecTestBase.ComponentClass;
+
+import androidx.test.filters.SmallTest;
+
+import com.android.compatibility.common.util.CddTest;
+import com.android.compatibility.common.util.MediaUtils;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.List;
+
+/**
+ * The CDD requires devices to support certain combinations of resolution and frame rate for
+ * video processing. Some of these requirements vary based on the presence of a hardware codec.
+ * <p>
+ * This test is an extension to {@link VideoCodecClaimsPerformanceTest}. It checks whether the
+ * device claims to support decoding at least one of H.265 or VP9 at 720p, 1080p and UHD
+ * resolutions.
+ */
+@RunWith(Parameterized.class)
+public class HevcVp9ClaimsPerformanceTest {
+ private final List<VideoCodecClaimsPerformanceTestBase> mBaseInstances = new ArrayList<>();
+
+ public HevcVp9ClaimsPerformanceTest(List<String> mediaTypes, int width, int height,
+ int fps, boolean isEncoder, ComponentClass componentClass, String allTestParams) {
+ for (String mediaType : mediaTypes) {
+ mBaseInstances.add(
+ new VideoCodecClaimsPerformanceTestBase(mediaType, width, height, fps,
+ isEncoder, componentClass, allTestParams));
+ }
+ }
+
+ @Parameterized.Parameters(name = "{index}_{0}_{1}_{2}_{3}_{4}")
+ public static Collection<Object[]> input() {
+ final boolean isDispHtAtleastUHD = CodecTestBase.MAX_DISPLAY_HEIGHT_LAND >= 2160;
+ final boolean isDispHtAtleastFHD = CodecTestBase.MAX_DISPLAY_HEIGHT_LAND >= 1080;
+ final boolean isDispHtAtleastHD = CodecTestBase.MAX_DISPLAY_HEIGHT_LAND >= 720;
+
+ // mediaTypes, width, height, fps, isEncoder, componentClass
+ final List<Object[]> argsList = new ArrayList<>();
+
+ // Video Decoder Requirements
+ // hevc, vp9
+ // 5.3.5/C-2-1, 5.3.7/C-3-1
+ if (isDispHtAtleastHD) {
+ argsList.add(new Object[]{new ArrayList<>(Arrays.asList(MediaFormat.MIMETYPE_VIDEO_HEVC,
+ MediaFormat.MIMETYPE_VIDEO_VP9)), 1280, 720, 30, false, ComponentClass.ALL});
+ }
+ if (isDispHtAtleastFHD) {
+ argsList.add(new Object[]{new ArrayList<>(Arrays.asList(MediaFormat.MIMETYPE_VIDEO_HEVC,
+ MediaFormat.MIMETYPE_VIDEO_VP9)), 1920, 1080, 30, false, ComponentClass.ALL});
+ if (MediaUtils.isTv()) {
+ argsList.add(new Object[]{new ArrayList<>(
+ Arrays.asList(MediaFormat.MIMETYPE_VIDEO_HEVC,
+ MediaFormat.MIMETYPE_VIDEO_VP9)), 1920, 1080, 60, false,
+ ComponentClass.HARDWARE});
+ }
+ }
+ if (isDispHtAtleastUHD) {
+ argsList.add(new Object[]{new ArrayList<>(Arrays.asList(MediaFormat.MIMETYPE_VIDEO_HEVC,
+ MediaFormat.MIMETYPE_VIDEO_VP9)), 3840, 2160, 60, false, ComponentClass.ALL});
+ }
+
+ int argLength = argsList.get(0).length;
+ final List<Object[]> updatedArgsList = new ArrayList<>();
+ for (Object[] arg : argsList) {
+ Object[] argUpdate = new Object[argLength + 1];
+ System.arraycopy(arg, 0, argUpdate, 0, argLength);
+ argUpdate[argLength] = CodecTestBase.paramToString(argUpdate);
+ updatedArgsList.add(argUpdate);
+ }
+ return updatedArgsList;
+ }
+
+ /**
+ * Check description of class {@link HevcVp9ClaimsPerformanceTest}
+ */
+ @CddTest(requirements = {"5.3.5/C-2-1", "5.3.7/C-3-1"})
+ @SmallTest
+ @Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_SMALL_TEST_MS)
+ public void testDeviceClaimsPerformanceSupported() {
+ boolean result = false;
+ StringBuilder testConfig = new StringBuilder();
+ for (VideoCodecClaimsPerformanceTestBase baseInstance : mBaseInstances) {
+ result |= baseInstance.deviceClaimsPerformanceSupported();
+ testConfig.append(baseInstance.mTestConfig);
+ }
+ assertTrue(testConfig.toString(), result);
+ }
+}
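
HevcVp9ClaimsPerformanceTest delegates the actual capability check to
VideoCodecClaimsPerformanceTestBase, which is not part of this change. Purely as an illustration of
what such a claim means, a decoder's advertised resolution/frame-rate support can be queried
through the public codec-capabilities APIs; the helper name anyDecoderClaims is an assumption, not
the test base's implementation:

    // Illustration only: does any decoder for mediaType advertise support for width x height at fps?
    static boolean anyDecoderClaims(String mediaType, int width, int height, double fps) {
        for (android.media.MediaCodecInfo info : new android.media.MediaCodecList(
                android.media.MediaCodecList.REGULAR_CODECS).getCodecInfos()) {
            if (info.isEncoder()) continue;
            try {
                android.media.MediaCodecInfo.VideoCapabilities vc =
                        info.getCapabilitiesForType(mediaType).getVideoCapabilities();
                if (vc != null && vc.areSizeAndRateSupported(width, height, fps)) {
                    return true;
                }
            } catch (IllegalArgumentException ignored) {
                // this codec does not support mediaType
            }
        }
        return false;
    }
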
diff --git a/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionBFrameTest.java b/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionBFrameTest.java
index d8cbecb..5cf38f5 100644
--- a/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionBFrameTest.java
+++ b/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionBFrameTest.java
@@ -103,7 +103,7 @@
ArrayList<MediaFormat> fmts = new ArrayList<>();
for (int j = 0; j < cfgs.length; j++) {
cfgs[j] = getVideoEncoderCfgParams(mMediaType, WIDTH, HEIGHT, BIT_RATES[j],
- mBitRateMode, KEY_FRAME_INTERVAL, FRAME_RATE, B_FRAMES[i]);
+ mBitRateMode, KEY_FRAME_INTERVAL, FRAME_RATE, B_FRAMES[i], null);
fmts.add(cfgs[j].getFormat());
}
Assume.assumeTrue("Encoder: " + mCodecName + " doesn't support formats.",
@@ -112,7 +112,7 @@
}
Predicate<Double> predicate = bdRate -> bdRate < 0.000001d;
getQualityRegressionForCfgs(cfgsUnion, testInstances, encoderNames, res, FRAME_LIMIT,
- FRAME_RATE, true, predicate);
+ FRAME_RATE, null, true, predicate);
}
@ApiTest(apis = {"android.media.MediaFormat#KEY_BITRATE",
diff --git a/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionCodecTest.java b/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionCodecTest.java
index 599fda1..e1c4353 100644
--- a/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionCodecTest.java
+++ b/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionCodecTest.java
@@ -104,7 +104,8 @@
ArrayList<MediaFormat> fmts = new ArrayList<>();
for (int j = 0; j < cfgsOfMediaType.length; j++) {
cfgsOfMediaType[j] = getVideoEncoderCfgParams(mediaTypes[i], WIDTH, HEIGHT,
- BIT_RATES[j], mBitRateMode, KEY_FRAME_INTERVAL, FRAME_RATE, B_FRAMES[0]);
+ BIT_RATES[j], mBitRateMode, KEY_FRAME_INTERVAL, FRAME_RATE, B_FRAMES[0],
+ null);
fmts.add(cfgsOfMediaType[j].getFormat());
}
if (mediaTypes[i].equals(mMediaType)) {
@@ -121,6 +122,6 @@
}
Predicate<Double> predicate = bdRate -> bdRate < 0d;
getQualityRegressionForCfgs(cfgsUnion, testInstances, encoderNames, res, FRAME_LIMIT,
- FRAME_RATE, true, predicate);
+ FRAME_RATE, null, true, predicate);
}
}
diff --git a/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionTestBase.java b/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionTestBase.java
index 4b768ee..9c3b996 100644
--- a/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionTestBase.java
+++ b/tests/videocodec/src/android/videocodec/cts/VideoEncoderQualityRegressionTestBase.java
@@ -25,10 +25,12 @@
import static org.junit.Assume.assumeNotNull;
import static org.junit.Assume.assumeTrue;
+import android.graphics.Rect;
import android.mediav2.common.cts.CompareStreams;
import android.mediav2.common.cts.EncoderConfigParams;
import android.mediav2.common.cts.RawResource;
import android.util.Log;
+import android.util.Pair;
import org.junit.After;
import org.junit.AfterClass;
@@ -40,6 +42,7 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
+import java.util.Map;
import java.util.function.Predicate;
/**
@@ -101,16 +104,19 @@
protected static EncoderConfigParams getVideoEncoderCfgParams(String mediaType, int width,
int height, int bitRate, int bitRateMode, int keyFrameInterval, int frameRate,
- int maxBFrames) {
- return new EncoderConfigParams.Builder(mediaType)
+ int maxBFrames, Pair<String, Boolean> feature) {
+ EncoderConfigParams.Builder foreman = new EncoderConfigParams.Builder(mediaType)
.setWidth(width)
.setHeight(height)
.setBitRate(bitRate)
.setBitRateMode(bitRateMode)
.setKeyFrameInterval(keyFrameInterval)
.setFrameRate(frameRate)
- .setMaxBFrames(maxBFrames)
- .build();
+ .setMaxBFrames(maxBFrames);
+ if (feature != null) {
+ foreman.setFeature(feature.first, feature.second);
+ }
+ return foreman.build();
}
private native double nativeGetBDRate(double[] qualitiesA, double[] ratesA, double[] qualitiesB,
@@ -118,7 +124,8 @@
protected void getQualityRegressionForCfgs(List<EncoderConfigParams[]> cfgsUnion,
VideoEncoderValidationTestBase[] testInstances, String[] encoderNames, RawResource res,
- int frameLimit, int frameRate, boolean setLoopBack, Predicate<Double> predicate)
+ int frameLimit, int frameRate, Map<Long, List<Rect>> frameCropRects,
+ boolean setLoopBack, Predicate<Double> predicate)
throws IOException, InterruptedException {
assertEquals("Quality comparison is done between two sets", 2, cfgsUnion.size());
assertTrue("Minimum of 4 points are required for polynomial curve fitting",
@@ -140,7 +147,7 @@
CompareStreams cs = null;
try {
cs = new CompareStreams(res, mediaType,
- testInstances[i].getMuxedOutputFilePath(), true, true);
+ testInstances[i].getMuxedOutputFilePath(), frameCropRects, true, true);
final double[] globalPSNR = cs.getGlobalPSNR();
double weightedPSNR = (6 * globalPSNR[0] + globalPSNR[1] + globalPSNR[2]) / 8;
psnrs[i][j] = weightedPSNR;
diff --git a/tests/videocodec/src/android/videocodec/cts/VideoEncoderRoiTest.java b/tests/videocodec/src/android/videocodec/cts/VideoEncoderRoiTest.java
new file mode 100644
index 0000000..b8450f3a
--- /dev/null
+++ b/tests/videocodec/src/android/videocodec/cts/VideoEncoderRoiTest.java
@@ -0,0 +1,316 @@
+/*
+ * Copyright (C) 2024 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.videocodec.cts;
+
+import static android.media.codec.Flags.FLAG_REGION_OF_INTEREST;
+import static android.media.MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR;
+import static android.media.MediaFormat.QpOffsetRect;
+import static android.mediav2.common.cts.CodecTestBase.ComponentClass.HARDWARE;
+import static android.mediav2.common.cts.CodecTestBase.areFormatsSupported;
+import static android.mediav2.common.cts.CodecTestBase.isFeatureSupported;
+import static android.mediav2.common.cts.CodecTestBase.prepareParamList;
+import static android.videocodec.cts.VideoEncoderInput.SELFIEGROUP_FULLHD_PORTRAIT;
+import static android.videocodec.cts.VideoEncoderInput.getRawResource;
+import static android.videocodec.cts.VideoEncoderInput.CompressedResource;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assume.assumeTrue;
+
+import android.graphics.Rect;
+import android.media.MediaCodec;
+import android.media.MediaCodecInfo;
+import android.media.MediaFormat;
+import android.mediav2.common.cts.EncoderConfigParams;
+import android.mediav2.common.cts.RawResource;
+import android.os.Build;
+import android.os.Bundle;
+import android.platform.test.annotations.AppModeFull;
+import android.platform.test.annotations.LargeTest;
+import android.platform.test.annotations.RequiresFlagsEnabled;
+import android.util.Pair;
+
+import androidx.test.filters.SdkSuppress;
+
+import com.android.compatibility.common.util.ApiTest;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+
+import java.io.IOException;
+import java.lang.reflect.Field;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.Predicate;
+
+/**
+ * Test for Feature_Roi.
+ * <p>
+ * For encoders that support ROI encoding, the test configures region-of-interest (foreground and
+ * background) information and the corresponding QP offsets for various frames during encoding.
+ * The encoded output is then analyzed to check whether the ROI information was honored.
+ */
+@SdkSuppress(minSdkVersion = Build.VERSION_CODES.VANILLA_ICE_CREAM, codeName =
+ "VanillaIceCream")
+@AppModeFull(reason = "Instant apps cannot access the SD card")
+@RequiresFlagsEnabled(FLAG_REGION_OF_INTEREST)
+@RunWith(Parameterized.class)
+public class VideoEncoderRoiTest extends VideoEncoderQualityRegressionTestBase {
+ private static final int[] BIT_RATES = {800000, 1500000, 2000000, 3000000, 4000000};
+ private static final int FRAME_LIMIT = 10;
+ private static final int FRAME_RATE = 30;
+ private static final int KEY_FRAME_INTERVAL = 600;
+ private static final double EXPECTED_BD_RATE = 0d;
+ private static final int MAX_B_FRAMES = 0;
+ private static final String[] FEATURES = {null, MediaCodecInfo.CodecCapabilities.FEATURE_Roi};
+ private static final int BLOCK_WD = 16;
+ private static final int BLOCK_HT = 16;
+ private static final List<Object[]> exhaustiveArgsList = new ArrayList<>();
+
+ public enum RoiType {
+ ROI_TYPE_RECTS,
+ ROI_TYPE_MAP
+ }
+
+ private final RoiType mRoiType;
+
+ /**
+ * Helper class for {@link VideoEncoderRoiTest}
+ */
+ public static class VideoEncoderRoiHelper extends VideoEncoderValidationTestBase {
+ private final Map<Long, List<QpOffsetRect>> mRoiMetadata;
+ private final RoiType mRoiType;
+
+ VideoEncoderRoiHelper(String encoder, String mediaType, EncoderConfigParams encCfgParams,
+ Map<Long, List<QpOffsetRect>> roiMetadata, RoiType roiType, String allTestParams) {
+ super(encoder, mediaType, encCfgParams, allTestParams);
+ mRoiMetadata = roiMetadata;
+ mRoiType = roiType;
+ }
+
+ private List<QpOffsetRect> getRoiMetadataForPts(Long pts) {
+ final int roundToleranceUs = 10;
+ if (mRoiMetadata.containsKey(pts)) return mRoiMetadata.get(pts);
+ for (Map.Entry<Long, List<QpOffsetRect>> entry : mRoiMetadata.entrySet()) {
+ Long keyPts = entry.getKey();
+ if (Math.abs(keyPts - pts) < roundToleranceUs) {
+ return entry.getValue();
+ }
+ }
+ return null;
+ }
+
+ private int clamp(int val, int min, int max) {
+ return Math.max(min, Math.min(max, val));
+ }
+
+ private void fillOffsetArray(int arrayStride, byte[] qpOffsetArray,
+ QpOffsetRect qpOffsetRect) throws NoSuchFieldException, IllegalAccessException {
+ Field mContour = QpOffsetRect.class.getDeclaredField("mContour");
+ mContour.setAccessible(true);
+ Rect contour = (Rect) mContour.get(qpOffsetRect);
+ Field mQpOffset = QpOffsetRect.class.getDeclaredField("mQpOffset");
+ mQpOffset.setAccessible(true);
+ contour.left = clamp(contour.left, 0, mActiveEncCfg.mWidth);
+ contour.top = clamp(contour.top, 0, mActiveEncCfg.mHeight);
+ contour.right = clamp(contour.right, 0, mActiveEncCfg.mWidth);
+ contour.bottom = clamp(contour.bottom, 0, mActiveEncCfg.mHeight);
+ int qpOffset = (int) mQpOffset.get(qpOffsetRect);
+ for (int t = contour.top / BLOCK_HT; t < contour.bottom / BLOCK_HT; t++) {
+ for (int l = contour.left / BLOCK_WD; l < contour.right / BLOCK_WD; l++) {
+ qpOffsetArray[t * arrayStride + l] = (byte) qpOffset;
+ }
+ }
+ }
+
+ @Override
+ protected void enqueueInput(int bufferIndex) {
+ long pts = mInputOffsetPts + mInputCount * 1000000L / mActiveEncCfg.mFrameRate;
+ List<QpOffsetRect> qpOffsetRects = getRoiMetadataForPts(pts);
+ if (qpOffsetRects != null) {
+ Bundle param = new Bundle();
+ if (Objects.equals(mRoiType, RoiType.ROI_TYPE_RECTS)) {
+ param.putString(MediaCodec.PARAMETER_KEY_QP_OFFSET_RECTS,
+ QpOffsetRect.flattenToString(qpOffsetRects));
+ } else if (Objects.equals(mRoiType, RoiType.ROI_TYPE_MAP)) {
+ int alignedWidth =
+ ((mActiveEncCfg.mWidth + (BLOCK_WD - 1)) / BLOCK_WD) * BLOCK_WD;
+ int alignedHeight =
+ ((mActiveEncCfg.mHeight + (BLOCK_HT - 1)) / BLOCK_HT) * BLOCK_HT;
+ int arraySize = (alignedWidth / BLOCK_WD) * (alignedHeight / BLOCK_HT);
+ int arrayStride = alignedWidth / BLOCK_WD;
+ byte[] qpOffsetArray = new byte[arraySize];
+ for (int i = qpOffsetRects.size() - 1; i >= 0; i--) {
+ try {
+ fillOffsetArray(arrayStride, qpOffsetArray, qpOffsetRects.get(i));
+ } catch (NoSuchFieldException | IllegalAccessException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ param.putByteArray(MediaCodec.PARAMETER_KEY_QP_OFFSET_MAP, qpOffsetArray);
+ }
+ mCodec.setParameters(param);
+ }
+ super.enqueueInput(bufferIndex);
+ }
+ }
+
+ private static void addParams(CompressedResource cRes, RoiType roiType) {
+ final String[] mediaTypes =
+ new String[]{MediaFormat.MIMETYPE_VIDEO_AVC, MediaFormat.MIMETYPE_VIDEO_HEVC,
+ MediaFormat.MIMETYPE_VIDEO_VP9, MediaFormat.MIMETYPE_VIDEO_AV1};
+ RESOURCES.add(cRes);
+ for (String mediaType : mediaTypes) {
+ // mediaType, cfg
+ exhaustiveArgsList.add(new Object[]{mediaType, cRes, roiType});
+ }
+ }
+
+ @Parameterized.Parameters(name = "{index}_{0}_{1}_{3}")
+ public static Collection<Object[]> input() {
+ addParams(SELFIEGROUP_FULLHD_PORTRAIT, RoiType.ROI_TYPE_RECTS);
+ addParams(SELFIEGROUP_FULLHD_PORTRAIT, RoiType.ROI_TYPE_MAP);
+ return prepareParamList(exhaustiveArgsList, true, false, true, false, HARDWARE);
+ }
+
+ public VideoEncoderRoiTest(String encoder, String mediaType, CompressedResource cRes,
+ RoiType roiType, String allTestParams) {
+ super(encoder, mediaType, cRes, allTestParams);
+ mRoiType = roiType;
+ }
+
+ public Map<Long, List<Rect>> getPtsRectMap(Map<Long, List<QpOffsetRect>> roiMetadata)
+ throws NoSuchFieldException, IllegalAccessException {
+ Map<Long, List<Rect>> ptsRectMap = new HashMap<>();
+ for (Map.Entry<Long, List<QpOffsetRect>> entry :
+ roiMetadata.entrySet()) {
+ Long keyPts = entry.getKey();
+ List<QpOffsetRect> qpOffsetRects = entry.getValue();
+ List<Rect> rects = new ArrayList<>();
+ for (QpOffsetRect qpOffsetRect : qpOffsetRects) {
+ Field mQpOffset = QpOffsetRect.class.getDeclaredField("mQpOffset");
+ mQpOffset.setAccessible(true);
+ if ((int) mQpOffset.get(qpOffsetRect) < 0) {
+ Field mContour = QpOffsetRect.class.getDeclaredField("mContour");
+ mContour.setAccessible(true);
+ rects.add((Rect) mContour.get(qpOffsetRect));
+ }
+ }
+ ptsRectMap.put(keyPts, rects);
+ }
+ return ptsRectMap;
+ }
+
+ @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_Roi",
+ "android.media.MediaCodec#PARAMETER_KEY_QP_OFFSET_MAP",
+ "android.media.MediaCodec#PARAMETER_KEY_QP_OFFSET_RECTS",
+ "android.media.MediaFormat#QpOffsetRect"})
+ @LargeTest
+ @Test
+ public void testRoiSupport()
+ throws IOException, InterruptedException, NoSuchFieldException, IllegalAccessException {
+ assumeTrue(mCodecName + " does not support FEATURE_Roi",
+ isFeatureSupported(mCodecName, mMediaType,
+ MediaCodecInfo.CodecCapabilities.FEATURE_Roi));
+ RawResource res = getRawResource(mCRes);
+ Map<Long, List<QpOffsetRect>> roiMetadata = GenerateRoiMetadata.ROI_INFO.get(mCRes);
+ assertNotNull("no roi metadata found for resource " + mCRes.uniqueLabel(), roiMetadata);
+ VideoEncoderValidationTestBase[] testInstances =
+ {new VideoEncoderValidationTestBase(null, mMediaType, null,
+ mAllTestParams), new VideoEncoderRoiHelper(null, mMediaType, null,
+ roiMetadata, mRoiType, mAllTestParams)};
+ String[] encoderNames = new String[FEATURES.length];
+ List<EncoderConfigParams[]> cfgsUnion = new ArrayList<>();
+ for (int i = 0; i < FEATURES.length; i++) {
+ EncoderConfigParams[] cfgs = new EncoderConfigParams[BIT_RATES.length];
+ cfgsUnion.add(cfgs);
+ ArrayList<MediaFormat> fmts = new ArrayList<>();
+ for (int j = 0; j < cfgs.length; j++) {
+ Pair<String, Boolean> feature = new Pair<>(FEATURES[i], FEATURES[i] != null);
+ cfgs[j] = getVideoEncoderCfgParams(mMediaType, res.mWidth, res.mHeight,
+ BIT_RATES[j], BITRATE_MODE_VBR, KEY_FRAME_INTERVAL, FRAME_RATE,
+ MAX_B_FRAMES, feature);
+ fmts.add(cfgs[j].getFormat());
+ }
+ assumeTrue("Encoder: " + mCodecName + " doesn't support formats.",
+ areFormatsSupported(mCodecName, mMediaType, fmts));
+ encoderNames[i] = mCodecName;
+ }
+ Predicate<Double> predicate = bdRate -> bdRate <= EXPECTED_BD_RATE;
+ Map<Long, List<Rect>> frameCropRects = getPtsRectMap(roiMetadata);
+ getQualityRegressionForCfgs(cfgsUnion, testInstances, encoderNames, res, FRAME_LIMIT,
+ FRAME_RATE, frameCropRects, false, predicate);
+ }
+}
+
+/**
+ * Generates ROI Metadata for {@link VideoEncoderRoiTest}.
+ */
+class GenerateRoiMetadata {
+ static final Map<CompressedResource, Map<Long, List<QpOffsetRect>>> ROI_INFO =
+ new HashMap<>();
+
+ static {
+ Map<Long, List<QpOffsetRect>> roiMetadata = new HashMap<>();
+ roiMetadata.put(0L, new ArrayList<>(
+ Arrays.asList(new QpOffsetRect(new Rect(694, 668, 991, 1487), -5),
+ new QpOffsetRect(new Rect(18, 627, 770, 1957), -5),
+ new QpOffsetRect(new Rect(0, 0, 1080, 600), 5))));
+ roiMetadata.put(33333L, new ArrayList<>(
+ Arrays.asList(new QpOffsetRect(new Rect(688, 643, 991, 1531), -5),
+ new QpOffsetRect(new Rect(21, 645, 762, 1946), -5),
+ new QpOffsetRect(new Rect(0, 0, 1080, 600), 5))));
+ roiMetadata.put(66666L, new ArrayList<>(
+ Arrays.asList(new QpOffsetRect(new Rect(673, 613, 965, 1562), -5),
+ new QpOffsetRect(new Rect(26, 636, 761, 1945), -5),
+ new QpOffsetRect(new Rect(0, 0, 1080, 600), 5))));
+ roiMetadata.put(100000L, new ArrayList<>(
+ Arrays.asList(new QpOffsetRect(new Rect(672, 642, 949, 1541), -5),
+ new QpOffsetRect(new Rect(15, 639, 867, 1956), -5),
+ new QpOffsetRect(new Rect(0, 0, 1080, 600), 5))));
+ roiMetadata.put(133333L, new ArrayList<>(
+ Arrays.asList(new QpOffsetRect(new Rect(657, 668, 944, 1499), -5),
+ new QpOffsetRect(new Rect(20, 638, 761, 1957), -5),
+ new QpOffsetRect(new Rect(0, 0, 1080, 600), 5))));
+ roiMetadata.put(166666L, new ArrayList<>(
+ Arrays.asList(new QpOffsetRect(new Rect(643, 674, 942, 1526), -5),
+ new QpOffsetRect(new Rect(8, 647, 761, 1946), -5),
+ new QpOffsetRect(new Rect(0, 0, 1080, 600), 5))));
+ roiMetadata.put(200000L, new ArrayList<>(
+ Arrays.asList(new QpOffsetRect(new Rect(638, 694, 940, 1472), -5),
+ new QpOffsetRect(new Rect(4, 653, 769, 1939), -5),
+ new QpOffsetRect(new Rect(0, 0, 1080, 600), 5))));
+ roiMetadata.put(233333L, new ArrayList<>(
+ Arrays.asList(new QpOffsetRect(new Rect(630, 693, 953, 1472), -5),
+ new QpOffsetRect(new Rect(15, 652, 764, 1936), -5),
+ new QpOffsetRect(new Rect(0, 0, 1080, 600), 5))));
+ roiMetadata.put(266666L, new ArrayList<>(
+ Arrays.asList(new QpOffsetRect(new Rect(627, 687, 961, 1486), -5),
+ new QpOffsetRect(new Rect(20, 661, 752, 1939), -5),
+ new QpOffsetRect(new Rect(0, 0, 1080, 600), 5))));
+ roiMetadata.put(300000L, new ArrayList<>(
+ Arrays.asList(new QpOffsetRect(new Rect(634, 682, 926, 1466), -5),
+ new QpOffsetRect(new Rect(18, 644, 758, 1946), -5),
+ new QpOffsetRect(new Rect(0, 0, 1080, 600), 5))));
+ ROI_INFO.put(SELFIEGROUP_FULLHD_PORTRAIT, roiMetadata);
+ }
+}
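
For context on the parameters exercised by VideoEncoderRoiTest above, this is a minimal
application-side sketch of the rects form of ROI signalling. The encoder argument is assumed to be
an already-configured MediaCodec in encode mode; the rectangles reuse illustrative values from
GenerateRoiMetadata, and the offsets apply to frames queued after the call:

    // Sketch: request finer quantization inside the subject rectangles and coarser quantization elsewhere.
    static void applyRoi(android.media.MediaCodec encoder) {
        java.util.List<android.media.MediaFormat.QpOffsetRect> rois = java.util.Arrays.asList(
                new android.media.MediaFormat.QpOffsetRect(
                        new android.graphics.Rect(694, 668, 991, 1487), -5),  // foreground, lower QP
                new android.media.MediaFormat.QpOffsetRect(
                        new android.graphics.Rect(0, 0, 1080, 600), 5));      // background, higher QP
        android.os.Bundle params = new android.os.Bundle();
        params.putString(android.media.MediaCodec.PARAMETER_KEY_QP_OFFSET_RECTS,
                android.media.MediaFormat.QpOffsetRect.flattenToString(rois));
        encoder.setParameters(params);
    }
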
diff --git a/tools/cts-device-info/Android.bp b/tools/cts-device-info/Android.bp
index 5af6361..0af395b 100644
--- a/tools/cts-device-info/Android.bp
+++ b/tools/cts-device-info/Android.bp
@@ -36,6 +36,37 @@
"catbox",
"gcatbox",
"ats",
+ "mcts",
+ "xrts",
+ "mcts-adbd",
+ "mcts-adservices",
+ "mcts-appsearch",
+ "mcts-art",
+ "mcts-bluetooth",
+ "mcts-cellbroadcast",
+ "mcts-configinfrastructure",
+ "mcts-conscrypt",
+ "mcts-cronet",
+ "mcts-dnsresolver",
+ "mcts-documentsui",
+ "mcts-extservices",
+ "mcts-healthfitness",
+ "mcts-ipsec",
+ "mcts-mainline-infra",
+ "mcts-media",
+ "mcts-mediaprovider",
+ "mcts-networking",
+ "mcts-neuralnetworks",
+ "mcts-ondevicepersonalization",
+ "mcts-permission",
+ "mcts-rkpd",
+ "mcts-scheduling",
+ "mcts-sdkextensions",
+ "mcts-statsd",
+ "mcts-tethering",
+ "mcts-tzdata",
+ "mcts-uwb",
+ "mcts-wifi",
],
static_libs: [
"compatibility-device-info",
@@ -63,6 +94,7 @@
" -r android.permission.READ_PHONE_STATE " +
" -r android.permission.WRITE_EXTERNAL_STORAGE " +
" -r android.permission.ACCESS_FINE_LOCATION " +
+ " -r android.permission.QUERY_ALL_PACKAGES" +
" -a com.android.compatibility.common.deviceinfo.GlesStubActivity " +
" -a com.android.cts.deviceinfo.CameraDeviceInfo " +
" -a com.android.cts.deviceinfo.SensorDeviceInfo " +
diff --git a/tools/cts-tradefed/res/config/cts-known-failures.xml b/tools/cts-tradefed/res/config/cts-known-failures.xml
index cd24d81..2d5c63d 100644
--- a/tools/cts-tradefed/res/config/cts-known-failures.xml
+++ b/tools/cts-tradefed/res/config/cts-known-failures.xml
@@ -298,6 +298,13 @@
<option name="compatibility:exclude-filter" value="CtsShortcutManagerTestCases[foldable:3:OPEN] android.content.pm.cts.shortcutmanager.ShortcutManagerUsageTest#testReportShortcutUsed" />
<option name="compatibility:exclude-filter" value="CtsShortcutManagerTestCases[foldable:4:CONCURRENT_INNER_DEFAULT] android.content.pm.cts.shortcutmanager.ShortcutManagerUsageTest#testReportShortcutUsed" />
<option name="compatibility:exclude-filter" value="CtsShortcutManagerTestCases[foldable:5:REAR_DUAL] android.content.pm.cts.shortcutmanager.ShortcutManagerUsageTest#testReportShortcutUsed" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutManagerTestCases android.content.pm.cts.shortcutmanager.ShortcutManagerUsageTest#testReportShortcutUsed" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutManagerTestCases[instant] android.content.pm.cts.shortcutmanager.ShortcutManagerUsageTest#testReportShortcutUsed" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutManagerTestCases[foldable:1:TENT] android.content.pm.cts.shortcutmanager.ShortcutManagerUsageTest#testReportShortcutUsed" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutManagerTestCases[foldable:2:HALF_FOLDED] android.content.pm.cts.shortcutmanager.ShortcutManagerUsageTest#testReportShortcutUsed" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutManagerTestCases[foldable:3:OPEN] android.content.pm.cts.shortcutmanager.ShortcutManagerUsageTest#testReportShortcutUsed" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutManagerTestCases[foldable:4:CONCURRENT_INNER_DEFAULT] android.content.pm.cts.shortcutmanager.ShortcutManagerUsageTest#testReportShortcutUsed" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutManagerTestCases[foldable:5:REAR_DUAL] android.content.pm.cts.shortcutmanager.ShortcutManagerUsageTest#testReportShortcutUsed" />
<!-- b/326656256 -->
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
@@ -307,6 +314,13 @@
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:3:OPEN] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:4:CONCURRENT_INNER_DEFAULT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:5:REAR_DUAL] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[instant] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:1:TENT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:2:HALF_FOLDED] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:3:OPEN] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:4:CONCURRENT_INNER_DEFAULT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:5:REAR_DUAL] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_downgrade" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[instant] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:1:TENT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
@@ -314,6 +328,13 @@
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:3:OPEN] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:4:CONCURRENT_INNER_DEFAULT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:5:REAR_DUAL] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[instant] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:1:TENT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:2:HALF_FOLDED] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:3:OPEN] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:4:CONCURRENT_INNER_DEFAULT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:5:REAR_DUAL] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_noManifestOnOldVersion" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[instant] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:1:TENT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
@@ -321,10 +342,35 @@
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:3:OPEN] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:4:CONCURRENT_INNER_DEFAULT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
<option name="compatibility:exclude-filter" value="CtsShortcutHostTestCases[foldable:5:REAR_DUAL] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[instant] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:1:TENT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:2:HALF_FOLDED] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:3:OPEN] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:4:CONCURRENT_INNER_DEFAULT] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
+ <option name="compatibility:exclude-filter" value="arm64-v8a CtsShortcutHostTestCases[foldable:5:REAR_DUAL] android.content.pm.cts.shortcuthost.ShortcutManagerBackupTest#testBackupAndRestore_invisibleIgnored" />
<!-- b/328010720 -->
<option name="compatibility:exclude-filter" value="CtsMediaAudioTestCases android.media.audio.cts.AudioHalVersionInfoTest#test_VERSIONS_not_contains" />
<option name="compatibility:exclude-filter" value="CtsMediaAudioTestCases android.media.audio.cts.AudioHalVersionInfoTest#test_VERSIONS_contains" />
<option name="compatibility:exclude-filter" value="CtsMediaAudioTestCases[instant] android.media.audio.cts.AudioHalVersionInfoTest#test_VERSIONS_not_contains" />
<option name="compatibility:exclude-filter" value="CtsMediaAudioTestCases[instant] android.media.audio.cts.AudioHalVersionInfoTest#test_VERSIONS_contains" />
+
+ <!-- b/330276352 -->
+ <option name="compatibility:exclude-filter" value="CtsHealthConnectHostSideDeviceTestCases android.healthconnect.cts.device.HealthConnectDeviceTest#testAppCanReadChangeLogsUsingDataOriginFilters" />
+ <option name="compatibility:exclude-filter" value="CtsHealthConnectHostTestCases android.healthconnect.cts.logging.HealthConnectServiceStatsTests#testChangeLogTokenRequest" />
+ <option name="compatibility:exclude-filter" value="CtsHealthConnectHostTestCases android.healthconnect.cts.logging.HealthConnectServiceStatsTests#testChangeLogsRequest" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_insert_dataOrigin_filter_correct" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_insertAndDelete_dataOrigin_filter_correct" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_insertAndDelete_beforePermission" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_insertAndDelete_default" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_insert_default_withNextPageToken" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_insert_default" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_insert_dataOrigin_filter_incorrect" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_checkToken_hasMorePages_False" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_insert_default_withPageSize" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_insert_default_withSamePageToken" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testGetChangeLogToken" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectChangeLogsTests#testChangeLogs_insertAndDelete_dataOrigin_filter_incorrect" />
+ <option name="compatibility:exclude-filter" value="CtsHealthFitnessDeviceTestCases android.healthconnect.cts.HealthConnectManagerTest#testDataApis_migrationInProgress_apisBlocked" />
</configuration>