Merge "Match pid to check if package has been stopped" into stage-aosp-tm-ts-dev
diff --git a/apps/CameraITS/tests/scene3/test_edge_enhancement.py b/apps/CameraITS/tests/scene3/test_edge_enhancement.py
index c6a3bcb..c1b63c7 100644
--- a/apps/CameraITS/tests/scene3/test_edge_enhancement.py
+++ b/apps/CameraITS/tests/scene3/test_edge_enhancement.py
@@ -67,17 +67,17 @@
for n in range(NUM_SAMPLES):
cap = cam.do_capture(req, out_surface, repeat_request=req)
y, _, _ = image_processing_utils.convert_capture_to_planes(cap)
- chart.img = image_processing_utils.normalize_img(
- image_processing_utils.get_image_patch(
- y, chart.xnorm, chart.ynorm, chart.wnorm, chart.hnorm))
+ chart.img = image_processing_utils.get_image_patch(
+ y, chart.xnorm, chart.ynorm, chart.wnorm, chart.hnorm)
if n == 0:
image_processing_utils.write_image(
chart.img, '%s_edge=%d.jpg' % (
os.path.join(log_path, NAME), edge_mode))
edge_mode_res = cap['metadata']['android.edge.mode']
sharpness_list.append(
- image_processing_utils.compute_image_sharpness(chart.img))
-
+ image_processing_utils.compute_image_sharpness(chart.img)*255)
+ logging.debug('edge mode: %d, sharpness values: %s',
+ edge_mode_res, sharpness_list)
return {'edge_mode': edge_mode_res, 'sharpness': np.mean(sharpness_list)}
@@ -89,7 +89,6 @@
"""
def test_edge_enhancement(self):
- logging.debug('Starting %s', NAME)
with its_session_utils.ItsSession(
device_id=self.dut.serial,
camera_id=self.camera_id,
diff --git a/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py b/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py
index c5e9b19..84ae071 100644
--- a/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py
+++ b/apps/CameraITS/tests/scene3/test_reprocess_edge_enhancement.py
@@ -107,16 +107,15 @@
caps = cam.do_capture([req]*NUM_SAMPLES, [out_surface], reprocess_format)
for n in range(NUM_SAMPLES):
y, _, _ = image_processing_utils.convert_capture_to_planes(caps[n])
- chart.img = image_processing_utils.normalize_img(
- image_processing_utils.get_image_patch(
- y, chart.xnorm, chart.ynorm, chart.wnorm, chart.hnorm))
+ chart.img = image_processing_utils.get_image_patch(
+ y, chart.xnorm, chart.ynorm, chart.wnorm, chart.hnorm)
if n == 0:
image_processing_utils.write_image(
chart.img, '%s_reprocess_fmt_%s_edge=%d.jpg' % (
os.path.join(log_path, NAME), reprocess_format, edge_mode))
edge_mode_res = caps[n]['metadata']['android.edge.mode']
sharpness_list.append(
- image_processing_utils.compute_image_sharpness(chart.img))
+ image_processing_utils.compute_image_sharpness(chart.img)*255)
logging.debug('Sharpness list for edge mode %d: %s',
edge_mode, str(sharpness_list))
return {'edge_mode': edge_mode_res, 'sharpness': np.mean(sharpness_list)}
@@ -134,7 +133,6 @@
"""
def test_reprocess_edge_enhancement(self):
- logging.debug('Starting %s', NAME)
logging.debug('Edge modes: %s', str(EDGE_MODES))
with its_session_utils.ItsSession(
device_id=self.dut.serial,
@@ -179,9 +177,10 @@
# Initialize plot
pylab.figure('reprocess_result')
- pylab.title(NAME)
- pylab.xlabel('Edge Enhance Mode')
- pylab.ylabel('Sharpness')
+ pylab.suptitle(NAME)
+ pylab.title(str(EDGE_MODES))
+ pylab.xlabel('Edge Enhancement Mode')
+ pylab.ylabel('Image Sharpness')
pylab.xticks(EDGE_MODES_VALUES)
# Get the sharpness for each edge mode for regular requests
diff --git a/apps/CtsVerifier/AndroidManifest.xml b/apps/CtsVerifier/AndroidManifest.xml
index 4cd70c6..3d6fdb2 100644
--- a/apps/CtsVerifier/AndroidManifest.xml
+++ b/apps/CtsVerifier/AndroidManifest.xml
@@ -2052,6 +2052,9 @@
<meta-data android:name="test_required_features" android:value="android.hardware.wifi" />
<meta-data android:name="display_mode"
android:value="multi_display_mode" />
+ <meta-data android:name="CddTest" android:value="7.4.5.2" />
+ <meta-data android:name="ApiTest"
+ android:value="android.net.ConnectivityManager#registerNetworkCallback|android.net.ConnectivityManager#unregisterNetworkCallback|android.net.ConnectivityManager#getLinkProperties" />
</activity>
<activity android:name=".net.MultiNetworkConnectivityTestActivity"
@@ -2068,6 +2071,8 @@
android:value="android.hardware.type.television:android.software.leanback:android.hardware.type.watch" />
<meta-data android:name="display_mode"
android:value="multi_display_mode" />
+ <meta-data android:name="ApiTest"
+ android:value="android.net.ConnectivityManager#getNetworkCapabilities|android.net.ConnectivityManager#getAllNetworks|android.net.ConnectivityManager#requestNetwork|android.net.ConnectivityManager#unregisterNetworkCallback|android.net.ConnectivityManager#getActiveNetwork|android.net.ConnectivityManager#getNetworkInfo|android.net.ConnectivityManager#reportNetworkConnectivity" />
</activity>
<activity android:name=".nfc.NfcTestActivity"
@@ -2861,6 +2866,32 @@
android:value="android.hardware.type.automotive"/>
<meta-data android:name="display_mode"
android:value="single_display_mode" />
+ <meta-data android:name="ApiTest"
+ android:value="android.hardware.Camera#getParameters|
+ android.hardware.Camera#lock|
+ android.hardware.Camera#setDisplayOrientation|
+ android.hardware.Camera#setPreviewCallback|
+ android.hardware.Camera#setParameters|
+ android.hardware.Camera#setPreviewTexture|
+ android.hardware.Camera#startPreview|
+ android.hardware.Camera#stopPreview|
+ android.hardware.Camera#unlock|
+ android.media.MediaRecorder#prepare|
+ android.media.MediaRecorder#release|
+ android.media.MediaRecorder#reset|
+ android.media.MediaRecorder#setAudioEncoder|
+ android.media.MediaRecorder#setAudioSource|
+ android.media.MediaRecorder#setCamera|
+ android.media.MediaRecorder#setOnErrorListener|
+ android.media.MediaRecorder#setOutputFormat|
+ android.media.MediaRecorder#setOutputFile|
+ android.media.MediaRecorder#setProfile|
+ android.media.MediaRecorder#setVideoEncoder|
+ android.media.MediaRecorder#setVideoEncodingBitRate|
+ android.media.MediaRecorder#setVideoSize|
+ android.media.MediaRecorder#setVideoSource|
+ android.media.MediaRecorder#start|
+ android.media.MediaRecorder#stop" />
</activity>
<activity android:name=".camera.its.ItsTestActivity"
diff --git a/apps/CtsVerifier/res/values/strings.xml b/apps/CtsVerifier/res/values/strings.xml
index 526645ef..8cd2151 100644
--- a/apps/CtsVerifier/res/values/strings.xml
+++ b/apps/CtsVerifier/res/values/strings.xml
@@ -32,6 +32,16 @@
<string name="finish_button_text">Finish</string>
<string name="fail_and_next_button_text">Fail and Next</string>
+ <!-- Strings for CtsReportLog warning -->
+ <string name="reportlog_warning_title">CTS-Verifier Report Log</string>
+ <string name="reportlog_warning_body">Can\'t create folder for CTS-Verifier Report Logs.
+ \n\nPlease enable Report Log creation by exiting CTS Verifier and running the following commands:
+ \n\n<code>adb shell appops set com.android.cts.verifier android:read_device_identifiers allow</code>
+ \n\n<code>adb shell appops set com.android.cts.verifier MANAGE_EXTERNAL_STORAGE 0</code>
+ \n\nTest instructions are found in the \"Using CTS Verifier\" document found at
+ <a href="https://source.android.com/compatibility/cts/verifier">https://source.android.com/compatibility/cts/verifier</a>
+ </string>
+
<!-- Strings for TestListActivity -->
<string name="test_category_audio">Audio</string>
<string name="test_category_camera">Camera</string>
@@ -5480,6 +5490,7 @@
<string name="audio_general_test_not_run">Test Not Run</string>
<string name="audio_general_testnotcompleted">Test not completed.</string>
+ <string name="audio_general_reportlogtest">[Can\'t Write ReportLog]</string>
<!-- Audio Loopback Latency Test -->
<string name="audio_loopback_latency_test">Audio Loopback Latency Test</string>
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/CtsVerifierReportLog.java b/apps/CtsVerifier/src/com/android/cts/verifier/CtsVerifierReportLog.java
index b013bb7..a93e3b5 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/CtsVerifierReportLog.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/CtsVerifierReportLog.java
@@ -74,6 +74,10 @@
}
}
+ public boolean isOpen() {
+ return mStore != null;
+ }
+
/**
* Closes report file. Static functions that do not have access to instrumentation can
* use this to close report logs. Summary, if present, is not reported to instrumentation, hence
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java b/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java
index 0294ff7..7b993ee 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/PassFailButtons.java
@@ -32,7 +32,6 @@
import android.os.Bundle;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
-import android.util.Log;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
@@ -146,6 +145,8 @@
private final CtsVerifierReportLog mReportLog;
private final TestResultHistoryCollection mHistoryCollection;
+ protected boolean mRequireReportLogToPass;
+
public Activity() {
this.mReportLog = new CtsVerifierReportLog(getReportFileName(), getReportSectionName());
this.mHistoryCollection = new TestResultHistoryCollection();
@@ -159,6 +160,10 @@
.newWakeLock(PowerManager.SCREEN_DIM_WAKE_LOCK, "PassFailButtons");
mWakeLock.acquire();
}
+
+ if (!this.mReportLog.isOpen()) {
+ showReportLogWarningDialog(this);
+ }
}
@Override
@@ -212,6 +217,14 @@
}
/**
+ * A mechanism to block tests from passing if no ReportLog data has been collected.
+ * @return true if the ReportLog is open OR if the test does not require that.
+ */
+ public boolean isReportLogOkToPass() {
+ return !mRequireReportLogToPass || mReportLog.isOpen();
+ }
+
+ /**
* @return The name of the file to store the (suite of) ReportLog information.
*/
@Override
@@ -527,6 +540,12 @@
activity.showDialog(INFO_DIALOG_ID, args);
}
+ protected static void showReportLogWarningDialog(final android.app.Activity activity) {
+ showInfoDialog(activity,
+ R.string.reportlog_warning_title, R.string.reportlog_warning_body, -1);
+ }
+
+
protected static Dialog createDialog(final android.app.Activity activity, int id, Bundle args) {
switch (id) {
case INFO_DIALOG_ID:
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AnalogHeadsetAudioActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AnalogHeadsetAudioActivity.java
index 528d914..83b32de 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AnalogHeadsetAudioActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AnalogHeadsetAudioActivity.java
@@ -182,12 +182,15 @@
mResultsTxt.setText(getResources().getString(R.string.analog_headset_pass_noheadset));
return true;
} else {
- boolean pass = mPlugIntentReceived &&
- mHeadsetDeviceInfo != null &&
- mPlaybackSuccess &&
- (mHasHeadsetHook || mHasPlayPause) && mHasVolUp && mHasVolDown;
+ boolean pass = isReportLogOkToPass()
+ && mPlugIntentReceived
+ && mHeadsetDeviceInfo != null
+ && mPlaybackSuccess
+ && (mHasHeadsetHook || mHasPlayPause) && mHasVolUp && mHasVolDown;
if (pass) {
mResultsTxt.setText(getResources().getString(R.string.analog_headset_pass));
+ } else if (!isReportLogOkToPass()) {
+ mResultsTxt.setText(getResources().getString(R.string.audio_general_reportlogtest));
}
return pass;
}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioAEC.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioAEC.java
index 6b9bf05..3f1266e 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioAEC.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioAEC.java
@@ -496,8 +496,10 @@
Log.v(TAG, "Test EndedOk. " + testId + " str:"+str);
showProgressIndicator(false);
mResultTest.setText("test completed. " + str);
- if (mTestAECPassed) {
- getPassButton().setEnabled(true);;
+ if (!isReportLogOkToPass()) {
+ mResultTest.setText(getResources().getString(R.string.audio_general_reportlogtest));
+ } else if (mTestAECPassed) {
+ getPassButton().setEnabled(true);
}
}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioLoopbackLatencyActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioLoopbackLatencyActivity.java
index a6bd4ad..79d3e3b 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioLoopbackLatencyActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioLoopbackLatencyActivity.java
@@ -294,6 +294,8 @@
getPassButton().setEnabled(false);
setInfoResources(R.string.audio_loopback_latency_test, R.string.audio_loopback_info, -1);
+ mRequireReportLogToPass = true;
+
mClaimsOutput = AudioSystemFlags.claimsOutput(this);
mClaimsInput = AudioSystemFlags.claimsInput(this);
mClaimsProAudio = AudioSystemFlags.claimsProAudio(this);
@@ -683,7 +685,8 @@
mResultsText[mTestRoute].setText(testSpec.getResultString());
LoopbackLatencyRequirements requirements = new LoopbackLatencyRequirements();
- boolean pass = requirements.evaluate(mClaimsProAudio,
+ boolean pass = isReportLogOkToPass()
+ && requirements.evaluate(mClaimsProAudio,
Build.VERSION.MEDIA_PERFORMANCE_CLASS,
mTestSpecs[TESTROUTE_DEVICE].isMeasurementValid()
? mTestSpecs[TESTROUTE_DEVICE].mMeanLatencyMS : 0.0,
@@ -694,8 +697,12 @@
getPassButton().setEnabled(pass);
- String resultText = requirements.getResultsString();
- mTestStatusText.setText(resultText);
+ StringBuilder sb = new StringBuilder();
+ if (!isReportLogOkToPass()) {
+ sb.append(getResources().getString(R.string.audio_general_reportlogtest) + "\n");
+ }
+ sb.append(requirements.getResultsString());
+ mTestStatusText.setText(sb.toString());
showWait(false);
enableStartButtons(true);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioTap2ToneActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioTap2ToneActivity.java
index 215d26f..8ff2358 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioTap2ToneActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/AudioTap2ToneActivity.java
@@ -154,6 +154,8 @@
String yesString = getResources().getString(R.string.audio_general_yes);
String noString = getResources().getString(R.string.audio_general_no);
+ mRequireReportLogToPass = true;
+
boolean claimsProAudio = AudioSystemFlags.claimsProAudio(this);
boolean claimsLowLatencyAudio = AudioSystemFlags.claimsLowLatencyAudio(this);
@@ -311,11 +313,14 @@
}
double averageLatency = mLatencyAve[mActiveTestAPI];
- boolean pass = averageLatency != 0 && averageLatency <= mMaxRequiredLatency;
+ boolean pass = isReportLogOkToPass()
+ && averageLatency != 0 && averageLatency <= mMaxRequiredLatency;
if (pass) {
mSpecView.setText("Average: " + averageLatency + " ms <= "
+ mMaxRequiredLatency + " ms -- PASS");
+ } else if (!isReportLogOkToPass()) {
+ mSpecView.setText(getResources().getString(R.string.audio_general_reportlogtest));
} else {
mSpecView.setText("Average: " + averageLatency + " ms > "
+ mMaxRequiredLatency + " ms -- FAIL");
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/ProAudioActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/ProAudioActivity.java
index 126d15f..e93d2b3 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/ProAudioActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/ProAudioActivity.java
@@ -158,11 +158,9 @@
boolean usbOK = mClaimsUSBHostMode && mClaimsUSBPeripheralMode;
boolean hdmiOK = !mClaimsHDMI || isHDMIValid();
- boolean hasPassed = !mClaimsProAudio ||
- (mClaimsLowLatencyAudio &&
- mClaimsMIDI &&
- usbOK &&
- hdmiOK);
+ boolean hasPassed = isReportLogOkToPass()
+ && (!mClaimsProAudio
+ || (mClaimsLowLatencyAudio && mClaimsMIDI && usbOK && hdmiOK));
getPassButton().setEnabled(hasPassed);
return hasPassed;
@@ -172,7 +170,9 @@
boolean hasPassed = calculatePass();
Resources strings = getResources();
- if (hasPassed) {
+ if (!isReportLogOkToPass()) {
+ mTestStatusLbl.setText(getResources().getString(R.string.audio_general_reportlogtest));
+ } else if (hasPassed) {
mTestStatusLbl.setText(strings.getString(R.string.audio_proaudio_pass));
} else if (!mClaimsMIDI) {
mTestStatusLbl.setText(strings.getString(R.string.audio_proaudio_midinotreported));
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/audio/USBAudioPeripheralNotificationsTest.java b/apps/CtsVerifier/src/com/android/cts/verifier/audio/USBAudioPeripheralNotificationsTest.java
index bc23048..6270a5a 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/audio/USBAudioPeripheralNotificationsTest.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/audio/USBAudioPeripheralNotificationsTest.java
@@ -20,23 +20,17 @@
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
-
import android.media.AudioDeviceCallback;
import android.media.AudioDeviceInfo;
import android.media.AudioManager;
-
import android.os.Bundle;
import android.os.Handler;
-
import android.util.Log;
-
import android.widget.TextView;
import com.android.compatibility.common.util.CddTest;
-import com.android.compatibility.common.util.ReportLog;
import com.android.compatibility.common.util.ResultType;
import com.android.compatibility.common.util.ResultUnit;
-
import com.android.cts.verifier.PassFailButtons;
import com.android.cts.verifier.R; // needed to access resource in CTSVerifier project namespace.
@@ -173,9 +167,10 @@
// Test Status
//
private boolean calculatePass() {
- return mUsbHeadsetInReceived && mUsbHeadsetOutReceived &&
- mUsbDeviceInReceived && mUsbDeviceOutReceived &&
- mPlugIntentReceived;
+ return isReportLogOkToPass()
+ && mUsbHeadsetInReceived && mUsbHeadsetOutReceived
+ && mUsbDeviceInReceived && mUsbDeviceOutReceived
+ && mPlugIntentReceived;
}
//
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/MediaCodecFlushActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/MediaCodecFlushActivity.java
index 055f26f..c0df10c 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/MediaCodecFlushActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/MediaCodecFlushActivity.java
@@ -66,9 +66,9 @@
private void playVideo() {
try {
- mPlayer.start();
mPlayer.prepare();
- mPlayer.startThread();
+ mPlayer.startCodec();
+ mPlayer.play();
mHandler.postDelayed(this::pauseStep, 5000);
} catch(Exception e) {
Log.d(TAG, "Could not play video", e);
@@ -95,7 +95,7 @@
private void resumeStep() {
try {
- mPlayer.start();
+ mPlayer.resume();
mHandler.postDelayed(this::enablePassButton, 3000);
} catch(Exception e) {
Log.d(TAG, "Could not resume video", e);
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/OWNERS b/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/OWNERS
new file mode 100644
index 0000000..4744ab8
--- /dev/null
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/OWNERS
@@ -0,0 +1,3 @@
+# Buganizer component id: 687598
+blindahl@google.com
+narcisaam@google.com
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/VolumeLevelChangesActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/VolumeLevelChangesActivity.java
index 0163c62f..c446143 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/VolumeLevelChangesActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/tunnelmode/VolumeLevelChangesActivity.java
@@ -209,10 +209,10 @@
private void playVideo() {
try {
- mPlayer.start();
mPlayer.prepare();
+ mPlayer.startCodec();
mPlayer.setLoopEnabled(true);
- mPlayer.startThread();
+ mPlayer.play();
} catch (Exception e) {
Log.d(TAG, "Could not play the video.", e);
}
diff --git a/apps/CtsVerifier/src/com/android/cts/verifier/tv/display/ModeSwitchingTestActivity.java b/apps/CtsVerifier/src/com/android/cts/verifier/tv/display/ModeSwitchingTestActivity.java
index 641ab20..94ef536 100644
--- a/apps/CtsVerifier/src/com/android/cts/verifier/tv/display/ModeSwitchingTestActivity.java
+++ b/apps/CtsVerifier/src/com/android/cts/verifier/tv/display/ModeSwitchingTestActivity.java
@@ -221,10 +221,10 @@
private void playVideo() {
try {
- mPlayer.start();
mPlayer.prepare();
+ mPlayer.startCodec();
mPlayer.setLoopEnabled(true);
- mPlayer.startThread();
+ mPlayer.play();
} catch (Exception e) {
Log.d(TAG, "Could not play video", e);
}
diff --git a/tests/framework/base/windowmanager/src/android/server/wm/TaskFragmentOrganizerPolicyTest.java b/tests/framework/base/windowmanager/src/android/server/wm/TaskFragmentOrganizerPolicyTest.java
index 318e6f7..530390d 100644
--- a/tests/framework/base/windowmanager/src/android/server/wm/TaskFragmentOrganizerPolicyTest.java
+++ b/tests/framework/base/windowmanager/src/android/server/wm/TaskFragmentOrganizerPolicyTest.java
@@ -196,14 +196,6 @@
null /* activityOptions */);
mTaskFragmentOrganizer.applyTransaction(wct);
-
- mTaskFragmentOrganizer.waitForTaskFragmentCreated();
-
- TaskFragmentInfo info = mTaskFragmentOrganizer.getTaskFragmentInfo(taskFragToken);
-
- // TaskFragment must remain empty because embedding activities in a new task is not allowed.
- assertEmptyTaskFragment(info, taskFragToken);
-
mTaskFragmentOrganizer.waitForTaskFragmentError();
assertThat(mTaskFragmentOrganizer.getThrowable()).isInstanceOf(SecurityException.class);
diff --git a/tests/framework/base/windowmanager/src/android/server/wm/TaskFragmentTrustedModeTest.java b/tests/framework/base/windowmanager/src/android/server/wm/TaskFragmentTrustedModeTest.java
index c13c4ee..9fd1a41 100644
--- a/tests/framework/base/windowmanager/src/android/server/wm/TaskFragmentTrustedModeTest.java
+++ b/tests/framework/base/windowmanager/src/android/server/wm/TaskFragmentTrustedModeTest.java
@@ -21,6 +21,7 @@
import static android.server.wm.jetpack.utils.ActivityEmbeddingUtil.assumeActivityEmbeddingSupportedDevice;
import static com.google.common.truth.Truth.assertThat;
+import static com.google.common.truth.Truth.assertWithMessage;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
@@ -33,6 +34,7 @@
import android.graphics.Rect;
import android.os.Binder;
import android.os.IBinder;
+import android.platform.test.annotations.Presubmit;
import android.server.wm.WindowManagerState.Task;
import android.window.TaskFragmentCreationParams;
import android.window.TaskFragmentInfo;
@@ -49,6 +51,7 @@
* Build/Install/Run:
* atest CtsWindowManagerDeviceTestCases:TaskFragmentTrustedModeTest
*/
+@Presubmit
public class TaskFragmentTrustedModeTest extends TaskFragmentOrganizerTestBase {
private final ComponentName mTranslucentActivity = new ComponentName(mContext,
@@ -248,7 +251,7 @@
*/
@Test
public void testUntrustedModeTaskFragment_startActivityInTaskFragmentOutsideOfParentBounds() {
- final Task parentTask = mWmState.getRootTask(mOwnerTaskId);
+ Task parentTask = mWmState.getRootTask(mOwnerTaskId);
final Rect parentBounds = new Rect(parentTask.getBounds());
final IBinder errorCallbackToken = new Binder();
final WindowContainerTransaction wct = new WindowContainerTransaction()
@@ -263,8 +266,11 @@
// It is disallowed to start activity to TaskFragment with bounds outside of its parent
// in untrusted mode.
assertTaskFragmentError(errorCallbackToken, SecurityException.class);
- mWmState.waitForAppTransitionIdleOnDisplay(mOwnerActivity.getDisplayId());
- mWmState.assertNotExist(SECOND_UNTRUSTED_EMBEDDING_ACTIVITY);
+
+ parentTask = mWmState.getRootTask(mOwnerTaskId);
+ assertWithMessage("Activity must be started in parent Task because it's not"
+ + " allowed to be embedded").that(parentTask.mActivities).contains(
+ mWmState.getActivity(SECOND_UNTRUSTED_EMBEDDING_ACTIVITY));
}
/**
diff --git a/tests/location/location_fine/src/android/location/cts/fine/GeocoderTest.java b/tests/location/location_fine/src/android/location/cts/fine/GeocoderTest.java
index 08ddca1..6516de9 100644
--- a/tests/location/location_fine/src/android/location/cts/fine/GeocoderTest.java
+++ b/tests/location/location_fine/src/android/location/cts/fine/GeocoderTest.java
@@ -33,10 +33,12 @@
import android.content.pm.PackageManager.ResolveInfoFlags;
import android.location.Geocoder;
import android.location.Geocoder.GeocodeListener;
+import android.platform.test.annotations.AppModeFull;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
+import com.android.compatibility.common.util.ApiTest;
import com.android.compatibility.common.util.RetryRule;
import org.junit.Before;
@@ -73,6 +75,8 @@
}
}
+ @ApiTest(apis = "android.location.Geocoder#getFromLocation")
+ @AppModeFull(reason = "b/238831704 - Test cases don't apply for Instant apps")
@Test
public void testGetFromLocation() {
assumeTrue(Geocoder.isPresent());
@@ -82,6 +86,7 @@
verify(listener, timeout(10000)).onGeocode(anyList());
}
+ @ApiTest(apis = "android.location.Geocoder#getFromLocation")
@Test
public void testGetFromLocation_sync() throws Exception {
assumeTrue(Geocoder.isPresent());
@@ -89,6 +94,7 @@
mGeocoder.getFromLocation(60, 30, 5);
}
+ @ApiTest(apis = "android.location.Geocoder#getFromLocation")
@Test
public void testGetFromLocation_badInput() {
GeocodeListener listener = mock(GeocodeListener.class);
@@ -102,6 +108,7 @@
() -> mGeocoder.getFromLocation(10, 181, 5, listener));
}
+ @ApiTest(apis = "android.location.Geocoder#getFromLocationName")
@Test
public void testGetFromLocationName() {
assumeTrue(Geocoder.isPresent());
@@ -111,6 +118,7 @@
verify(listener, timeout(10000)).onGeocode(anyList());
}
+ @ApiTest(apis = "android.location.Geocoder#getFromLocationName")
@Test
public void testGetFromLocationName_sync() throws Exception {
assumeTrue(Geocoder.isPresent());
@@ -118,6 +126,7 @@
mGeocoder.getFromLocationName("Dalvik,Iceland", 5);
}
+ @ApiTest(apis = "android.location.Geocoder#getFromLocationName")
@Test
public void testGetFromLocationName_badInput() {
GeocodeListener listener = mock(GeocodeListener.class);
diff --git a/tests/media/src/android/mediav2/cts/CodecInfoTest.java b/tests/media/src/android/mediav2/cts/CodecInfoTest.java
index 39b90ff..14ebd28 100644
--- a/tests/media/src/android/mediav2/cts/CodecInfoTest.java
+++ b/tests/media/src/android/mediav2/cts/CodecInfoTest.java
@@ -161,7 +161,7 @@
// For devices launching with Android T, if a codec supports an HDR profile and device
// supports HDR display, it must advertise P010 support
int[] HdrProfileArray = mProfileHdrMap.get(mMediaType);
- if (FIRST_SDK_IS_AT_LEAST_T && HdrProfileArray != null && DISPLAY_HDR_TYPES.length > 0) {
+ if (VNDK_IS_AT_LEAST_T && HdrProfileArray != null && DISPLAY_HDR_TYPES.length > 0) {
for (CodecProfileLevel pl : caps.profileLevels) {
if (IntStream.of(HdrProfileArray).anyMatch(x -> x == pl.profile)) {
assertFalse(mCodecInfo.getName() + " supports HDR profile " + pl.profile + "," +
diff --git a/tests/tests/graphics/src/android/graphics/cts/BitmapFactoryTest.java b/tests/tests/graphics/src/android/graphics/cts/BitmapFactoryTest.java
index 4766268..ef44528 100644
--- a/tests/tests/graphics/src/android/graphics/cts/BitmapFactoryTest.java
+++ b/tests/tests/graphics/src/android/graphics/cts/BitmapFactoryTest.java
@@ -41,6 +41,7 @@
import android.os.Build;
import android.os.Parcel;
import android.os.ParcelFileDescriptor;
+import android.os.SystemProperties;
import android.platform.test.annotations.LargeTest;
import android.platform.test.annotations.RequiresDevice;
import android.system.ErrnoException;
@@ -1012,6 +1013,9 @@
public void testDecode10BitHEIFTo10BitBitmap() {
assumeTrue(
"Test needs Android T.", ApiLevelUtil.isFirstApiAtLeast(Build.VERSION_CODES.TIRAMISU));
+ assumeTrue(
+ "Test needs VNDK at least T.",
+ SystemProperties.getInt("ro.vndk.version", 0) >= Build.VERSION_CODES.TIRAMISU);
assumeTrue("No 10-bit HEVC decoder, skip the test.", has10BitHEVCDecoder());
BitmapFactory.Options opt = new BitmapFactory.Options();
@@ -1028,6 +1032,9 @@
public void testDecode10BitHEIFTo8BitBitmap() {
assumeTrue(
"Test needs Android T.", ApiLevelUtil.isFirstApiAtLeast(Build.VERSION_CODES.TIRAMISU));
+ assumeTrue(
+ "Test needs VNDK at least T.",
+ SystemProperties.getInt("ro.vndk.version", 0) >= Build.VERSION_CODES.TIRAMISU);
assumeTrue("No 10-bit HEVC decoder, skip the test.", has10BitHEVCDecoder());
BitmapFactory.Options opt = new BitmapFactory.Options();
diff --git a/tests/tests/graphics/src/android/graphics/cts/ImageDecoderTest.java b/tests/tests/graphics/src/android/graphics/cts/ImageDecoderTest.java
index 6741c07..b6689d8 100644
--- a/tests/tests/graphics/src/android/graphics/cts/ImageDecoderTest.java
+++ b/tests/tests/graphics/src/android/graphics/cts/ImageDecoderTest.java
@@ -49,6 +49,7 @@
import android.media.MediaFormat;
import android.net.Uri;
import android.os.Build;
+import android.os.SystemProperties;
import android.util.DisplayMetrics;
import android.util.Size;
import android.util.TypedValue;
@@ -246,6 +247,9 @@
public void testDecode10BitHeif() {
assumeTrue(
"Test needs Android T.", ApiLevelUtil.isFirstApiAtLeast(Build.VERSION_CODES.TIRAMISU));
+ assumeTrue(
+ "Test needs VNDK at least T.",
+ SystemProperties.getInt("ro.vndk.version", 0) >= Build.VERSION_CODES.TIRAMISU);
assumeTrue("No 10-bit HEVC decoder, skip the test.", has10BitHEVCDecoder());
try {
diff --git a/tests/tests/media/common/src/android/media/cts/CodecState.java b/tests/tests/media/common/src/android/media/cts/CodecState.java
index 3565fc8..4aa9db4 100644
--- a/tests/tests/media/common/src/android/media/cts/CodecState.java
+++ b/tests/tests/media/common/src/android/media/cts/CodecState.java
@@ -169,13 +169,15 @@
}
}
- public void start() {
+ public void startCodec() {
mCodec.start();
mCodecInputBuffers = mCodec.getInputBuffers();
if (!mIsTunneled || mIsAudio) {
mCodecOutputBuffers = mCodec.getOutputBuffers();
}
+ }
+ public void play() {
if (mAudioTrack != null) {
mAudioTrack.play();
}
diff --git a/tests/tests/media/common/src/android/media/cts/MediaCodecClearKeyPlayer.java b/tests/tests/media/common/src/android/media/cts/MediaCodecClearKeyPlayer.java
index 5e4df7f..888cf23 100644
--- a/tests/tests/media/common/src/android/media/cts/MediaCodecClearKeyPlayer.java
+++ b/tests/tests/media/common/src/android/media/cts/MediaCodecClearKeyPlayer.java
@@ -487,11 +487,13 @@
}
for (CodecState state : mVideoCodecStates.values()) {
- state.start();
+ state.startCodec();
+ state.play();
}
for (CodecState state : mAudioCodecStates.values()) {
- state.start();
+ state.startCodec();
+ state.play();
}
mDeltaTimeUs = -1;
diff --git a/tests/tests/media/common/src/android/media/cts/MediaCodecTunneledPlayer.java b/tests/tests/media/common/src/android/media/cts/MediaCodecTunneledPlayer.java
index 0b495dd..0df95b1 100644
--- a/tests/tests/media/common/src/android/media/cts/MediaCodecTunneledPlayer.java
+++ b/tests/tests/media/common/src/android/media/cts/MediaCodecTunneledPlayer.java
@@ -44,11 +44,13 @@
/** State the player starts in, before configuration. */
private static final int STATE_IDLE = 1;
/** State of the player during initial configuration. */
- private static final int STATE_PREPARING = 2;
+ private static final int STATE_PREPARED = 2;
+ /** State of the player after starting the codecs */
+ private static final int STATE_STARTED = 3;
/** State of the player during playback. */
- private static final int STATE_PLAYING = 3;
- /** State of the player when configured but not playing. */
- private static final int STATE_PAUSED = 4;
+ private static final int STATE_PLAYING = 4;
+ /** State of the player when playback is paused. */
+ private static final int STATE_PAUSED = 5;
private Boolean mThreadStarted = false;
private byte[] mSessionId;
@@ -194,7 +196,12 @@
return true;
}
+ // Creates the extractors, identifies tracks and formats, and then calls MediaCodec.configure
public boolean prepare() throws IOException {
+ if (mState != STATE_IDLE) {
+ throw new IllegalStateException("Expected STATE_IDLE, got " + mState);
+ }
+
if (null == mAudioExtractor) {
mAudioExtractor = new MediaExtractor();
if (null == mAudioExtractor) {
@@ -237,9 +244,7 @@
return false;
}
- synchronized (mState) {
- mState = STATE_PAUSED;
- }
+ mState = STATE_PREPARED;
return true;
}
@@ -306,70 +311,56 @@
return format.containsKey(key) ? format.getInteger(key) : 0;
}
- public boolean start() {
+ // Calls MediaCodec.start
+ public void startCodec() {
Log.d(TAG, "start");
- synchronized (mState) {
- if (mState == STATE_PLAYING || mState == STATE_PREPARING) {
- return true;
- } else if (mState == STATE_IDLE) {
- mState = STATE_PREPARING;
- return true;
- } else if (mState != STATE_PAUSED) {
- throw new IllegalStateException("Expected STATE_PAUSED, got " + mState);
- }
-
- for (CodecState state : mVideoCodecStates.values()) {
- state.start();
- }
-
- for (CodecState state : mAudioCodecStates.values()) {
- state.start();
- }
-
- mDeltaTimeUs = -1;
- mState = STATE_PLAYING;
+ if (mState != STATE_PREPARED) {
+ throw new IllegalStateException("Expected STATE_PREPARED, got " + mState);
}
- return false;
+
+ for (CodecState state : mVideoCodecStates.values()) {
+ state.startCodec();
+ }
+
+ for (CodecState state : mAudioCodecStates.values()) {
+ state.startCodec();
+ }
+
+ mDeltaTimeUs = -1;
+ mState = STATE_STARTED;
}
- public void startWork() throws IOException, Exception {
- try {
- // Just change state from STATE_IDLE to STATE_PREPARING.
- start();
- // Extract media information from uri asset, and change state to STATE_PAUSED.
- prepare();
- // Start CodecState, and change from STATE_PAUSED to STATE_PLAYING.
- start();
- } catch (IOException e) {
- throw e;
+ // Starts the decoding threads and then starts AudioTrack playback
+ public void play() {
+ if (mState != STATE_STARTED) {
+ throw new IllegalStateException("Expected STATE_STARTED, got " + mState);
}
+ mState = STATE_PLAYING;
synchronized (mThreadStarted) {
mThreadStarted = true;
mThread.start();
}
- }
- public void startThread() {
- start();
- synchronized (mThreadStarted) {
- mThreadStarted = true;
- mThread.start();
+ for (CodecState state : mVideoCodecStates.values()) {
+ state.play();
+ }
+
+ for (CodecState state : mAudioCodecStates.values()) {
+ state.play();
}
}
- // Pauses the audio track
+ // Pauses playback by pausing the AudioTrack
public void pause() {
Log.d(TAG, "pause");
- synchronized (mState) {
- if (mState == STATE_PAUSED) {
- return;
- } else if (mState != STATE_PLAYING) {
- throw new IllegalStateException();
- }
+ if (mState != STATE_PLAYING) {
+ throw new IllegalStateException("Expected STATE_PLAYING, got " + mState);
+ }
+ synchronized (mState) {
for (CodecState state : mVideoCodecStates.values()) {
state.pause();
}
@@ -382,43 +373,60 @@
}
}
- public void flush() {
- Log.d(TAG, "flush");
+ // Resume playback when paused
+ public void resume() {
+ Log.d(TAG, "resume");
+
+ if (mState != STATE_PAUSED) {
+ throw new IllegalStateException("Expected STATE_PAUSED, got " + mState);
+ }
synchronized (mState) {
- if (mState == STATE_PLAYING || mState == STATE_PREPARING) {
- return;
+ for (CodecState state : mVideoCodecStates.values()) {
+ state.play();
}
for (CodecState state : mAudioCodecStates.values()) {
- state.flush();
+ state.play();
}
- for (CodecState state : mVideoCodecStates.values()) {
- state.flush();
- }
+ mState = STATE_PLAYING;
}
}
- /** Seek all tracks to their very beginning.
+ public void flush() {
+ Log.d(TAG, "flush");
+
+ if (mState != STATE_PAUSED) {
+ throw new IllegalStateException("Expected STATE_PAUSED, got " + mState);
+ }
+
+ for (CodecState state : mAudioCodecStates.values()) {
+ state.flush();
+ }
+
+ for (CodecState state : mVideoCodecStates.values()) {
+ state.flush();
+ }
+ }
+
+ /** Seek all tracks to the first sample time.
*
* @param presentationTimeOffsetUs The offset for the presentation time to start at.
* @throws IllegalStateException if the player is not paused
*/
public void seekToBeginning(long presentationTimeOffsetUs) {
Log.d(TAG, "seekToBeginning");
- synchronized (mState) {
- if (mState != STATE_PAUSED) {
- throw new IllegalStateException("Expected STATE_PAUSED, got " + mState);
- }
+ if (mState != STATE_PAUSED) {
+ throw new IllegalStateException("Expected STATE_PAUSED, got " + mState);
+ }
- for (CodecState state : mVideoCodecStates.values()) {
- state.seekToBeginning(presentationTimeOffsetUs);
- }
+ for (CodecState state : mVideoCodecStates.values()) {
+ state.seekToBeginning(presentationTimeOffsetUs);
+ }
- for (CodecState state : mAudioCodecStates.values()) {
- state.seekToBeginning(presentationTimeOffsetUs);
- }
+ for (CodecState state : mAudioCodecStates.values()) {
+ state.seekToBeginning(presentationTimeOffsetUs);
}
}
@@ -426,53 +434,50 @@
* Enables or disables looping. Should be called after {@link #prepare()}.
*/
public void setLoopEnabled(boolean enabled) {
- synchronized (mState) {
- if (mVideoCodecStates != null) {
- for (CodecState state : mVideoCodecStates.values()) {
- state.setLoopEnabled(enabled);
- }
- }
+ if (mState != STATE_PREPARED) {
+ throw new IllegalStateException("Expected STATE_PREPARED, got " + mState);
+ }
- if (mAudioCodecStates != null) {
- for (CodecState state : mAudioCodecStates.values()) {
- state.setLoopEnabled(enabled);
- }
- }
+ for (CodecState state : mVideoCodecStates.values()) {
+ state.setLoopEnabled(enabled);
+ }
+
+ for (CodecState state : mAudioCodecStates.values()) {
+ state.setLoopEnabled(enabled);
}
}
public void reset() {
- synchronized (mState) {
- if (mState == STATE_PLAYING) {
- pause();
- }
- if (mVideoCodecStates != null) {
- for (CodecState state : mVideoCodecStates.values()) {
- state.release();
- }
- mVideoCodecStates = null;
- }
-
- if (mAudioCodecStates != null) {
- for (CodecState state : mAudioCodecStates.values()) {
- state.release();
- }
- mAudioCodecStates = null;
- }
-
- if (mAudioExtractor != null) {
- mAudioExtractor.release();
- mAudioExtractor = null;
- }
-
- if (mVideoExtractor != null) {
- mVideoExtractor.release();
- mVideoExtractor = null;
- }
-
- mDurationUs = -1;
- mState = STATE_IDLE;
+ if (mState == STATE_PLAYING) {
+ pause();
}
+ if (mVideoCodecStates != null) {
+ for (CodecState state : mVideoCodecStates.values()) {
+ state.release();
+ }
+ mVideoCodecStates = null;
+ }
+
+ if (mAudioCodecStates != null) {
+ for (CodecState state : mAudioCodecStates.values()) {
+ state.release();
+ }
+ mAudioCodecStates = null;
+ }
+
+ if (mAudioExtractor != null) {
+ mAudioExtractor.release();
+ mAudioExtractor = null;
+ }
+
+ if (mVideoExtractor != null) {
+ mVideoExtractor.release();
+ mVideoExtractor = null;
+ }
+
+ mDurationUs = -1;
+ mState = STATE_IDLE;
+
synchronized (mThreadStarted) {
mThreadStarted = false;
}
@@ -607,6 +612,14 @@
return mVideoCodecStates.get(0).getVideoTimeUs();
}
+ public long getVideoSystemTimeNs() {
+ if (mVideoCodecStates == null || mVideoCodecStates.get(0) == null) {
+ return -1;
+ }
+ return mVideoCodecStates.get(0).getVideoTimeUs();
+
+ }
+
/**
* Returns the ordered list of video frame timestamps rendered in tunnel mode.
*
@@ -658,24 +671,6 @@
}
/**
- * Resume playback when paused.
- *
- * @throws IllegalStateException if playback is not paused or if there is no configured audio
- * track.
- */
- public void resume() {
- Log.d(TAG, "resume");
- if (mAudioTrackState == null) {
- throw new IllegalStateException("Resuming playback with no audio track");
- }
- if (mState != STATE_PAUSED) {
- throw new IllegalStateException("Expected STATE_PAUSED, got " + mState);
- }
- mAudioTrackState.playAudioTrack();
- mState = STATE_PLAYING;
- }
-
- /**
* Configure video peek for the video codecs attached to the player.
*/
public void setVideoPeek(boolean enable) {
diff --git a/tests/tests/media/decoder/src/android/media/decoder/cts/DecodeAccuracyTest.java b/tests/tests/media/decoder/src/android/media/decoder/cts/DecodeAccuracyTest.java
index 145cfaf..c982376 100644
--- a/tests/tests/media/decoder/src/android/media/decoder/cts/DecodeAccuracyTest.java
+++ b/tests/tests/media/decoder/src/android/media/decoder/cts/DecodeAccuracyTest.java
@@ -25,7 +25,6 @@
import android.content.Context;
import android.graphics.Bitmap;
import android.media.MediaFormat;
-import android.media.cts.MediaCodecTunneledPlayer;
import android.media.cts.MediaHeavyPresubmitTest;
import android.media.cts.TestArgs;
import android.os.Environment;
diff --git a/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java b/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java
index 0b8c505..9926f04 100644
--- a/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java
+++ b/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java
@@ -73,6 +73,7 @@
import androidx.test.filters.SdkSuppress;
import com.android.compatibility.common.util.ApiLevelUtil;
+import com.android.compatibility.common.util.ApiTest;
import com.android.compatibility.common.util.CddTest;
import com.android.compatibility.common.util.DeviceReportLog;
import com.android.compatibility.common.util.DynamicConfigDeviceSide;
@@ -141,8 +142,6 @@
private DisplayManager mDisplayManager;
static final Map<String, String> sDefaultDecoders = new HashMap<>();
- private static boolean mIsAtLeastS = ApiLevelUtil.isAtLeast(Build.VERSION_CODES.S);
-
protected static AssetFileDescriptor getAssetFileDescriptorFor(final String res)
throws FileNotFoundException {
File inpFile = new File(mInpPrefix + res);
@@ -3884,11 +3883,10 @@
Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
- assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
+ mMediaCodecPlayer.startCodec();
- // starts video playback
- mMediaCodecPlayer.startThread();
+ mMediaCodecPlayer.play();
sleepUntil(() ->
mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
&& mMediaCodecPlayer.getTimestamp() != null
@@ -3921,8 +3919,8 @@
/**
* Test tunneled video playback mode with HEVC if supported
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledVideoPlaybackHevc() throws Exception {
tunneledVideoPlayback(MediaFormat.MIMETYPE_VIDEO_HEVC,
"video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
@@ -3931,8 +3929,8 @@
/**
* Test tunneled video playback mode with AVC if supported
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledVideoPlaybackAvc() throws Exception {
tunneledVideoPlayback(MediaFormat.MIMETYPE_VIDEO_AVC,
"video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
@@ -3941,8 +3939,8 @@
/**
* Test tunneled video playback mode with VP9 if supported
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledVideoPlaybackVp9() throws Exception {
tunneledVideoPlayback(MediaFormat.MIMETYPE_VIDEO_VP9,
"bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
@@ -3966,11 +3964,10 @@
Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
- assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
+ mMediaCodecPlayer.startCodec();
- // starts video playback
- mMediaCodecPlayer.startThread();
+ mMediaCodecPlayer.play();
sleepUntil(() ->
mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
&& mMediaCodecPlayer.getTimestamp() != null
@@ -3990,8 +3987,8 @@
/**
* Test tunneled video playback flush with HEVC if supported
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledVideoFlushHevc() throws Exception {
testTunneledVideoFlush(MediaFormat.MIMETYPE_VIDEO_HEVC,
"video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
@@ -4000,8 +3997,8 @@
/**
* Test tunneled video playback flush with AVC if supported
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledVideoFlushAvc() throws Exception {
testTunneledVideoFlush(MediaFormat.MIMETYPE_VIDEO_AVC,
"video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
@@ -4010,23 +4007,19 @@
/**
* Test tunneled video playback flush with VP9 if supported
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledVideoFlushVp9() throws Exception {
testTunneledVideoFlush(MediaFormat.MIMETYPE_VIDEO_VP9,
"bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
}
/**
- * Test tunneled video peek renders the first frame when on
+ * Test that the first frame is rendered when video peek is on in tunneled mode.
*
* TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
*/
private void testTunneledVideoPeekOn(String mimeType, String videoName) throws Exception {
- if (!MediaUtils.check(mIsAtLeastS, "testTunneledVideoPeekOn requires Android 12")) {
- return;
- }
-
if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
"No tunneled video playback codec found for MIME " + mimeType)) {
return;
@@ -4040,9 +4033,8 @@
Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
- assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
- mMediaCodecPlayer.start();
+ mMediaCodecPlayer.startCodec();
mMediaCodecPlayer.setVideoPeek(true); // Enable video peek
// Assert that onFirstTunnelFrameReady is called
@@ -4061,30 +4053,30 @@
}
/**
- * Test tunneled video peek with HEVC renders the first frame when on
+ * Test that the first frame is rendered when video peek is on for HEVC in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
public void testTunneledVideoPeekOnHevc() throws Exception {
testTunneledVideoPeekOn(MediaFormat.MIMETYPE_VIDEO_HEVC,
"video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
}
/**
- * Test tunneled video peek with AVC renders the first frame when on
+ * Test that the first frame is rendered when video peek is on for AVC in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
public void testTunneledVideoPeekOnAvc() throws Exception {
testTunneledVideoPeekOn(MediaFormat.MIMETYPE_VIDEO_AVC,
"video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
}
/**
- * Test tunneled video peek with VP9 renders the first frame when on
+ * Test that the first frame is rendered when video peek is on for VP9 in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
public void testTunneledVideoPeekOnVp9() throws Exception {
testTunneledVideoPeekOn(MediaFormat.MIMETYPE_VIDEO_VP9,
"bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
@@ -4092,15 +4084,11 @@
/**
- * Test tunneled video peek doesn't render the first frame when off and then turned on
+ * Test that peek off doesn't render the first frame until turned on in tunneled mode.
*
* TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
*/
private void testTunneledVideoPeekOff(String mimeType, String videoName) throws Exception {
- if (!MediaUtils.check(mIsAtLeastS, "testTunneledVideoPeekOff requires Android 12")) {
- return;
- }
-
if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
"No tunneled video playback codec found for MIME " + mimeType)) {
return;
@@ -4114,9 +4102,8 @@
Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
- assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
- mMediaCodecPlayer.start();
+ mMediaCodecPlayer.startCodec();
mMediaCodecPlayer.setVideoPeek(false); // Disable video peek
// Assert that onFirstTunnelFrameReady is called
@@ -4142,75 +4129,40 @@
}
/**
- * Test tunneled video peek with HEVC doesn't render the first frame when off and then turned on
+ * Test that peek off doesn't render the first frame until turned on for HEVC in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
public void testTunneledVideoPeekOffHevc() throws Exception {
testTunneledVideoPeekOff(MediaFormat.MIMETYPE_VIDEO_HEVC,
"video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
}
/**
- * Test tunneled video peek with AVC doesn't render the first frame when off and then turned on
+ * Test that peek off doesn't render the first frame until turned on for AVC in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
public void testTunneledVideoPeekOffAvc() throws Exception {
testTunneledVideoPeekOff(MediaFormat.MIMETYPE_VIDEO_AVC,
"video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
}
/**
- * Test tunneled video peek with VP9 doesn't render the first frame when off and then turned on
+ * Test that peek off doesn't render the first frame until turned on for VP9 in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
public void testTunneledVideoPeekOffVp9() throws Exception {
testTunneledVideoPeekOff(MediaFormat.MIMETYPE_VIDEO_VP9,
"bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
}
- /**
- * Test tunneled audio PTS gaps with HEVC if supported.
- * If there exist PTS Gaps in AudioTrack playback, the framePosition returned by
- * AudioTrack#getTimestamp must not advance for any silent frames rendered to fill the
- * gap.
- */
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
- @Test
- public void testTunneledAudioPtsGapsHevc() throws Exception {
- testTunneledAudioPtsGaps(MediaFormat.MIMETYPE_VIDEO_HEVC,
- "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
- }
-
- /**
- * Test tunneled audio PTS gaps with AVC if supported
- * If there exist PTS Gaps in AudioTrack playback, the framePosition returned by
- * AudioTrack#getTimestamp must not advance for any silent frames rendered to fill the
- * gap.
- */
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
- @Test
- public void testTunneledAudioPtsGapsAvc() throws Exception {
- testTunneledAudioPtsGaps(MediaFormat.MIMETYPE_VIDEO_AVC,
- "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
- }
-
- /**
- * Test tunneled audio PTS gaps with VP9 if supported
- * If there exist PTS Gaps in AudioTrack playback, the framePosition returned by
- * AudioTrack#getTimestamp must not advance for any silent frames rendered to fill the
- * gap.
- */
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
- @Test
- public void testTunneledAudioPtsGapsVp9() throws Exception {
- testTunneledAudioPtsGaps(MediaFormat.MIMETYPE_VIDEO_VP9,
- "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
- }
-
- private void testTunneledAudioPtsGaps(String mimeType, String fileName) throws Exception {
+ /**
+ * Test that audio timestamps don't progress during audio PTS gaps in tunneled mode.
+ */
+ private void testTunneledAudioProgressWithPtsGaps(String mimeType, String fileName)
+ throws Exception {
if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
"No tunneled video playback codec found for MIME " + mimeType)) {
return;
@@ -4224,11 +4176,10 @@
final Uri mediaUri = Uri.fromFile(new File(mInpPrefix, fileName));
mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
- assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
+ mMediaCodecPlayer.startCodec();
- // starts video playback
- mMediaCodecPlayer.startThread();
+ mMediaCodecPlayer.play();
sleepUntil(() ->
mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
&& mMediaCodecPlayer.getTimestamp() != null
@@ -4291,37 +4242,40 @@
}
/**
- * Test tunneled audioTimestamp progress with underrun, with HEVC if supported
+ * Test that audio timestamps don't progress during audio PTS gaps for HEVC in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
- public void testTunneledAudioTimestampProgressWithUnderrunHevc() throws Exception {
- testTunneledAudioTimestampProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_HEVC,
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
+ public void testTunneledAudioProgressWithPtsGapsHevc() throws Exception {
+ testTunneledAudioProgressWithPtsGaps(MediaFormat.MIMETYPE_VIDEO_HEVC,
"video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
}
/**
- * Test tunneled audioTimestamp progress with underrun, with AVC if supported.
+ * Test that audio timestamps don't progress during audio PTS gaps for AVC in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
- public void testTunneledAudioTimestampProgressWithUnderrunAvc() throws Exception {
- testTunneledAudioTimestampProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_AVC,
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
+ public void testTunneledAudioProgressWithPtsGapsAvc() throws Exception {
+ testTunneledAudioProgressWithPtsGaps(MediaFormat.MIMETYPE_VIDEO_AVC,
"video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
}
/**
- * Test tunneled audioTimestamp progress with underrun, with VP9 if supported.
+ * Test that audio timestamps don't progress during audio PTS gaps for VP9 in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
- public void testTunneledAudioTimestampProgressWithUnderrunVp9() throws Exception {
- testTunneledAudioTimestampProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_VP9,
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
+ public void testTunneledAudioProgressWithPtsGapsVp9() throws Exception {
+ testTunneledAudioProgressWithPtsGaps(MediaFormat.MIMETYPE_VIDEO_VP9,
"bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
}
- private void testTunneledAudioTimestampProgressWithUnderrun(
- String mimeType, String fileName) throws Exception {
+ /**
+ * Test that audio timestamps stop progressing during underrun in tunneled mode.
+ */
+ private void testTunneledAudioProgressWithUnderrun(String mimeType, String fileName)
+ throws Exception {
if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
"No tunneled video playback codec found for MIME " + mimeType)) {
return;
@@ -4335,11 +4289,10 @@
final Uri mediaUri = Uri.fromFile(new File(mInpPrefix, fileName));
mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
- assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
+ mMediaCodecPlayer.startCodec();
- // starts video playback
- mMediaCodecPlayer.startThread();
+ mMediaCodecPlayer.play();
sleepUntil(() ->
mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
&& mMediaCodecPlayer.getTimestamp() != null
@@ -4380,9 +4333,39 @@
}
/**
- * Test accurate video rendering after a video MediaCodec flush.
+ * Test that audio timestamps stop progressing during underrun for HEVC in tunneled mode.
+ */
+ @Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
+ public void testTunneledAudioProgressWithUnderrunHevc() throws Exception {
+ testTunneledAudioProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_HEVC,
+ "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
+ }
+
+ /**
+ * Test that audio timestamps stop progressing during underrun for AVC in tunneled mode.
+ */
+ @Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
+ public void testTunneledAudioProgressWithUnderrunAvc() throws Exception {
+ testTunneledAudioProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_AVC,
+ "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
+ }
+
+ /**
+ * Test that audio timestamps stop progressing during underrun for VP9 in tunneled mode.
+ */
+ @Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
+ public void testTunneledAudioProgressWithUnderrunVp9() throws Exception {
+ testTunneledAudioProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_VP9,
+ "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
+ }
+
+ /**
+ * Test accurate video rendering after a flush in tunneled mode.
*
- * On some devices, queuing content when the player is paused, then triggering a flush, then
+ * On some devices, queuing content when the player is paused, then triggering a flush, then
* queuing more content does not behave as expected. The queued content gets lost and the flush
* is really only applied once playback has resumed.
*
@@ -4390,10 +4373,6 @@
*/
private void testTunneledAccurateVideoFlush(String mimeType, String videoName)
throws Exception {
- if (!MediaUtils.check(mIsAtLeastS, "testTunneledAccurateVideoFlush requires Android 12")) {
- return;
- }
-
if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
"No tunneled video playback codec found for MIME " + mimeType)) {
return;
@@ -4414,15 +4393,14 @@
Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
- assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
+ mMediaCodecPlayer.startCodec();
// Video peek might interfere with the test: we want to ensure that queuing more data during
// a pause does not cause displaying more video frames, which is precisely what video peek
// does.
mMediaCodecPlayer.setVideoPeek(false);
- // starts video playback
- mMediaCodecPlayer.startThread();
+ mMediaCodecPlayer.play();
sleepUntil(() ->
mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
&& mMediaCodecPlayer.getTimestamp() != null
@@ -4434,22 +4412,72 @@
assertNotEquals("Audio timestamp has a zero frame position",
mMediaCodecPlayer.getTimestamp().framePosition, 0);
+ // Allow some time for playback to commence
+ Thread.sleep(500);
+
// Pause playback
mMediaCodecPlayer.pause();
- // Allow some time for playback to pause
- Thread.sleep(maxDrainTimeMs);
- // Verify that playback has paused
- long pauseAudioFramePositionUs = mMediaCodecPlayer.getTimestamp().framePosition;
- long pauseVideoPositionUs = mMediaCodecPlayer.getVideoTimeUs();
- Thread.sleep(maxDrainTimeMs);
- assertEquals(mMediaCodecPlayer.getTimestamp().framePosition, pauseAudioFramePositionUs);
+ // Wait for audio to pause
+ AudioTimestamp pauseAudioTimestamp;
+ {
+ AudioTimestamp currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
+ long startTimeMs = System.currentTimeMillis();
+ do {
+ // If it takes longer to pause, the UX won't feel responsive to the user
+ int audioPauseTimeoutMs = 250;
+ assertTrue(String.format("No audio pause after %d milliseconds",
+ audioPauseTimeoutMs),
+ System.currentTimeMillis() - startTimeMs < audioPauseTimeoutMs);
+ pauseAudioTimestamp = currentAudioTimestamp;
+ Thread.sleep(50);
+ currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
+ } while (currentAudioTimestamp.framePosition != pauseAudioTimestamp.framePosition);
+ }
+ long pauseAudioSystemTimeMs = pauseAudioTimestamp.nanoTime / 1000 / 1000;
+
+ // Wait for video to pause
+ long pauseVideoSystemTimeNs;
+ long pauseVideoPositionUs;
+ {
+ long currentVideoSystemTimeNs = mMediaCodecPlayer.getCurrentRenderedSystemTimeNano();
+ long startTimeMs = System.currentTimeMillis();
+ do {
+ int videoUnderrunTimeoutMs = 2000;
+ assertTrue(String.format("No video pause after %d milliseconds",
+ videoUnderrunTimeoutMs),
+ System.currentTimeMillis() - startTimeMs < videoUnderrunTimeoutMs);
+ pauseVideoSystemTimeNs = currentVideoSystemTimeNs;
+ Thread.sleep(250); // onFrameRendered can get delayed in the Framework
+ currentVideoSystemTimeNs = mMediaCodecPlayer.getCurrentRenderedSystemTimeNano();
+ } while (currentVideoSystemTimeNs != pauseVideoSystemTimeNs);
+ pauseVideoPositionUs = mMediaCodecPlayer.getVideoTimeUs();
+ }
+ long pauseVideoSystemTimeMs = pauseVideoSystemTimeNs / 1000 / 1000;
+
+ // Video should not continue running for a long period of time after audio pauses
+ long pauseVideoToleranceMs = 500;
+ assertTrue(String.format(
+ "Video ran %d milliseconds longer than audio (video:%d audio:%d)",
+ pauseVideoToleranceMs, pauseVideoSystemTimeMs, pauseAudioSystemTimeMs),
+ pauseVideoSystemTimeMs - pauseAudioSystemTimeMs < pauseVideoToleranceMs);
+
+ // Verify that playback stays paused
+ Thread.sleep(500);
+ assertEquals(mMediaCodecPlayer.getTimestamp().framePosition, pauseAudioTimestamp.framePosition);
+ assertEquals(mMediaCodecPlayer.getCurrentRenderedSystemTimeNano(), pauseVideoSystemTimeNs);
assertEquals(mMediaCodecPlayer.getVideoTimeUs(), pauseVideoPositionUs);
- // Verify audio and video are in sync
- assertTrue(String.format("Video pts (%d) is ahead of audio pts (%d)",
- pauseVideoPositionUs, pauseAudioFramePositionUs),
- pauseVideoPositionUs <= pauseAudioFramePositionUs);
+ // Verify audio and video are roughly in sync when paused
+ long framePosition = mMediaCodecPlayer.getTimestamp().framePosition;
+ long playbackRateFps = mMediaCodecPlayer.getAudioTrack().getPlaybackRate();
+ long pauseAudioPositionMs = pauseAudioTimestamp.framePosition * 1000 / playbackRateFps;
+ long pauseVideoPositionMs = pauseVideoPositionUs / 1000;
+ long deltaMs = pauseVideoPositionMs - pauseAudioPositionMs;
+ assertTrue(String.format(
+ "Video is %d milliseconds out of sync from audio (video:%d audio:%d)",
+ deltaMs, pauseVideoPositionMs, pauseAudioPositionMs),
+ deltaMs > -80 && deltaMs < pauseVideoToleranceMs);
// Flush both audio and video pipelines
mMediaCodecPlayer.flush();
@@ -4493,8 +4521,8 @@
/**
* Test accurate video rendering after a video MediaCodec flush with HEVC if supported
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledAccurateVideoFlushHevc() throws Exception {
testTunneledAccurateVideoFlush(MediaFormat.MIMETYPE_VIDEO_HEVC,
"video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
@@ -4503,8 +4531,8 @@
/**
* Test accurate video rendering after a video MediaCodec flush with AVC if supported
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledAccurateVideoFlushAvc() throws Exception {
testTunneledAccurateVideoFlush(MediaFormat.MIMETYPE_VIDEO_AVC,
"video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
@@ -4513,49 +4541,18 @@
/**
* Test accurate video rendering after a video MediaCodec flush with VP9 if supported
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledAccurateVideoFlushVp9() throws Exception {
testTunneledAccurateVideoFlush(MediaFormat.MIMETYPE_VIDEO_VP9,
"bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
}
/**
- * Test tunneled audioTimestamp progress with HEVC if supported
+ * Test that audio timestamps stop progressing during pause in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
- @Test
- public void testTunneledAudioTimestampProgressHevc() throws Exception {
- testTunneledAudioTimestampProgress(MediaFormat.MIMETYPE_VIDEO_HEVC,
- "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
- }
-
- /**
- * Test tunneled audioTimestamp progress with AVC if supported
- */
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
- @Test
- public void testTunneledAudioTimestampProgressAvc() throws Exception {
- testTunneledAudioTimestampProgress(MediaFormat.MIMETYPE_VIDEO_AVC,
- "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
- }
-
- /**
- * Test tunneled audioTimestamp progress with VP9 if supported
- */
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
- @Test
- public void testTunneledAudioTimestampProgressVp9() throws Exception {
- testTunneledAudioTimestampProgress(MediaFormat.MIMETYPE_VIDEO_VP9,
- "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
- }
-
- /**
- * Test that AudioTrack timestamps don't advance after pause.
- */
- private void
- testTunneledAudioTimestampProgress(String mimeType, String videoName) throws Exception
- {
+ private void testTunneledAudioProgressWithPause(String mimeType, String videoName)
+ throws Exception {
if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
"No tunneled video playback codec found for MIME " + mimeType)) {
return;
@@ -4568,11 +4565,10 @@
Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
- assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
+ mMediaCodecPlayer.startCodec();
- // starts video playback
- mMediaCodecPlayer.startThread();
+ mMediaCodecPlayer.play();
sleepUntil(() ->
mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
&& mMediaCodecPlayer.getTimestamp() != null
@@ -4604,14 +4600,43 @@
assertEquals(audioTimestampAfterPause.nanoTime, mMediaCodecPlayer.getTimestamp().nanoTime);
}
+
/**
- * Test tunneled audio underrun, if supported.
- *
- * Underrun test with lower pts after underrun.
+ * Test that audio timestamps stop progressing during pause for HEVC in tunneled mode.
+ */
+ @Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
+ public void testTunneledAudioProgressWithPauseHevc() throws Exception {
+ testTunneledAudioProgressWithPause(MediaFormat.MIMETYPE_VIDEO_HEVC,
+ "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
+ }
+
+ /**
+ * Test that audio timestamps stop progressing during pause for AVC in tunneled mode.
+ */
+ @Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
+ public void testTunneledAudioProgressWithPauseAvc() throws Exception {
+ testTunneledAudioProgressWithPause(MediaFormat.MIMETYPE_VIDEO_AVC,
+ "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
+ }
+
+ /**
+ * Test that audio timestamps stop progressing during pause for VP9 in tunneled mode.
+ */
+ @Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
+ public void testTunneledAudioProgressWithPauseVp9() throws Exception {
+ testTunneledAudioProgressWithPause(MediaFormat.MIMETYPE_VIDEO_VP9,
+ "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
+ }
+
+ /**
+ * Test that audio underrun pauses video and resumes in-sync in tunneled mode.
*
* TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
*/
- private void tunneledAudioUnderrun(String mimeType, String videoName, int frameRate)
+ private void tunneledAudioUnderrun(String mimeType, String videoName)
throws Exception {
if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
"No tunneled video playback codec found for MIME " + mimeType)) {
@@ -4625,11 +4650,10 @@
Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
- assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
+ mMediaCodecPlayer.startCodec();
- // Starts video playback
- mMediaCodecPlayer.startThread();
+ mMediaCodecPlayer.play();
sleepUntil(() ->
mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
&& mMediaCodecPlayer.getTimestamp() != null
@@ -4645,36 +4669,39 @@
mMediaCodecPlayer.simulateAudioUnderrun(true);
// Wait for audio underrun
- final int audioUnderrunTimeoutMs = 1000; // Arbitrary upper time limit on loop time duration
- long startTimeMs = System.currentTimeMillis();
- AudioTimestamp currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
AudioTimestamp underrunAudioTimestamp;
- do {
- assertTrue(String.format("No audio underrun after %d milliseconds",
- System.currentTimeMillis() - startTimeMs),
- System.currentTimeMillis() - startTimeMs < audioUnderrunTimeoutMs);
- underrunAudioTimestamp = currentAudioTimestamp;
- Thread.sleep(50);
- currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
- } while (currentAudioTimestamp.framePosition != underrunAudioTimestamp.framePosition);
+ {
+ AudioTimestamp currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
+ long startTimeMs = System.currentTimeMillis();
+ do {
+ int audioUnderrunTimeoutMs = 1000;
+ assertTrue(String.format("No audio underrun after %d milliseconds",
+ System.currentTimeMillis() - startTimeMs),
+ System.currentTimeMillis() - startTimeMs < audioUnderrunTimeoutMs);
+ underrunAudioTimestamp = currentAudioTimestamp;
+ Thread.sleep(50);
+ currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
+ } while (currentAudioTimestamp.framePosition != underrunAudioTimestamp.framePosition);
+ }
+ // Wait until video playback pauses due to underrunning audio
+ long pausedVideoTimeUs = -1;
+ {
+ long currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
+ long startTimeMs = System.currentTimeMillis();
+ do {
+ int videoPauseTimeoutMs = 2000;
+ assertTrue(String.format("No video pause after %d milliseconds",
+ videoPauseTimeoutMs),
+ System.currentTimeMillis() - startTimeMs < videoPauseTimeoutMs);
+ pausedVideoTimeUs = currentVideoTimeUs;
+ Thread.sleep(250); // onFrameRendered messages can get delayed in the Framework
+ currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
+ } while (currentVideoTimeUs != pausedVideoTimeUs);
+ }
- // Wait until video playback stalls
- final int videoUnderrunTimeoutMs = 1000;
- startTimeMs = System.currentTimeMillis();
- long currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
- long underrunVideoTimeUs = -1;
- do {
- assertTrue(String.format("No video underrun after %d milliseconds",
- videoUnderrunTimeoutMs),
- System.currentTimeMillis() - startTimeMs < videoUnderrunTimeoutMs);
- underrunVideoTimeUs = currentVideoTimeUs;
- Thread.sleep(50);
- currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
- } while (currentVideoTimeUs != underrunVideoTimeUs);
-
- // Retrieve index for the video rendered frame at the time of underrun
- int underrunVideoRenderedTimestampIndex =
+ // Retrieve index for the video rendered frame at the time of video pausing
+ int pausedVideoRenderedTimestampIndex =
mMediaCodecPlayer.getRenderedVideoFrameTimestampList().size() - 1;
// Resume audio buffering with a negative offset, in order to simulate a desynchronisation.
@@ -4683,35 +4710,38 @@
mMediaCodecPlayer.simulateAudioUnderrun(false);
// Wait until audio playback resumes
- final int audioResumeTimeoutMs = 1000;
- startTimeMs = System.currentTimeMillis();
- currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
AudioTimestamp postResumeAudioTimestamp;
- do {
- assertTrue(String.format("Audio has not resumed after %d milliseconds",
- audioResumeTimeoutMs),
- System.currentTimeMillis() - startTimeMs < audioResumeTimeoutMs);
- postResumeAudioTimestamp = currentAudioTimestamp;
- Thread.sleep(50);
- currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
- } while(currentAudioTimestamp.framePosition == postResumeAudioTimestamp.framePosition);
+ {
+ AudioTimestamp previousAudioTimestamp;
+ long startTimeMs = System.currentTimeMillis();
+ do {
+ int audioResumeTimeoutMs = 1000;
+ assertTrue(String.format("Audio has not resumed after %d milliseconds",
+ audioResumeTimeoutMs),
+ System.currentTimeMillis() - startTimeMs < audioResumeTimeoutMs);
+ previousAudioTimestamp = mMediaCodecPlayer.getTimestamp();
+ Thread.sleep(50);
+ postResumeAudioTimestamp = mMediaCodecPlayer.getTimestamp();
+ } while (postResumeAudioTimestamp.framePosition == previousAudioTimestamp.framePosition);
+ }
// Now that audio playback has resumed, wait until video playback resumes
- // We care about the timestamp of the first output frame, rather than the exact time the
- // video resumed, which is why we only start polling after we are sure audio playback has
- // resumed.
- final int videoResumeTimeoutMs = 1000;
- startTimeMs = System.currentTimeMillis();
- currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
- long resumeVideoTimeUs = -1;
- do {
- assertTrue(String.format("Video has not resumed after %d milliseconds",
- videoResumeTimeoutMs),
- System.currentTimeMillis() - startTimeMs < videoResumeTimeoutMs);
- resumeVideoTimeUs = currentVideoTimeUs;
- Thread.sleep(50);
- currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
- } while (currentVideoTimeUs == resumeVideoTimeUs);
+ {
+ // We actually don't care about trying to capture the exact time video resumed, because
+ // we can just look at the historical list of rendered video timestamps
+ long postResumeVideoTimeUs;
+ long previousVideoTimeUs;
+ long startTimeMs = System.currentTimeMillis();
+ do {
+ int videoResumeTimeoutMs = 2000;
+ assertTrue(String.format("Video has not resumed after %d milliseconds",
+ videoResumeTimeoutMs),
+ System.currentTimeMillis() - startTimeMs < videoResumeTimeoutMs);
+ previousVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
+ Thread.sleep(50);
+ postResumeVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
+ } while (postResumeVideoTimeUs == previousVideoTimeUs);
+ }
// The system time when rendering the first audio frame after the resume
long playbackRateFps = mMediaCodecPlayer.getAudioTrack().getPlaybackRate();
@@ -4721,52 +4751,74 @@
long resumeAudioSystemTimeNs = postResumeAudioTimestamp.nanoTime - (long) elapsedTimeNs;
long resumeAudioSystemTimeMs = resumeAudioSystemTimeNs / 1000 / 1000;
- // The system time when rendering the first video frame after the resume
+ // The system time when rendering the first video frame after video playback resumes
long resumeVideoSystemTimeMs = mMediaCodecPlayer.getRenderedVideoFrameSystemTimeList()
- .get(underrunVideoRenderedTimestampIndex + 1) / 1000 / 1000;
+ .get(pausedVideoRenderedTimestampIndex + 1) / 1000 / 1000;
- // Verify that audio and video are in-sync after resume time
+ // Verify that video resumes in a reasonable amount of time after audio resumes
// Note: Because a -100ms PTS gap is introduced, the video should resume 100ms later
resumeAudioSystemTimeMs += 100;
- long vsyncMs = 1000 / frameRate;
- long avSyncOffsetMs = resumeAudioSystemTimeMs - resumeVideoSystemTimeMs;
+ long resumeDeltaMs = resumeVideoSystemTimeMs - resumeAudioSystemTimeMs;
+ assertTrue(String.format("Video started %s milliseconds before audio resumed "
+ + "(video:%d audio:%d)", resumeDeltaMs * -1, resumeVideoSystemTimeMs,
+ resumeAudioSystemTimeMs),
+ resumeDeltaMs > 0); // video is expected to start after audio resumes
assertTrue(String.format(
- "Audio is %d milliseconds out of sync of video (audio:%d video:%d)",
- avSyncOffsetMs, resumeAudioSystemTimeMs, resumeVideoSystemTimeMs),
- Math.abs(avSyncOffsetMs) <= vsyncMs);
+ "Video started %d milliseconds after audio resumed (video:%d audio:%d)",
+ resumeDeltaMs, resumeVideoSystemTimeMs, resumeAudioSystemTimeMs),
+ resumeDeltaMs <= 600); // video starting 300ms after audio is barely noticeable
+
+ // Determine the system time of the audio frame that matches the presentation timestamp of
+ // the resumed video frame
+ long resumeVideoPresentationTimeUs = mMediaCodecPlayer.getRenderedVideoFrameTimestampList()
+ .get(pausedVideoRenderedTimestampIndex + 1);
+ long matchingAudioFramePosition = resumeVideoPresentationTimeUs * playbackRateFps / 1000 / 1000;
+ playedFrames = matchingAudioFramePosition - postResumeAudioTimestamp.framePosition;
+ elapsedTimeNs = playedFrames * (1000.0 * 1000.0 * 1000.0 / playbackRateFps);
+ long matchingAudioSystemTimeNs = postResumeAudioTimestamp.nanoTime + (long) elapsedTimeNs;
+ long matchingAudioSystemTimeMs = matchingAudioSystemTimeNs / 1000 / 1000;
+
+ // Verify that video and audio are in sync at the time when video resumes
+ // Note: Because a -100ms PTS gap is introduced, the video should resume 100ms later
+ matchingAudioSystemTimeMs += 100;
+ long avSyncOffsetMs = resumeVideoSystemTimeMs - matchingAudioSystemTimeMs;
+ assertTrue(String.format("Video is %d milliseconds out of sync of audio after resuming "
+ + "(video:%d, audio:%d)", avSyncOffsetMs, resumeVideoSystemTimeMs,
+ matchingAudioSystemTimeMs),
+ // some leniency in AV sync is required because Android TV STB/OTT OEMs often have
+ // to tune for imperfect downstream TVs (that have processing delays on the video)
+ // by knowingly producing HDMI output that has audio and video mildly out of sync
+ Math.abs(avSyncOffsetMs) <= 80);
}
/**
- * Test tunneled audio underrun with HEVC if supported
+ * Test that audio underrun pauses video and resumes in-sync for HEVC in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledAudioUnderrunHevc() throws Exception {
tunneledAudioUnderrun(MediaFormat.MIMETYPE_VIDEO_HEVC,
- "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv",
- 25);
+ "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
}
/**
- * Test tunneled audio underrun with AVC if supported
+ * Test that audio underrun pauses video and resumes in-sync for AVC in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledAudioUnderrunAvc() throws Exception {
tunneledAudioUnderrun(MediaFormat.MIMETYPE_VIDEO_AVC,
- "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
- 25);
+ "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
}
/**
- * Test tunneled audio underrun with VP9 if supported
+ * Test that audio underrun pauses video and resumes in-sync for VP9 in tunneled mode.
*/
- @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
@Test
+ @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
public void testTunneledAudioUnderrunVp9() throws Exception {
tunneledAudioUnderrun(MediaFormat.MIMETYPE_VIDEO_VP9,
- "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
- 30);
+ "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
}
private void sleepUntil(Supplier<Boolean> supplier, Duration maxWait) throws Exception {
diff --git a/tests/tests/media/drmframework/src/android/media/drmframework/cts/MediaDrmClearkeyTest.java b/tests/tests/media/drmframework/src/android/media/drmframework/cts/MediaDrmClearkeyTest.java
index 1a1a46f..2c1d9c5 100644
--- a/tests/tests/media/drmframework/src/android/media/drmframework/cts/MediaDrmClearkeyTest.java
+++ b/tests/tests/media/drmframework/src/android/media/drmframework/cts/MediaDrmClearkeyTest.java
@@ -26,7 +26,6 @@
import android.media.UnsupportedSchemeException;
import android.media.cts.AudioManagerStub;
import android.media.cts.AudioManagerStubHelper;
-import android.media.cts.CodecState;
import android.media.cts.ConnectionStatus;
import android.media.cts.IConnectionStatus;
import android.media.cts.InputSurface;
diff --git a/tests/tests/systemui/Android.bp b/tests/tests/systemui/Android.bp
index e33d8fc..3a68e4d 100644
--- a/tests/tests/systemui/Android.bp
+++ b/tests/tests/systemui/Android.bp
@@ -39,6 +39,7 @@
"androidx.test.ext.junit",
"androidx.test.uiautomator",
"cts-wm-util",
+ "permission-test-util-lib",
"ub-uiautomator",
],
srcs: [
diff --git a/tests/tests/systemui/AndroidManifest.xml b/tests/tests/systemui/AndroidManifest.xml
index f55ed3f..d4ba3b3 100644
--- a/tests/tests/systemui/AndroidManifest.xml
+++ b/tests/tests/systemui/AndroidManifest.xml
@@ -26,6 +26,7 @@
<uses-permission android:name="android.permission.MANAGE_EXTERNAL_STORAGE"/>
<!-- Required by flickerlib to dump window states -->
<uses-permission android:name="android.permission.DUMP"/>
+ <uses-permission android:name="android.permission.POST_NOTIFICATIONS"/>
<application android:requestLegacyExternalStorage="true">
<activity android:name=".LightBarActivity"
diff --git a/tests/tests/systemui/src/android/systemui/cts/LightBarTests.java b/tests/tests/systemui/src/android/systemui/cts/LightBarTests.java
index ffa58ba..5079217 100644
--- a/tests/tests/systemui/src/android/systemui/cts/LightBarTests.java
+++ b/tests/tests/systemui/src/android/systemui/cts/LightBarTests.java
@@ -16,6 +16,9 @@
package android.systemui.cts;
+import static android.Manifest.permission.POST_NOTIFICATIONS;
+import static android.Manifest.permission.REVOKE_POST_NOTIFICATIONS_WITHOUT_KILL;
+import static android.Manifest.permission.REVOKE_RUNTIME_PERMISSIONS;
import static android.server.wm.BarTestUtils.assumeHasColoredNavigationBar;
import static android.server.wm.BarTestUtils.assumeHasColoredStatusBar;
import static android.server.wm.BarTestUtils.assumeStatusBarContainsCutout;
@@ -33,7 +36,10 @@
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.Insets;
+import android.os.Process;
import android.os.SystemClock;
+import android.permission.PermissionManager;
+import android.permission.cts.PermissionUtils;
import android.platform.test.annotations.AppModeFull;
import android.view.Gravity;
import android.view.InputDevice;
@@ -45,6 +51,7 @@
import androidx.test.rule.ActivityTestRule;
import androidx.test.runner.AndroidJUnit4;
+import com.android.compatibility.common.util.SystemUtil;
import com.android.compatibility.common.util.ThrowingRunnable;
import org.junit.Rule;
@@ -244,22 +251,23 @@
}
private void runInNotificationSession(ThrowingRunnable task) throws Exception {
+ Context context = getInstrumentation().getContext();
+ String packageName = getInstrumentation().getTargetContext().getPackageName();
try {
- mNm = (NotificationManager) getInstrumentation().getContext()
- .getSystemService(Context.NOTIFICATION_SERVICE);
+ PermissionUtils.grantPermission(packageName, POST_NOTIFICATIONS);
+ mNm = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
NotificationChannel channel1 = new NotificationChannel(NOTIFICATION_CHANNEL_ID,
NOTIFICATION_CHANNEL_ID, NotificationManager.IMPORTANCE_LOW);
mNm.createNotificationChannel(channel1);
// post 10 notifications to ensure enough icons in the status bar
for (int i = 0; i < 10; i++) {
- Notification.Builder noti1 = new Notification.Builder(
- getInstrumentation().getContext(),
- NOTIFICATION_CHANNEL_ID)
- .setSmallIcon(R.drawable.ic_save)
- .setChannelId(NOTIFICATION_CHANNEL_ID)
- .setPriority(Notification.PRIORITY_LOW)
- .setGroup(NOTIFICATION_GROUP_KEY);
+ Notification.Builder noti1 =
+ new Notification.Builder(context, NOTIFICATION_CHANNEL_ID)
+ .setSmallIcon(R.drawable.ic_save)
+ .setChannelId(NOTIFICATION_CHANNEL_ID)
+ .setPriority(Notification.PRIORITY_LOW)
+ .setGroup(NOTIFICATION_GROUP_KEY);
mNm.notify(NOTIFICATION_TAG, i, noti1.build());
}
@@ -267,6 +275,16 @@
} finally {
mNm.cancelAll();
mNm.deleteNotificationChannel(NOTIFICATION_CHANNEL_ID);
+
+ // Use test API to prevent PermissionManager from killing the test process when revoking
+ // permission.
+ SystemUtil.runWithShellPermissionIdentity(
+ () -> context.getSystemService(PermissionManager.class)
+ .revokePostNotificationPermissionWithoutKillForTest(
+ packageName,
+ Process.myUserHandle().getIdentifier()),
+ REVOKE_POST_NOTIFICATIONS_WITHOUT_KILL,
+ REVOKE_RUNTIME_PERMISSIONS);
}
}
diff --git a/tests/tests/telephony/current/src/android/telephony/cts/TelephonyManagerTest.java b/tests/tests/telephony/current/src/android/telephony/cts/TelephonyManagerTest.java
index 3757b63..8247c3a 100644
--- a/tests/tests/telephony/current/src/android/telephony/cts/TelephonyManagerTest.java
+++ b/tests/tests/telephony/current/src/android/telephony/cts/TelephonyManagerTest.java
@@ -283,6 +283,7 @@
private static final int RADIO_HAL_VERSION_1_3 = makeRadioVersion(1, 3);
private static final int RADIO_HAL_VERSION_1_5 = makeRadioVersion(1, 5);
private static final int RADIO_HAL_VERSION_1_6 = makeRadioVersion(1, 6);
+ private static final int RADIO_HAL_VERSION_2_0 = makeRadioVersion(2, 0);
static {
EMERGENCY_NUMBER_SOURCE_SET = new HashSet<Integer>();
@@ -1729,6 +1730,10 @@
@Test
public void testRebootRadio() throws Throwable {
assumeTrue(hasFeature(PackageManager.FEATURE_TELEPHONY_RADIO_ACCESS));
+ if (mRadioVersion < RADIO_HAL_VERSION_2_0) {
+ Log.d(TAG, "Skipping test since rebootModem is not supported.");
+ return;
+ }
TestThread t = new TestThread(() -> {
Looper.prepare();