Use only the audio timestamp frame position to detect underrun

Use the frame position to detect audio underrun instead of the whole
timestamp, since nanoTime keeps advancing even during underrun
conditions.

Also overhaul the wait loops to use do-while loops, since they more
naturally express what the test is trying to accomplish.
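
As an illustration of both points, the reworked wait loops boil down to
the following sketch (mMediaCodecPlayer, getTimestamp() and the 50 ms
poll interval are taken from the test code in the diff below; the
timeout assertion is omitted for brevity):

    // Poll until framePosition stops advancing, i.e. an audio underrun.
    // nanoTime keeps advancing even while the pipeline is starved, so
    // only framePosition is compared between polls.
    AudioTimestamp current = mMediaCodecPlayer.getTimestamp();
    AudioTimestamp previous;
    do {
        previous = current;
        Thread.sleep(50);
        current = mMediaCodecPlayer.getTimestamp();
    } while (current.framePosition != previous.framePosition);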

This allows us to get further with the following test on ADT-3:
atest android.media.decoder.cts.DecoderTest#testTunneledAudioUnderrunAvc

Note: The test still fails, because av-sync is off by hundreds of
milliseconds. I am still working with AMLogic to identify the problem.

Bug: 234327211
Test: atest android.media.decoder.cts.DecoderTest#testTunneledAudioUnderrunAvc
Change-Id: Ie4c31a23893cf41ebdeb63e1d930a5a7d53d4dbc
diff --git a/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java b/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java
index f35d0df..73ba7c3 100644
--- a/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java
+++ b/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java
@@ -4610,7 +4610,7 @@
         assertTrue("MediaCodecPlayer.start() failed!", mMediaCodecPlayer.start());
         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
 
-        // starts video playback
+        // Starts video playback
         mMediaCodecPlayer.startThread();
         sleepUntil(() ->
                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
@@ -4625,75 +4625,82 @@
 
         // Keep buffering video content but stop buffering audio content -> audio underrun
         mMediaCodecPlayer.simulateAudioUnderrun(true);
-        // Loop to wait for audio underrun
-        // TODO(b/200280965): Find a more appropriate delay based on partner feedback
+
+        // Wait for audio underrun
         final int audioUnderrunTimeoutMs = 1000; // Arbitrary upper time limit on loop time duration
         long startTimeMs = System.currentTimeMillis();
-        AudioTimestamp previousTimestamp;
-        while ((previousTimestamp = mMediaCodecPlayer.getTimestamp()) == null) {
-            assertTrue(String.format("No audio timestamp after %d milliseconds",
+        AudioTimestamp currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
+        AudioTimestamp underrunAudioTimestamp;
+        do {
+            assertTrue(String.format("No audio underrun after %d milliseconds",
                             System.currentTimeMillis() - startTimeMs),
                     System.currentTimeMillis() - startTimeMs < audioUnderrunTimeoutMs);
+            underrunAudioTimestamp = currentAudioTimestamp;
             Thread.sleep(50);
-        }
-        AudioTimestamp underrunAudioTimestamp;
-        while ((underrunAudioTimestamp = mMediaCodecPlayer.getTimestamp()) != previousTimestamp) {
-            assertTrue(String.format("No audio underrun after %d milliseconds",
-                            audioUnderrunTimeoutMs),
-                    System.currentTimeMillis() - startTimeMs < audioUnderrunTimeoutMs);
-            previousTimestamp = underrunAudioTimestamp;
-            Thread.sleep(50);
-        }
-        // Loop to wait until video playback stalls
-        long previousVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
-        long underrunVideoTimeUs;
-        startTimeMs = System.currentTimeMillis();
-        // TODO(b/200280965): Find a more appropriate delay based on partner feedback
+            currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
+        } while (currentAudioTimestamp.framePosition != underrunAudioTimestamp.framePosition);
+
+        // Wait until video playback stalls
         final int videoUnderrunTimeoutMs = 1000;
-        while ((underrunVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs()) != previousVideoTimeUs) {
+        startTimeMs = System.currentTimeMillis();
+        long currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
+        long underrunVideoTimeUs = -1;
+        do {
             assertTrue(String.format("No video underrun after %d milliseconds",
                             videoUnderrunTimeoutMs),
                     System.currentTimeMillis() - startTimeMs < videoUnderrunTimeoutMs);
-            previousVideoTimeUs = underrunVideoTimeUs;
+            underrunVideoTimeUs = currentVideoTimeUs;
             Thread.sleep(50);
-        }
+            currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
+        } while (currentVideoTimeUs != underrunVideoTimeUs);
 
-        final int underrunVideoRenderedTimestampIndex =
+        // Retrieve the index of the last rendered video frame at the time of underrun
+        int underrunVideoRenderedTimestampIndex =
                 mMediaCodecPlayer.getRenderedVideoFrameTimestampList().size() - 1;
+
         // Resume audio buffering with a negative offset, in order to simulate a desynchronisation.
         // TODO(b/202710709): Use timestamp relative to last played video frame before pause
         mMediaCodecPlayer.setAudioTrackOffsetMs(-100);
         mMediaCodecPlayer.simulateAudioUnderrun(false);
 
-        // Loop to wait until audio playback resumes
+        // Wait until audio playback resumes
+        final int audioResumeTimeoutMs = 1000;
         startTimeMs = System.currentTimeMillis();
-        AudioTimestamp postResumeTimestamp;
-        while ((postResumeTimestamp = mMediaCodecPlayer.getTimestamp()) == underrunAudioTimestamp) {
+        currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
+        AudioTimestamp postResumeAudioTimestamp;
+        do {
             assertTrue(String.format("Audio has not resumed after %d milliseconds",
-                            audioUnderrunTimeoutMs),
-                    System.currentTimeMillis() - startTimeMs < audioUnderrunTimeoutMs);
+                            audioResumeTimeoutMs),
+                    System.currentTimeMillis() - startTimeMs < audioResumeTimeoutMs);
+            postResumeAudioTimestamp = currentAudioTimestamp;
             Thread.sleep(50);
-        }
+            currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
+        } while (currentAudioTimestamp.framePosition == postResumeAudioTimestamp.framePosition);
 
-        // Now that audio playback has resumed, loop to wait until video playback resumes
+        // Now that audio playback has resumed, wait until video playback resumes
         // We care about the timestamp of the first output frame, rather than the exact time the
         // video resumed, which is why we only start polling after we are sure audio playback has
         // resumed.
-        long resumeVideoTimeUs = 0;
+        final int videoResumeTimeoutMs = 1000;
         startTimeMs = System.currentTimeMillis();
-        while ((resumeVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs()) == underrunVideoTimeUs) {
+        currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
+        long resumeVideoTimeUs = -1;
+        do {
             assertTrue(String.format("Video has not resumed after %d milliseconds",
-                            videoUnderrunTimeoutMs),
-                    System.currentTimeMillis() - startTimeMs < videoUnderrunTimeoutMs);
+                            videoResumeTimeoutMs),
+                    System.currentTimeMillis() - startTimeMs < videoResumeTimeoutMs);
+            resumeVideoTimeUs = currentVideoTimeUs;
             Thread.sleep(50);
-        }
+            currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
+        } while (currentVideoTimeUs == resumeVideoTimeUs);
 
         // The system time when rendering the first audio frame after the resume
         long playbackRateFps = mMediaCodecPlayer.getAudioTrack().getPlaybackRate();
-        long playedFrames = postResumeTimestamp.framePosition
+        long playedFrames = postResumeAudioTimestamp.framePosition
                 - underrunAudioTimestamp.framePosition + 1;
         double elapsedTimeNs = playedFrames * (1000.0 * 1000.0 * 1000.0 / playbackRateFps);
-        long resumeAudioSystemTimeNs = postResumeTimestamp.nanoTime - (long) elapsedTimeNs;
+        long resumeAudioSystemTimeNs = postResumeAudioTimestamp.nanoTime - (long) elapsedTimeNs;
         long resumeAudioSystemTimeMs = resumeAudioSystemTimeNs / 1000 / 1000;
 
         // The system time when rendering the first video frame after the resume
@@ -4706,7 +4713,7 @@
         long vsyncMs = 1000 / frameRate;
         long avSyncOffsetMs = resumeAudioSystemTimeMs - resumeVideoSystemTimeMs;
         assertTrue(String.format(
-                        "Audio and video is %d milliseconds out of sync (audio:%d video:%d)",
+                        "Audio is %d milliseconds out of sync of video (audio:%d video:%d)",
                         avSyncOffsetMs, resumeAudioSystemTimeMs, resumeVideoSystemTimeMs),
                 Math.abs(avSyncOffsetMs) <= vsyncMs);
     }