Pass PTS through the MediaCodec tests
Use the existing MediaCodec buffer APIs and the new EGL
presentation-time call to pass the presentation time stamp
through video encode and decode.
Bug 8191230
Change-Id: I4bf381934ff2e75d15f49b3be7696cc3ff5408bb
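For orientation, the two propagation paths this patch exercises look roughly like the sketch below. This is a minimal illustration, not part of the patch; FRAME_RATE, TIMEOUT_USEC, and the 123 offset are assumptions borrowed from the tests' style, and the encoder is presumed configured and started.

import android.media.MediaCodec;

public class PtsPassThroughSketch {
    private static final int FRAME_RATE = 15;       // assumed; the tests define their own
    private static final int TIMEOUT_USEC = 10000;  // assumed

    // Presentation time for frame N, in microseconds (mirrors the tests' helper).
    static long computePresentationTime(int frameIndex) {
        return 123 + frameIndex * 1000000L / FRAME_RATE;
    }

    // Buffer path: the PTS rides in through queueInputBuffer()...
    static void submitFrame(MediaCodec encoder, int inputBufIndex, int size, int frameIndex) {
        long ptsUsec = computePresentationTime(frameIndex);
        encoder.queueInputBuffer(inputBufIndex, 0, size, ptsUsec, 0 /*flags*/);
    }

    // ...and should come back, unmodified, in BufferInfo.presentationTimeUs.
    static void checkFrame(MediaCodec encoder, int frameIndex) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int outputBufIndex = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
        if (outputBufIndex >= 0) {
            if (info.presentationTimeUs != computePresentationTime(frameIndex)) {
                throw new AssertionError("PTS did not survive the codec");
            }
            encoder.releaseOutputBuffer(outputBufIndex, false);
        }
    }
}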
diff --git a/tests/tests/media/src/android/media/cts/DecodeEditEncodeTest.java b/tests/tests/media/src/android/media/cts/DecodeEditEncodeTest.java
index cb7d250..23c10a5 100644
--- a/tests/tests/media/src/android/media/cts/DecodeEditEncodeTest.java
+++ b/tests/tests/media/src/android/media/cts/DecodeEditEncodeTest.java
@@ -290,7 +290,6 @@
// If we're not done submitting frames, generate a new one and submit it. The
// eglSwapBuffers call will block if the input is full.
if (!inputDone) {
- long ptsUsec = generateIndex * 1000000 / FRAME_RATE;
if (generateIndex == NUM_FRAMES) {
// Send an empty frame with the end-of-stream flag set.
if (VERBOSE) Log.d(TAG, "signaling input EOS");
@@ -304,7 +303,7 @@
inputDone = true;
} else {
generateSurfaceFrame(generateIndex);
- // TODO: provide PTS time stamp to EGL
+ inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
inputSurface.swapBuffers();
}
@@ -349,7 +348,7 @@
encodedData.position(info.offset);
encodedData.limit(info.offset + info.size);
- output.addChunk(encodedData, info.flags);
+ output.addChunk(encodedData, info.flags, info.presentationTimeUs);
outputCount++;
}
@@ -500,9 +499,11 @@
// the BUFFER_FLAG_CODEC_CONFIG flag set.
ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
inputBuf.clear();
- int flags = inputData.getChunk(inputChunk, inputBuf);
+ inputData.getChunkData(inputChunk, inputBuf);
+ int flags = inputData.getChunkFlags(inputChunk);
+ long time = inputData.getChunkTime(inputChunk);
decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(),
- 0L, flags);
+ time, flags);
if (VERBOSE) {
Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
inputBuf.position() + " flags=" + flags);
@@ -544,7 +545,7 @@
encodedData.position(info.offset);
encodedData.limit(info.offset + info.size);
- outputData.addChunk(encodedData, info.flags);
+ outputData.addChunk(encodedData, info.flags, info.presentationTimeUs);
outputCount++;
if (VERBOSE) Log.d(TAG, "encoder output " + info.size + " bytes");
@@ -595,6 +596,7 @@
outputSurface.drawImage();
// Send it to the encoder.
+ inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
if (VERBOSE) Log.d(TAG, "swapBuffers");
inputSurface.swapBuffers();
}
@@ -686,9 +688,11 @@
// the BUFFER_FLAG_CODEC_CONFIG flag set.
ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
inputBuf.clear();
- int flags = inputData.getChunk(inputChunk, inputBuf);
+ inputData.getChunkData(inputChunk, inputBuf);
+ int flags = inputData.getChunkFlags(inputChunk);
+ long time = inputData.getChunkTime(inputChunk);
decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(),
- 0L, flags);
+ time, flags);
if (VERBOSE) {
Log.d(TAG, "submitted frame " + inputChunk + " to dec, size=" +
inputBuf.position() + " flags=" + flags);
@@ -732,6 +736,8 @@
decoder.releaseOutputBuffer(decoderStatus, doRender);
if (doRender) {
if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
+ assertEquals(computePresentationTime(checkIndex),
+ info.presentationTimeUs);
surface.awaitNewImage();
surface.drawImage();
if (!checkSurfaceFrame(checkIndex++)) {
@@ -807,6 +813,13 @@
}
}
+ /**
+ * Generates the presentation time for frame N, in microseconds.
+ */
+ private static long computePresentationTime(int frameIndex) {
+ return 123 + frameIndex * 1000000 / FRAME_RATE;
+ }
+
/**
* The elementary stream coming out of the "video/avc" encoder needs to be fed back into
@@ -818,6 +831,7 @@
private MediaFormat mMediaFormat;
private ArrayList<byte[]> mChunks = new ArrayList<byte[]>();
private ArrayList<Integer> mFlags = new ArrayList<Integer>();
+ private ArrayList<Long> mTimes = new ArrayList<Long>();
/**
* Sets the MediaFormat, for the benefit of a future decoder.
@@ -836,11 +850,12 @@
/**
* Adds a new chunk. Advances buf.position to buf.limit.
*/
- public void addChunk(ByteBuffer buf, int flags) {
+ public void addChunk(ByteBuffer buf, int flags, long time) {
byte[] data = new byte[buf.remaining()];
buf.get(data);
mChunks.add(data);
mFlags.add(flags);
+ mTimes.add(time);
}
/**
@@ -851,15 +866,28 @@
}
/**
- * Copies chunk N into "dest", and returns the BufferInfo flags. Advances dest.position.
+ * Copies the data from chunk N into "dest". Advances dest.position.
*/
- public int getChunk(int chunk, ByteBuffer dest) {
+ public void getChunkData(int chunk, ByteBuffer dest) {
byte[] data = mChunks.get(chunk);
dest.put(data);
+ }
+
+ /**
+ * Returns the flags associated with chunk N.
+ */
+ public int getChunkFlags(int chunk) {
return mFlags.get(chunk);
}
/**
+ * Returns the timestamp associated with chunk N, in microseconds.
+ */
+ public long getChunkTime(int chunk) {
+ return mTimes.get(chunk);
+ }
+
+ /**
* Writes the chunks to a file as a contiguous stream. Useful for debugging.
*/
public void saveToFile(File file) {
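The VideoChunks change above splits the old getChunk() into three accessors so the timestamp can travel with each encoded chunk. A trimmed-down sketch of the resulting container (class and method names are from the patch; the MediaFormat plumbing and saveToFile() are omitted):

import java.nio.ByteBuffer;
import java.util.ArrayList;

// Holds encoded video chunks plus their BufferInfo flags and timestamps,
// kept in three parallel lists indexed by chunk number.
class VideoChunks {
    private final ArrayList<byte[]> mChunks = new ArrayList<byte[]>();
    private final ArrayList<Integer> mFlags = new ArrayList<Integer>();
    private final ArrayList<Long> mTimes = new ArrayList<Long>();

    // Adds a new chunk. Advances buf.position to buf.limit.
    public void addChunk(ByteBuffer buf, int flags, long time) {
        byte[] data = new byte[buf.remaining()];
        buf.get(data);
        mChunks.add(data);
        mFlags.add(flags);
        mTimes.add(time);
    }

    // Copies the data from chunk N into "dest". Advances dest.position.
    public void getChunkData(int chunk, ByteBuffer dest) {
        dest.put(mChunks.get(chunk));
    }

    // Returns the flags associated with chunk N.
    public int getChunkFlags(int chunk) {
        return mFlags.get(chunk);
    }

    // Returns the timestamp associated with chunk N, in microseconds.
    public long getChunkTime(int chunk) {
        return mTimes.get(chunk);
    }
}

Replaying a chunk into the decoder, as in the hunks above, then becomes:

    inputData.getChunkData(inputChunk, inputBuf);
    int flags = inputData.getChunkFlags(inputChunk);
    long time = inputData.getChunkTime(inputChunk);
    decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(), time, flags);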
diff --git a/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java b/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java
index 6c3e051..5b9c37e 100644
--- a/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java
+++ b/tests/tests/media/src/android/media/cts/EncodeDecodeTest.java
@@ -481,7 +481,7 @@
int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
if (VERBOSE) Log.d(TAG, "inputBufIndex=" + inputBufIndex);
if (inputBufIndex >= 0) {
- long ptsUsec = generateIndex * 1000000 / FRAME_RATE;
+ long ptsUsec = computePresentationTime(generateIndex);
if (generateIndex == NUM_FRAMES) {
// Send an empty frame with the end-of-stream flag set. If we set EOS
// on a frame with data, that frame data will be ignored, and the
@@ -624,6 +624,8 @@
if (VERBOSE) Log.d(TAG, "got empty frame");
} else {
if (VERBOSE) Log.d(TAG, "decoded, checking frame " + checkIndex);
+ assertEquals(computePresentationTime(checkIndex),
+ info.presentationTimeUs);
if (!checkFrame(checkIndex++, decoderColorFormat, outputFrame)) {
badFrames++;
}
@@ -652,6 +654,8 @@
decoder.releaseOutputBuffer(decoderStatus, doRender);
if (doRender) {
if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
+ assertEquals(computePresentationTime(checkIndex),
+ info.presentationTimeUs);
outputSurface.awaitNewImage();
outputSurface.drawImage();
if (!checkSurfaceFrame(checkIndex++)) {
@@ -724,7 +728,6 @@
// If we're not done submitting frames, generate a new one and submit it. The
// eglSwapBuffers call will block if the input is full.
if (!inputDone) {
- long ptsUsec = generateIndex * 1000000 / FRAME_RATE;
if (generateIndex == NUM_FRAMES) {
// Send an empty frame with the end-of-stream flag set.
if (VERBOSE) Log.d(TAG, "signaling input EOS");
@@ -733,7 +736,7 @@
} else {
inputSurface.makeCurrent();
generateSurfaceFrame(generateIndex);
- // TODO: provide PTS time stamp to EGL
+ inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
inputSurface.swapBuffers();
}
@@ -780,6 +783,8 @@
outputSurface.makeCurrent();
decoder.releaseOutputBuffer(decoderStatus, doRender);
if (doRender) {
+ assertEquals(computePresentationTime(checkIndex),
+ info.presentationTimeUs);
if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
outputSurface.awaitNewImage();
outputSurface.drawImage();
@@ -1098,4 +1103,11 @@
return actual > (expected - MAX_DELTA) && actual < (expected + MAX_DELTA);
}
}
+
+ /**
+ * Generates the presentation time for frame N, in microseconds.
+ */
+ private static long computePresentationTime(int frameIndex) {
+ return 132 + frameIndex * 1000000 / FRAME_RATE;
+ }
}
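EncodeDecodeTest verifies the round trip on both output paths; for the surface path the assertion runs against BufferInfo just as the frame is queued for rendering. A condensed sketch of that check, assuming a decoder configured with an output Surface; the method and parameter names here are illustrative, not from the patch:

import android.media.MediaCodec;

class SurfaceOutputCheckSketch {
    // Releases the decoded frame (rendering it if it has data) and asserts
    // that it carries the expected presentation time, in microseconds.
    static boolean checkDecodedFrame(MediaCodec decoder, int decoderStatus,
            MediaCodec.BufferInfo info, int checkIndex, long expectedPtsUsec) {
        boolean doRender = (info.size != 0);
        // releaseOutputBuffer(..., true) queues the frame to the output Surface.
        decoder.releaseOutputBuffer(decoderStatus, doRender);
        if (doRender && info.presentationTimeUs != expectedPtsUsec) {
            throw new AssertionError("frame " + checkIndex + ": expected PTS "
                    + expectedPtsUsec + ", got " + info.presentationTimeUs);
        }
        return doRender;
    }
}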
diff --git a/tests/tests/media/src/android/media/cts/InputSurface.java b/tests/tests/media/src/android/media/cts/InputSurface.java
index 413b9bb..ff6ece1 100644
--- a/tests/tests/media/src/android/media/cts/InputSurface.java
+++ b/tests/tests/media/src/android/media/cts/InputSurface.java
@@ -17,23 +17,19 @@
package android.media.cts;
import android.opengl.EGL14;
-import android.opengl.GLES20;
+import android.opengl.EGLConfig;
+import android.opengl.EGLContext;
+import android.opengl.EGLDisplay;
+import android.opengl.EGLExt;
+import android.opengl.EGLSurface;
+import android.opengl.GLES10;
import android.opengl.GLES11Ext;
+import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
import java.nio.ByteBuffer;
-import javax.microedition.khronos.egl.EGL10;
-import javax.microedition.khronos.egl.EGLConfig;
-import javax.microedition.khronos.egl.EGLContext;
-import javax.microedition.khronos.egl.EGLDisplay;
-import javax.microedition.khronos.egl.EGLSurface;
-import javax.microedition.khronos.opengles.GL;
-import javax.microedition.khronos.opengles.GL10;
-
-
/**
* Holds state associated with a Surface used for MediaCodec encoder input.
@@ -49,7 +45,6 @@
private static final int EGL_RECORDABLE_ANDROID = 0x3142;
private static final int EGL_OPENGL_ES2_BIT = 4;
- private EGL10 mEGL;
private EGLDisplay mEGLDisplay;
private EGLContext mEGLContext;
private EGLSurface mEGLSurface;
@@ -72,35 +67,40 @@
* Prepares EGL. We want a GLES 2.0 context and a surface that supports recording.
*/
private void eglSetup() {
- mEGL = (EGL10)EGLContext.getEGL();
- mEGLDisplay = mEGL.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
- if (!mEGL.eglInitialize(mEGLDisplay, null)) {
- throw new RuntimeException("unable to initialize EGL10");
+ mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
+ if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) {
+ throw new RuntimeException("unable to get EGL14 display");
+ }
+ int[] version = new int[2];
+ if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) {
+ mEGLDisplay = null;
+ throw new RuntimeException("unable to initialize EGL14");
}
// Configure EGL for pbuffer and OpenGL ES 2.0. We want enough RGB bits
// to be able to tell if the frame is reasonable.
int[] attribList = {
- EGL10.EGL_RED_SIZE, 8,
- EGL10.EGL_GREEN_SIZE, 8,
- EGL10.EGL_BLUE_SIZE, 8,
- EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
+ EGL14.EGL_RED_SIZE, 8,
+ EGL14.EGL_GREEN_SIZE, 8,
+ EGL14.EGL_BLUE_SIZE, 8,
+ EGL14.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
EGL_RECORDABLE_ANDROID, 1,
- EGL10.EGL_NONE
+ EGL14.EGL_NONE
};
EGLConfig[] configs = new EGLConfig[1];
int[] numConfigs = new int[1];
- if (!mEGL.eglChooseConfig(mEGLDisplay, attribList, configs, 1, numConfigs)) {
- throw new RuntimeException("unable to find RGB888+recordable EGL config");
+ if (!EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length,
+ numConfigs, 0)) {
+ throw new RuntimeException("unable to find RGB888+recordable ES2 EGL config");
}
// Configure context for OpenGL ES 2.0.
int[] attrib_list = {
EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
- EGL10.EGL_NONE
+ EGL14.EGL_NONE
};
- mEGLContext = mEGL.eglCreateContext(mEGLDisplay, configs[0], EGL10.EGL_NO_CONTEXT,
- attrib_list);
+ mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT,
+ attrib_list, 0);
checkEglError("eglCreateContext");
if (mEGLContext == null) {
throw new RuntimeException("null context");
@@ -108,10 +108,10 @@
// Create a window surface, and attach it to the Surface we received.
int[] surfaceAttribs = {
- EGL10.EGL_NONE
+ EGL14.EGL_NONE
};
- mEGLSurface = mEGL.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
- surfaceAttribs);
+ mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface,
+ surfaceAttribs, 0);
checkEglError("eglCreateWindowSurface");
if (mEGLSurface == null) {
throw new RuntimeException("surface was null");
@@ -123,14 +123,14 @@
* Surface that was passed to our constructor.
*/
public void release() {
- if (mEGL.eglGetCurrentContext() == mEGLContext) {
+ if (EGL14.eglGetCurrentContext() == mEGLContext) {
// Clear the current context and surface to ensure they are discarded immediately.
- mEGL.eglMakeCurrent(mEGLDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE,
- EGL10.EGL_NO_CONTEXT);
+ EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE,
+ EGL14.EGL_NO_CONTEXT);
}
- mEGL.eglDestroySurface(mEGLDisplay, mEGLSurface);
- mEGL.eglDestroyContext(mEGLDisplay, mEGLContext);
- //mEGL.eglTerminate(mEGLDisplay);
+ EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface);
+ EGL14.eglDestroyContext(mEGLDisplay, mEGLContext);
+ //EGL14.eglTerminate(mEGLDisplay);
mSurface.release();
@@ -138,7 +138,6 @@
mEGLDisplay = null;
mEGLContext = null;
mEGLSurface = null;
- mEGL = null;
mSurface = null;
}
@@ -147,7 +146,7 @@
* Makes our EGL context and surface current.
*/
public void makeCurrent() {
- if (!mEGL.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
+ if (!EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext)) {
throw new RuntimeException("eglMakeCurrent failed");
}
}
@@ -156,7 +155,7 @@
* Calls eglSwapBuffers. Use this to "publish" the current frame.
*/
public boolean swapBuffers() {
- return mEGL.eglSwapBuffers(mEGLDisplay, mEGLSurface);
+ return EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface);
}
/**
@@ -167,12 +166,19 @@
}
/**
+ * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds.
+ */
+ public void setPresentationTime(long nsecs) {
+ EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs);
+ }
+
+ /**
* Checks for EGL errors.
*/
private void checkEglError(String msg) {
boolean failed = false;
int error;
- while ((error = mEGL.eglGetError()) != EGL10.EGL_SUCCESS) {
+ while ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
Log.e(TAG, msg + ": EGL error: 0x" + Integer.toHexString(error));
failed = true;
}
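The setPresentationTime() call is the reason for the EGL10-to-EGL14 migration above: the presentation-time entry point is only reachable through the android.opengl bindings. In the released SDK (API 18 and later) it lives on android.opengl.EGLExt and takes nanoseconds, per the EGL_ANDROID_presentation_time extension. A minimal sketch of the publish step under those assumptions:

import android.opengl.EGL14;
import android.opengl.EGLDisplay;
import android.opengl.EGLExt;
import android.opengl.EGLSurface;

// Assumes an EGL14 display and window surface set up as in eglSetup() above.
class PresentationTimeSketch {
    static void publishFrame(EGLDisplay display, EGLSurface surface, long ptsUsec) {
        // EGL_ANDROID_presentation_time takes nanoseconds; the tests track
        // timestamps in microseconds, hence the * 1000.
        EGLExt.eglPresentationTimeANDROID(display, surface, ptsUsec * 1000);
        // The timestamp is latched for the next eglSwapBuffers() on this
        // surface and reappears at the encoder output as
        // BufferInfo.presentationTimeUs.
        EGL14.eglSwapBuffers(display, surface);
    }
}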