blob: c058f5ffbce4e89cf1cc617692ebf5646cacff04 [file] [log] [blame]
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.mediav2.cts;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible;
import static android.media.MediaCodecInfo.CodecCapabilities.COLOR_FormatYUVP010;
import static android.mediav2.common.cts.CodecTestBase.hasSupportForColorFormat;
import static android.mediav2.common.cts.CodecTestBase.isHardwareAcceleratedCodec;
import static android.mediav2.common.cts.CodecTestBase.isSoftwareCodec;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeFalse;
import static org.junit.Assume.assumeTrue;
import android.media.MediaCodec;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.mediav2.common.cts.CodecAsyncHandler;
import android.mediav2.common.cts.CodecTestBase;
import android.mediav2.common.cts.OutputManager;
import android.util.Log;
import android.util.Pair;
import android.view.Surface;
import androidx.test.filters.LargeTest;
import androidx.test.platform.app.InstrumentationRegistry;
import com.android.compatibility.common.util.ApiTest;
import com.android.compatibility.common.util.Preconditions;
import org.junit.After;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.stream.IntStream;
/**
* Test mediacodec api, video encoders and their interactions in surface mode.
* <p>
* The test decodes an input clip to surface. This decoded output is fed as input to encoder.
* Assuming no frame drops, the test expects,
* <ul>
* <li>The number of encoded frames to be identical to number of frames present in input clip
* .</li>
* <li>As encoders are expected to give consistent output for a given input and configuration
* parameters, the test checks for consistency across runs. For now, this attribute is not
* strictly enforced in this test.</li>
* <li>The encoder output timestamps list should be identical to decoder input timestamp list
* .</li>
* </ul>
* <p>
* The output of encoder is further verified by computing PSNR to check for obvious visual
* artifacts.
* <p>
* The test runs mediacodec in synchronous and asynchronous mode.
*/
@RunWith(Parameterized.class)
public class CodecEncoderSurfaceTest {
// ---- Constants -----------------------------------------------------------------------
private static final String LOG_TAG = CodecEncoderSurfaceTest.class.getSimpleName();
private static final String MEDIA_DIR = WorkDir.getMediaDirString();
private static final boolean ENABLE_LOGS = false;

// ---- Test parameters, injected by the Parameterized runner (see input()) -------------
private final String mCompName;     // name of the encoder component under test
private final String mMime;         // mediaType the encoder is configured for
private final String mTestFile;     // absolute path of the input clip to decode
private final int mBitrate;         // target encode bitrate in bits/sec
private final int mFrameRate;       // target encode frame rate in fps
private final int mMaxBFrames;      // KEY_MAX_B_FRAMES value given to the encoder
private final boolean mTestToneMap; // true => request HDR-to-SDR tone mapping on decoder
private final int mColorFormat;     // color format requested from the decoder

// ---- Pipeline state ------------------------------------------------------------------
// Max frames the encoder may hold back; used to pace encoder output dequeue.
private int mLatency;
// True until the encoder publishes its actual KEY_LATENCY via output format change.
private boolean mReviseLatency;
private MediaFormat mEncoderFormat;
private MediaExtractor mExtractor;
private MediaCodec mEncoder;
private CodecAsyncHandler mAsyncHandleEncoder;
private String mDecoderName;
private MediaCodec mDecoder;
private MediaFormat mDecoderFormat;
private CodecAsyncHandler mAsyncHandleDecoder;
private boolean mIsCodecInAsyncMode;
private boolean mSignalEOSWithLastFrame;
private boolean mSawDecInputEOS;
private boolean mSawDecOutputEOS;
private boolean mSawEncOutputEOS;
private int mDecInputCount;
private int mDecOutputCount;
private int mEncOutputCount;
private String mTestArgs;
private StringBuilder mTestConfig = new StringBuilder(); // test details for failure logs
private StringBuilder mTestEnv = new StringBuilder();    // run environment for failure logs
private boolean mSaveToMem;    // when true, encoder output payloads go into mOutputBuff
private OutputManager mOutputBuff;
private Surface mSurface;      // encoder input surface; decoder renders onto it
private MediaMuxer mMuxer;
private int mTrackID = -1;     // muxer track id; -1 until the track has been added

static {
    // Native counterpart for the *Native test, plus the media-type selection filter.
    System.loadLibrary("ctsmediav2codecencsurface_jni");
    android.os.Bundle args = InstrumentationRegistry.getArguments();
    CodecTestBase.mimeSelKeys = args.getString(CodecTestBase.MIME_SEL_KEY);
}

/**
 * @param encoder       name of the encoder component under test
 * @param mime          mediaType the encoder is configured for
 * @param testFile      input clip, relative to the media directory
 * @param bitrate       target bitrate in bits/sec
 * @param frameRate     target frame rate in fps
 * @param testToneMap   whether to request HDR-to-SDR tone mapping from the decoder
 * @param colorFormat   decoder output color format to request
 * @param maxBFrames    maximum number of B frames for the encoder
 * @param allTestParams string form of all parameters, used in failure diagnostics
 */
public CodecEncoderSurfaceTest(String encoder, String mime, String testFile, int bitrate,
        int frameRate, boolean testToneMap, int colorFormat, int maxBFrames,
        String allTestParams) {
    mCompName = encoder;
    mMime = mime;
    mTestFile = MEDIA_DIR + testFile;
    mBitrate = bitrate;
    mFrameRate = frameRate;
    mTestToneMap = testToneMap;
    mColorFormat = colorFormat;
    mTestArgs = allTestParams;
    mMaxBFrames = maxBFrames;
    // Until the encoder reports KEY_LATENCY, assume latency equals the B-frame depth.
    mLatency = mMaxBFrames;
    mReviseLatency = false;
    mAsyncHandleDecoder = new CodecAsyncHandler();
    mAsyncHandleEncoder = new CodecAsyncHandler();
}

@Rule
public TestName mTestName = new TestName();
/**
 * Per-test setup: records test details for failure logs, opens the input clip, and skips
 * the test when no suitable decoder exists or required HDR / color-format / tone-mapping
 * support is missing. Prepares the encoder's configure() format.
 *
 * @throws IOException if the input clip cannot be opened
 */
@Before
public void setUp() throws IOException {
    mTestConfig.setLength(0);
    mTestConfig.append("\n################## Test Details ####################\n");
    mTestConfig.append("Test Name :- ").append(mTestName.getMethodName()).append("\n");
    mTestConfig.append("Test Parameters :- ").append(mTestArgs).append("\n");
    if (mCompName.startsWith(CodecTestBase.INVALID_CODEC)) {
        fail("no valid component available for current test. \n" + mTestConfig);
    }
    mDecoderFormat = setUpSource(mTestFile);
    if (mTestToneMap) {
        mDecoderFormat.setInteger(MediaFormat.KEY_COLOR_TRANSFER_REQUEST,
                MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
    }
    ArrayList<MediaFormat> decoderFormatList = new ArrayList<>();
    decoderFormatList.add(mDecoderFormat);
    String decoderMediaType = mDecoderFormat.getString(MediaFormat.KEY_MIME);
    if (CodecTestBase.doesAnyFormatHaveHDRProfile(decoderMediaType, decoderFormatList) ||
            mTestFile.contains("10bit")) {
        // Check if encoder is capable of supporting HDR profiles.
        // Previous check doesn't verify this as profile isn't set in the format
        Assume.assumeTrue(mCompName + " doesn't support HDR encoding",
                CodecTestBase.doesCodecSupportHDRProfile(mCompName, mMime));
    }
    MediaCodecList codecList = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    mDecoderName = codecList.findDecoderForFormat(mDecoderFormat);
    Assume.assumeNotNull(mDecoderFormat.toString() + " not supported by any decoder.",
            mDecoderName);
    if (mColorFormat == COLOR_FormatSurface) {
        // TODO(b/253492870) Remove the following assumption check once this is supported
        // Fixed missing spaces in the message (was "...avcis hardware accelerated...").
        Assume.assumeFalse(
                mDecoderName + " is hardware accelerated and " + mCompName
                        + " is software only.",
                isHardwareAcceleratedCodec(mDecoderName) && isSoftwareCodec(mCompName));
    } else {
        // findDecoderForFormat() ignores color-format and decoder returned may not be
        // supporting the color format set in mDecoderFormat. Following check will
        // skip the test if decoder doesn't support the color format that is set.
        boolean decoderSupportsColorFormat =
                hasSupportForColorFormat(mDecoderName, decoderMediaType, mColorFormat);
        if (mColorFormat == COLOR_FormatYUVP010) {
            assumeTrue(mDecoderName + " doesn't support P010 output.",
                    decoderSupportsColorFormat);
        } else {
            assertTrue(mDecoderName + " doesn't support 420p 888 flexible output.",
                    decoderSupportsColorFormat);
        }
    }
    mEncoderFormat = setUpEncoderFormat(mDecoderFormat);
}
/**
 * Releases any codec, surface, extractor and muxer instances a test run left behind.
 * Each reference is nulled after release so a partially-failed run is never released
 * twice. The decoder is released before the surface it renders to.
 */
@After
public void tearDown() {
    if (mDecoder != null) {
        mDecoder.release();
        mDecoder = null;
    }
    if (mSurface != null) {
        mSurface.release();
        mSurface = null;
    }
    if (mEncoder != null) {
        mEncoder.release();
        mEncoder = null;
    }
    if (mExtractor != null) {
        mExtractor.release();
        mExtractor = null;
    }
    if (mMuxer != null) {
        mMuxer.release();
        mMuxer = null;
    }
}
/**
 * Builds the parameter list: each base tuple (mediaType, clip, bitrate, frameRate,
 * toneMap) is crossed with the applicable color formats and max-B-frame counts. Non-zero
 * B-frame combinations are generated only for AVC and HEVC. High-bit-depth (P010)
 * combinations are added only on Android T and above.
 */
@Parameterized.Parameters(name = "{index}({0}_{1}_{5}_{7})")
public static Collection<Object[]> input() {
    final boolean isEncoder = true;
    final boolean needAudio = false;
    final boolean needVideo = true;
    final List<Object[]> exhaustiveArgsList = new ArrayList<>();
    final List<Object[]> args = new ArrayList<>(Arrays.asList(new Object[][]{
            {MediaFormat.MIMETYPE_VIDEO_H263, "bbb_176x144_128kbps_15fps_h263.3gp", 128000, 15,
                    false},
            {MediaFormat.MIMETYPE_VIDEO_MPEG4, "bbb_128x96_64kbps_12fps_mpeg4.mp4", 64000, 12,
                    false},
            {MediaFormat.MIMETYPE_VIDEO_AVC, "bbb_cif_768kbps_30fps_avc.mp4", 512000, 30,
                    false},
            {MediaFormat.MIMETYPE_VIDEO_HEVC, "bbb_cif_768kbps_30fps_avc.mp4", 512000, 30,
                    false},
            {MediaFormat.MIMETYPE_VIDEO_VP8, "bbb_cif_768kbps_30fps_avc.mp4", 512000, 30,
                    false},
            {MediaFormat.MIMETYPE_VIDEO_VP9, "bbb_cif_768kbps_30fps_avc.mp4", 512000, 30,
                    false},
            {MediaFormat.MIMETYPE_VIDEO_AV1, "bbb_cif_768kbps_30fps_avc.mp4", 512000, 30,
                    false},
    }));
    final List<Object[]> argsHighBitDepth = new ArrayList<>(Arrays.asList(new Object[][]{
            {MediaFormat.MIMETYPE_VIDEO_AVC, "cosmat_520x390_24fps_crf22_avc_10bit.mkv",
                    512000, 30, false},
            {MediaFormat.MIMETYPE_VIDEO_AVC, "cosmat_520x390_24fps_crf22_avc_10bit.mkv",
                    512000, 30, true},
            {MediaFormat.MIMETYPE_VIDEO_HEVC, "cosmat_520x390_24fps_crf22_hevc_10bit.mkv",
                    512000, 30, false},
            {MediaFormat.MIMETYPE_VIDEO_HEVC, "cosmat_520x390_24fps_crf22_hevc_10bit.mkv",
                    512000, 30, true},
            {MediaFormat.MIMETYPE_VIDEO_VP9, "cosmat_520x390_24fps_crf22_vp9_10bit.mkv",
                    512000, 30, false},
            {MediaFormat.MIMETYPE_VIDEO_VP9, "cosmat_520x390_24fps_crf22_vp9_10bit.mkv",
                    512000, 30, true},
            {MediaFormat.MIMETYPE_VIDEO_AV1, "cosmat_520x390_24fps_768kbps_av1_10bit.mkv",
                    512000, 30, false},
            {MediaFormat.MIMETYPE_VIDEO_AV1, "cosmat_520x390_24fps_768kbps_av1_10bit.mkv",
                    512000, 30, true},
    }));
    int[] maxBFrames = {0, 2};
    addCombinations(exhaustiveArgsList, args,
            new int[]{COLOR_FormatSurface, COLOR_FormatYUV420Flexible}, maxBFrames);
    // P010 support was added in Android T, hence limit the following tests to Android T and
    // above
    if (CodecTestBase.IS_AT_LEAST_T) {
        addCombinations(exhaustiveArgsList, argsHighBitDepth,
                new int[]{COLOR_FormatSurface, COLOR_FormatYUVP010}, maxBFrames);
    }
    return CodecTestBase.prepareParamList(exhaustiveArgsList, isEncoder, needAudio, needVideo,
            true);
}

/**
 * Crosses each base argument tuple with the (colorFormat, maxBFrames) pairs and appends
 * the expanded tuples to dst. Non-zero B-frame counts are only exercised for AVC/HEVC;
 * those combinations are skipped for other media types.
 */
private static void addCombinations(List<Object[]> dst, List<Object[]> baseArgs,
        int[] colorFormats, int[] maxBFramesList) {
    int argLength = baseArgs.get(0).length;
    for (Object[] arg : baseArgs) {
        for (int colorFormat : colorFormats) {
            for (int maxBFrame : maxBFramesList) {
                String mediaType = arg[0].toString();
                if (!mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)
                        && !mediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)
                        && maxBFrame != 0) {
                    continue;
                }
                Object[] argUpdate = new Object[argLength + 2];
                System.arraycopy(arg, 0, argUpdate, 0, argLength);
                argUpdate[argLength] = colorFormat;
                argUpdate[argLength + 1] = maxBFrame;
                dst.add(argUpdate);
            }
        }
    }
}
/** Returns true if either the decoder's or the encoder's async callbacks reported an error. */
private boolean hasSeenError() {
    if (mAsyncHandleDecoder.hasSeenError()) {
        return true;
    }
    return mAsyncHandleEncoder.hasSeenError();
}
/**
 * Opens the given clip with MediaExtractor, selects its first video track, sets the
 * requested color format on that track's format and returns it. Fails the test if the
 * clip contains no video track.
 *
 * @param srcFile absolute path of the input clip
 * @return the selected video track's format with KEY_COLOR_FORMAT set to mColorFormat
 * @throws IOException if the extractor cannot open the file
 */
private MediaFormat setUpSource(String srcFile) throws IOException {
    Preconditions.assertTestFileExists(srcFile);
    mExtractor = new MediaExtractor();
    mExtractor.setDataSource(srcFile);
    for (int trackID = 0; trackID < mExtractor.getTrackCount(); trackID++) {
        MediaFormat format = mExtractor.getTrackFormat(trackID);
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mime.startsWith("video/")) {
            mExtractor.selectTrack(trackID);
            // (Removed an unused local list that was built here and never read.)
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mColorFormat);
            return format;
        }
    }
    mExtractor.release();
    fail("No video track found in file: " + srcFile + ". \n" + mTestConfig + mTestEnv);
    return null;
}
/** Resets all per-run pipeline state ahead of a fresh configure/start cycle. */
private void resetContext(boolean isAsync, boolean signalEOSWithLastFrame) {
    // Operating mode for this run.
    mIsCodecInAsyncMode = isAsync;
    mSignalEOSWithLastFrame = signalEOSWithLastFrame;
    // Drop callback state accumulated by any previous run.
    mAsyncHandleDecoder.resetContext();
    mAsyncHandleEncoder.resetContext();
    // Clear end-of-stream markers.
    mSawDecInputEOS = false;
    mSawDecOutputEOS = false;
    mSawEncOutputEOS = false;
    // Clear frame counters.
    mDecInputCount = 0;
    mDecOutputCount = 0;
    mEncOutputCount = 0;
}
/**
 * Configures the encoder in surface mode and the decoder to render onto the encoder's
 * input surface, then records the test environment description used in failure messages.
 * The encoder must be configured first so its input surface exists before the decoder is
 * configured with it. If the encoder's input format advertises KEY_LATENCY, the value is
 * adopted and flagged for revision once the real value arrives via format change.
 */
private void configureCodec(MediaFormat decFormat, MediaFormat encFormat, boolean isAsync,
        boolean signalEOSWithLastFrame) {
    resetContext(isAsync, signalEOSWithLastFrame);
    mAsyncHandleEncoder.setCallBack(mEncoder, isAsync);
    mEncoder.configure(encFormat, null, MediaCodec.CONFIGURE_FLAG_ENCODE, null);
    if (mEncoder.getInputFormat().containsKey(MediaFormat.KEY_LATENCY)) {
        mReviseLatency = true;
        mLatency = mEncoder.getInputFormat().getInteger(MediaFormat.KEY_LATENCY);
    }
    mSurface = mEncoder.createInputSurface();
    assertTrue("Surface is not valid", mSurface.isValid());
    mAsyncHandleDecoder.setCallBack(mDecoder, isAsync);
    mDecoder.configure(decFormat, mSurface, null, 0);
    mTestEnv.setLength(0);
    mTestEnv.append("################### Test Environment #####################\n");
    mTestEnv.append(String.format("Encoder under test :- %s \n", mCompName));
    mTestEnv.append(String.format("Format under test :- %s \n", encFormat));
    mTestEnv.append(String.format("Encoder is fed with output of :- %s \n", mDecoderName));
    // Fix: this entry previously lacked the trailing newline every other entry has, so
    // the next line was concatenated onto it in failure logs.
    mTestEnv.append(String.format("Format of Decoder Input :- %s \n", decFormat));
    mTestEnv.append(String.format("Encoder and Decoder are operating in :- %s mode \n",
            (isAsync ? "asynchronous" : "synchronous")));
    mTestEnv.append(String.format("Components received input eos :- %s \n",
            (signalEOSWithLastFrame ? "with full buffer" : "with empty buffer")));
    if (ENABLE_LOGS) {
        Log.v(LOG_TAG, "codec configured");
    }
}
/** Queues an empty end-of-stream buffer on the decoder, unless EOS was already queued. */
private void enqueueDecoderEOS(int bufferIndex) {
    if (mSawDecInputEOS) {
        return; // EOS already submitted; nothing more to queue.
    }
    mDecoder.queueInputBuffer(bufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    mSawDecInputEOS = true;
    if (ENABLE_LOGS) {
        Log.v(LOG_TAG, "Queued End of Stream");
    }
}
/**
 * Reads the next sample from the extractor and queues it on the given decoder input
 * buffer, translating extractor sample flags to codec buffer flags. Once the extractor
 * has no more samples, queues EOS instead. Timestamps of queued non-config, non-partial
 * samples are recorded in mOutputBuff for the final input/output pts comparison.
 */
private void enqueueDecoderInput(int bufferIndex) {
    if (mExtractor.getSampleSize() < 0) {
        enqueueDecoderEOS(bufferIndex);
    } else {
        ByteBuffer inputBuffer = mDecoder.getInputBuffer(bufferIndex);
        mExtractor.readSampleData(inputBuffer, 0);
        int size = (int) mExtractor.getSampleSize();
        long pts = mExtractor.getSampleTime();
        int extractorFlags = mExtractor.getSampleFlags();
        int codecFlags = 0;
        if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_SYNC) != 0) {
            codecFlags |= MediaCodec.BUFFER_FLAG_KEY_FRAME;
        }
        if ((extractorFlags & MediaExtractor.SAMPLE_FLAG_PARTIAL_FRAME) != 0) {
            codecFlags |= MediaCodec.BUFFER_FLAG_PARTIAL_FRAME;
        }
        // advance() returning false means this is the last sample; optionally piggyback
        // EOS on it instead of queueing a separate empty EOS buffer later.
        if (!mExtractor.advance() && mSignalEOSWithLastFrame) {
            codecFlags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
            mSawDecInputEOS = true;
        }
        if (ENABLE_LOGS) {
            Log.v(LOG_TAG, "input: id: " + bufferIndex + " size: " + size + " pts: " + pts +
                    " flags: " + codecFlags);
        }
        mDecoder.queueInputBuffer(bufferIndex, 0, size, pts, codecFlags);
        // Count only real frames: skip empty, codec-config and partial-frame buffers.
        if (size > 0 && (codecFlags & (MediaCodec.BUFFER_FLAG_CODEC_CONFIG |
                MediaCodec.BUFFER_FLAG_PARTIAL_FRAME)) == 0) {
            mOutputBuff.saveInPTS(pts);
            mDecInputCount++;
        }
    }
}
/**
 * Handles one decoder output buffer: updates the output-EOS flag, counts non-config
 * frames, and releases the buffer (rendering it to the encoder's input surface when one
 * is attached).
 */
private void dequeueDecoderOutput(int bufferIndex, MediaCodec.BufferInfo info) {
    boolean isEos = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
    if (isEos) {
        mSawDecOutputEOS = true;
    }
    if (ENABLE_LOGS) {
        Log.v(LOG_TAG, "output: id: " + bufferIndex + " flags: " + info.flags + " size: " +
                info.size + " timestamp: " + info.presentationTimeUs);
    }
    boolean isConfig = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
    if (info.size > 0 && !isConfig) {
        mDecOutputCount++;
    }
    // Render to the surface whenever one is attached.
    mDecoder.releaseOutputBuffer(bufferIndex, mSurface != null);
}
/**
 * Handles one encoder output buffer: updates the encoder-EOS flag, optionally stores the
 * payload in mOutputBuff, writes it to the muxer, records timestamps of non-config
 * buffers, and releases the buffer back to the encoder.
 */
private void dequeueEncoderOutput(int bufferIndex, MediaCodec.BufferInfo info) {
    if (ENABLE_LOGS) {
        Log.v(LOG_TAG, "encoder output: id: " + bufferIndex + " flags: " + info.flags +
                " size: " + info.size + " timestamp: " + info.presentationTimeUs);
    }
    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mSawEncOutputEOS = true;
    }
    if (info.size > 0) {
        ByteBuffer buf = mEncoder.getOutputBuffer(bufferIndex);
        if (mSaveToMem) {
            mOutputBuff.saveToMemory(buf, info);
        }
        if (mMuxer != null) {
            // Lazily add the track and start the muxer on the first non-empty buffer,
            // using the encoder's current output format.
            if (mTrackID == -1) {
                mTrackID = mMuxer.addTrack(mEncoder.getOutputFormat());
                mMuxer.start();
            }
            mMuxer.writeSampleData(mTrackID, buf, info);
        }
        // Codec-config buffers carry no frame; count and timestamp only real frames.
        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
            mOutputBuff.saveOutPTS(info.presentationTimeUs);
            mEncOutputCount++;
        }
    }
    mEncoder.releaseOutputBuffer(bufferIndex, false);
}
/**
 * Attempts to dequeue and handle one encoder output buffer. In sync mode this waits at
 * most timeOutUs. In async mode, while a KEY_LATENCY revision is pending, it first waits
 * for the encoder's output-format-changed callback; if the reported latency is larger
 * than the current value it is adopted and the call returns without consuming output.
 *
 * @throws InterruptedException if the format change does not arrive within the retry
 *         budget, or if the sleep between retries is interrupted
 */
private void tryEncoderOutput(long timeOutUs) throws InterruptedException {
    if (mIsCodecInAsyncMode) {
        if (!hasSeenError() && !mSawEncOutputEOS) {
            int retry = 0;
            // Wait for the encoder to publish its actual latency before pulling output.
            while (mReviseLatency) {
                if (mAsyncHandleEncoder.hasOutputFormatChanged()) {
                    mReviseLatency = false;
                    int actualLatency = mAsyncHandleEncoder.getOutputFormat()
                            .getInteger(MediaFormat.KEY_LATENCY, mLatency);
                    if (mLatency < actualLatency) {
                        mLatency = actualLatency;
                        // Latency grew; let the caller queue more input before draining.
                        return;
                    }
                } else {
                    if (retry > CodecTestBase.RETRY_LIMIT) throw new InterruptedException(
                            "did not receive output format changed for encoder after " +
                                    CodecTestBase.Q_DEQ_TIMEOUT_US * CodecTestBase.RETRY_LIMIT +
                                    " us");
                    Thread.sleep(CodecTestBase.Q_DEQ_TIMEOUT_US / 1000);
                    retry++;
                }
            }
            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleEncoder.getOutput();
            if (element != null) {
                dequeueEncoderOutput(element.first, element.second);
            }
        }
    } else {
        MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo();
        if (!mSawEncOutputEOS) {
            int outputBufferId = mEncoder.dequeueOutputBuffer(outInfo, timeOutUs);
            if (outputBufferId >= 0) {
                dequeueEncoderOutput(outputBufferId, outInfo);
            } else if (outputBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // Sync-mode path of the latency revision: pick it up from the new format.
                mLatency = mEncoder.getOutputFormat()
                        .getInteger(MediaFormat.KEY_LATENCY, mLatency);
            }
        }
    }
}
/**
 * Drains encoder output until EOS is seen; in async mode the loop also stops as soon as
 * either codec's callbacks report an error.
 */
private void waitForAllEncoderOutputs() throws InterruptedException {
    while (!mSawEncOutputEOS) {
        if (mIsCodecInAsyncMode && hasSeenError()) {
            break;
        }
        tryEncoderOutput(CodecTestBase.Q_DEQ_TIMEOUT_US);
    }
}
/**
 * Ensures end-of-stream is queued on the decoder input (if it was not piggybacked on the
 * last sample), then drains decoder output to completion. Once decoder output EOS is
 * seen, EOS is signalled to the encoder's input surface. Encoder output is drained
 * whenever the decoder gets more than mLatency frames ahead of the encoder, so the
 * encoder is never starved of output space.
 */
private void queueEOS() throws InterruptedException {
    // Phase 1: queue EOS on the decoder input, servicing outputs along the way.
    if (mIsCodecInAsyncMode) {
        while (!mAsyncHandleDecoder.hasSeenError() && !mSawDecInputEOS) {
            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleDecoder.getWork();
            if (element != null) {
                int bufferID = element.first;
                MediaCodec.BufferInfo info = element.second;
                if (info != null) {
                    // <id, info> pair is an output callback.
                    dequeueDecoderOutput(bufferID, info);
                } else {
                    // <id, null> pair is an input callback; use it to send EOS.
                    enqueueDecoderEOS(element.first);
                }
            }
        }
    } else {
        MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo();
        while (!mSawDecInputEOS) {
            int outputBufferId =
                    mDecoder.dequeueOutputBuffer(outInfo, CodecTestBase.Q_DEQ_TIMEOUT_US);
            if (outputBufferId >= 0) {
                dequeueDecoderOutput(outputBufferId, outInfo);
            }
            int inputBufferId = mDecoder.dequeueInputBuffer(CodecTestBase.Q_DEQ_TIMEOUT_US);
            if (inputBufferId != -1) {
                enqueueDecoderEOS(inputBufferId);
            }
        }
    }
    // Phase 2: drain decoder output until EOS, forwarding EOS to the encoder surface and
    // pacing encoder output by the latency window.
    if (mIsCodecInAsyncMode) {
        while (!hasSeenError() && !mSawDecOutputEOS) {
            Pair<Integer, MediaCodec.BufferInfo> decOp = mAsyncHandleDecoder.getOutput();
            if (decOp != null) dequeueDecoderOutput(decOp.first, decOp.second);
            if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream();
            if (mDecOutputCount - mEncOutputCount > mLatency) {
                tryEncoderOutput(-1);
            }
        }
    } else {
        MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo();
        while (!mSawDecOutputEOS) {
            int outputBufferId =
                    mDecoder.dequeueOutputBuffer(outInfo, CodecTestBase.Q_DEQ_TIMEOUT_US);
            if (outputBufferId >= 0) {
                dequeueDecoderOutput(outputBufferId, outInfo);
            }
            if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream();
            if (mDecOutputCount - mEncOutputCount > mLatency) {
                tryEncoderOutput(-1);
            }
        }
    }
}
/**
 * Runs the decode-to-surface / encode-from-surface pipeline for up to frameLimit input
 * frames or until decoder input EOS. Decoder outputs render to the encoder surface as
 * they arrive; once decoder output EOS is seen, EOS is signalled on the encoder input
 * surface. Encoder output is drained whenever the decoder runs more than mLatency frames
 * ahead of the encoder. Remaining outputs are collected by queueEOS() /
 * waitForAllEncoderOutputs().
 */
private void doWork(int frameLimit) throws InterruptedException {
    int frameCnt = 0;
    if (mIsCodecInAsyncMode) {
        // dequeue output after inputEOS is expected to be done in waitForAllOutputs()
        while (!hasSeenError() && !mSawDecInputEOS && frameCnt < frameLimit) {
            Pair<Integer, MediaCodec.BufferInfo> element = mAsyncHandleDecoder.getWork();
            if (element != null) {
                int bufferID = element.first;
                MediaCodec.BufferInfo info = element.second;
                if (info != null) {
                    // <id, info> corresponds to output callback. Handle it accordingly
                    dequeueDecoderOutput(bufferID, info);
                } else {
                    // <id, null> corresponds to input callback. Handle it accordingly
                    enqueueDecoderInput(bufferID);
                    frameCnt++;
                }
            }
            // check decoder EOS
            if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream();
            // encoder output
            if (mDecOutputCount - mEncOutputCount > mLatency) {
                tryEncoderOutput(-1);
            }
        }
    } else {
        MediaCodec.BufferInfo outInfo = new MediaCodec.BufferInfo();
        while (!mSawDecInputEOS && frameCnt < frameLimit) {
            // decoder input
            int inputBufferId = mDecoder.dequeueInputBuffer(CodecTestBase.Q_DEQ_TIMEOUT_US);
            if (inputBufferId != -1) {
                enqueueDecoderInput(inputBufferId);
                frameCnt++;
            }
            // decoder output
            int outputBufferId =
                    mDecoder.dequeueOutputBuffer(outInfo, CodecTestBase.Q_DEQ_TIMEOUT_US);
            if (outputBufferId >= 0) {
                dequeueDecoderOutput(outputBufferId, outInfo);
            }
            // check decoder EOS
            if (mSawDecOutputEOS) mEncoder.signalEndOfInputStream();
            // encoder output
            if (mDecOutputCount - mEncOutputCount > mLatency) {
                tryEncoderOutput(-1);
            }
        }
    }
}
/**
 * Builds the encoder's configure() format from the decoder clip's dimensions plus the
 * bitrate / frame-rate / B-frame parameters this test instance was created with. Color
 * format is always COLOR_FormatSurface since input arrives via the input surface.
 */
private MediaFormat setUpEncoderFormat(MediaFormat decoderFormat) {
    int width = decoderFormat.getInteger(MediaFormat.KEY_WIDTH);
    int height = decoderFormat.getInteger(MediaFormat.KEY_HEIGHT);
    MediaFormat fmt = new MediaFormat();
    fmt.setString(MediaFormat.KEY_MIME, mMime);
    fmt.setInteger(MediaFormat.KEY_WIDTH, width);
    fmt.setInteger(MediaFormat.KEY_HEIGHT, height);
    fmt.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FormatSurface);
    fmt.setInteger(MediaFormat.KEY_FRAME_RATE, mFrameRate);
    fmt.setInteger(MediaFormat.KEY_BIT_RATE, mBitrate);
    fmt.setFloat(MediaFormat.KEY_I_FRAME_INTERVAL, 1.0f);
    fmt.setInteger(MediaFormat.KEY_MAX_B_FRAMES, mMaxBFrames);
    return fmt;
}
/**
 * Asserts that the given format reflects successful HDR-to-SDR tone mapping: SDR color
 * transfer, a non-BT2020 color standard, and no HDR profile for the encode mediaType.
 *
 * @param format     format to inspect (decoder/encoder output or extractor track format)
 * @param descriptor human-readable name of the format's origin, used in failure messages
 */
private void validateToneMappedFormat(MediaFormat format, String descriptor) {
    assertEquals("unexpected color transfer in " + descriptor + " after tone mapping",
            MediaFormat.COLOR_TRANSFER_SDR_VIDEO,
            format.getInteger(MediaFormat.KEY_COLOR_TRANSFER, 0));
    assertNotEquals("unexpected color standard in " + descriptor + " after tone mapping",
            MediaFormat.COLOR_STANDARD_BT2020,
            format.getInteger(MediaFormat.KEY_COLOR_STANDARD, 0));
    int profile = format.getInteger(MediaFormat.KEY_PROFILE, -1);
    int[] profileArray = CodecTestBase.PROFILE_HDR_MAP.get(mMime);
    // The map may have no entry for this mediaType; no HDR profiles exist for it then, so
    // the "must not contain HDR profile" requirement holds vacuously. Guarding avoids an
    // NPE from IntStream.of(null).
    if (profileArray != null) {
        assertFalse(descriptor + " must not contain HDR profile after tone mapping",
                IntStream.of(profileArray).anyMatch(x -> x == profile));
    }
}
/**
 * Checks if the component under test can encode from surface properly. The test runs
 * mediacodec in both synchronous and asynchronous mode. The test feeds the encoder input
 * surface with output of decoder. Assuming no frame drops, the number of output frames from
 * encoder should be identical to number of input frames to decoder. Also the timestamps
 * should be identical. As encoder output is deterministic, the test expects consistent
 * output in all runs. The output is written to a file using muxer. This file is validated
 * for PSNR to check if the encoding happened successfully with out any obvious artifacts.
 */
@ApiTest(apis = {"MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface"})
@LargeTest
@Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
public void testSimpleEncodeFromSurface() throws IOException, InterruptedException {
    mDecoder = MediaCodec.createByCodecName(mDecoderName);
    String tmpPath = null;
    boolean muxOutput = true;
    {
        mEncoder = MediaCodec.createByCodecName(mCompName);
        /* TODO(b/149027258) */
        mSaveToMem = false;
        // First pass (async) records reference output; second pass (sync) records into
        // 'test' and is intended for cross-run comparison (currently disabled below).
        OutputManager ref = new OutputManager();
        OutputManager test = new OutputManager(ref.getSharedErrorLogs());
        int loopCounter = 0;
        boolean[] boolStates = {true, false};
        for (boolean isAsync : boolStates) {
            mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
            mOutputBuff = loopCounter == 0 ? ref : test;
            mOutputBuff.reset();
            // Mux only the first pass; container chosen by mediaType (webm for VPx).
            if (muxOutput && loopCounter == 0) {
                int muxerFormat;
                if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_VP8) ||
                        mMime.equals(MediaFormat.MIMETYPE_VIDEO_VP9)) {
                    muxerFormat = MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM;
                    tmpPath = File.createTempFile("tmp", ".webm").getAbsolutePath();
                } else {
                    muxerFormat = MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4;
                    tmpPath = File.createTempFile("tmp", ".mp4").getAbsolutePath();
                }
                mMuxer = new MediaMuxer(tmpPath, muxerFormat);
            }
            configureCodec(mDecoderFormat, mEncoderFormat, isAsync, false);
            if (mTestToneMap) {
                // Skip unless the decoder acknowledged the tone-mapping request.
                int transferRequest = mDecoder.getInputFormat().getInteger(
                        MediaFormat.KEY_COLOR_TRANSFER_REQUEST, 0);
                assumeTrue(mDecoderName + " does not support HDR to SDR tone mapping",
                        0 != transferRequest);
            }
            mEncoder.start();
            mDecoder.start();
            doWork(Integer.MAX_VALUE);
            queueEOS();
            waitForAllEncoderOutputs();
            // Snapshot output formats before stop/reset for tone-map validation below.
            MediaFormat encoderOutputFormat = mEncoder.getOutputFormat();
            MediaFormat decoderOutputFormat = mDecoder.getOutputFormat();
            if (muxOutput) {
                if (mTrackID != -1) {
                    mMuxer.stop();
                    mTrackID = -1;
                }
                if (mMuxer != null) {
                    mMuxer.release();
                    mMuxer = null;
                }
            }
            mDecoder.stop();
            /* TODO(b/147348711) */
            if (false) mEncoder.stop();
            else mEncoder.reset();
            assertFalse("Decoder has encountered error in async mode. \n"
                            + mTestConfig + mTestEnv + mAsyncHandleDecoder.getErrMsg(),
                    mAsyncHandleDecoder.hasSeenError());
            assertFalse("Encoder has encountered error in async mode. \n"
                            + mTestConfig + mTestEnv + mAsyncHandleEncoder.getErrMsg(),
                    mAsyncHandleEncoder.hasSeenError());
            assertTrue("Decoder has not received any input \n" + mTestConfig + mTestEnv,
                    0 != mDecInputCount);
            assertTrue("Decoder has not sent any output \n" + mTestConfig + mTestEnv,
                    0 != mDecOutputCount);
            assertTrue("Encoder has not sent any output \n" + mTestConfig + mTestEnv,
                    0 != mEncOutputCount);
            assertEquals("Decoder output count is not equal to decoder input count \n"
                    + mTestConfig + mTestEnv, mDecInputCount, mDecOutputCount);
            /* TODO(b/153127506)
             * Currently disabling all encoder output checks. Added checks only for encoder
             * timeStamp is in increasing order or not.
             * Once issue is fixed remove increasing timestamp check and enable encoder checks.
             */
            /*assertEquals("Encoder output count is not equal to Decoder input count \n"
                    + mTestConfig + mTestEnv, mDecInputCount, mEncOutputCount);
            if (loopCounter != 0 && !ref.equals(test)) {
                fail("Encoder output is not consistent across runs \n" + mTestConfig + mTestEnv
                        + test.getErrMsg());
            }
            if (loopCounter == 0 &&
                    !ref.isOutPtsListIdenticalToInpPtsList((mMaxBFrames != 0))) {
                fail("Input pts list and Output pts list are not identical \n" + mTestConfig
                        + mTestEnv + ref.getErrMsg());
            }*/
            // With B frames, output order legitimately differs from presentation order.
            if (mMaxBFrames == 0 && !mOutputBuff.isPtsStrictlyIncreasing(Long.MIN_VALUE)) {
                fail("Output timestamps are not strictly increasing \n" + mTestConfig + mTestEnv
                        + mOutputBuff.getErrMsg());
            }
            if (mTestToneMap) {
                validateToneMappedFormat(decoderOutputFormat, "decoder output format");
                validateToneMappedFormat(encoderOutputFormat, "encoder output format");
                // Also verify the muxed file carries the tone-mapped attributes.
                if (tmpPath != null) {
                    MediaExtractor extractor = new MediaExtractor();
                    extractor.setDataSource(tmpPath);
                    MediaFormat extractorFormat = extractor.getTrackFormat(0);
                    extractor.release();
                    validateToneMappedFormat(extractorFormat, "extractor format");
                }
            }
            loopCounter++;
            // The surface is recreated by configureCodec() on the next pass.
            mSurface.release();
            mSurface = null;
        }
        mEncoder.release();
    }
    mDecoder.release();
    mExtractor.release();
    if (muxOutput) new File(tmpPath).delete();
}
/**
 * Native (NDK) counterpart of the surface encode test, implemented in
 * libctsmediav2codecencsurface_jni. Diagnostics are appended to retMsg.
 *
 * @return true if the native test passed
 */
private native boolean nativeTestSimpleEncode(String encoder, String decoder, String mime,
        String testFile, String muxFile, int bitrate, int framerate, int colorFormat,
        StringBuilder retMsg);
/**
 * Test is similar to {@link #testSimpleEncodeFromSurface()} but uses ndk api
 */
@ApiTest(apis = {"MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface"})
@LargeTest
@Test(timeout = CodecTestBase.PER_TEST_TIMEOUT_LARGE_TEST_MS)
public void testSimpleEncodeFromSurfaceNative() throws IOException {
    assumeFalse("tone mapping tests are skipped in native mode", mTestToneMap);
    // Container chosen by mediaType, mirroring the sdk test: webm for VPx, mp4 otherwise.
    String tmpPath;
    if (mMime.equals(MediaFormat.MIMETYPE_VIDEO_VP8) ||
            mMime.equals(MediaFormat.MIMETYPE_VIDEO_VP9)) {
        tmpPath = File.createTempFile("tmp", ".webm").getAbsolutePath();
    } else {
        tmpPath = File.createTempFile("tmp", ".mp4").getAbsolutePath();
    }
    int colorFormat = mDecoderFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT, -1);
    try {
        boolean isPass = nativeTestSimpleEncode(mCompName, mDecoderName, mMime, mTestFile,
                tmpPath, mBitrate, mFrameRate, colorFormat, mTestConfig);
        assertTrue(mTestConfig.toString(), isPass);
    } finally {
        // Fix: previously the muxed temp file was never deleted (the sdk test deletes
        // its output); clean up even when the native test fails.
        new File(tmpPath).delete();
    }
}
}