/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.media.codec.cts;
import android.content.Context;
import android.content.res.Resources;
import android.media.MediaCodec;
import android.media.MediaCodec.CodecException;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.cts.MediaCodecWrapper;
import android.media.cts.NdkMediaCodec;
import android.media.cts.Preconditions;
import android.media.cts.SdkMediaCodec;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Looper;
import android.platform.test.annotations.AppModeFull;
import android.test.AndroidTestCase;
import android.util.Log;
import com.android.compatibility.common.util.MediaUtils;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Locale;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
/**
* Verification test for video encoders and decoders.
*
* A raw YV12 stream is encoded at various settings and written to an IVF
* file. The encoded stream's bitrate and key frame interval are checked
* against target values. The stream is later decoded to verify that frames
* are decodable and to calculate PSNR values for various bitrates.
*/
@AppModeFull(reason = "Instant apps cannot access the SD card")
public class VideoCodecTestBase extends AndroidTestCase {
protected static final String TAG = "VideoCodecTestBase";
protected static final String VP8_MIME = MediaFormat.MIMETYPE_VIDEO_VP8;
protected static final String VP9_MIME = MediaFormat.MIMETYPE_VIDEO_VP9;
protected static final String AVC_MIME = MediaFormat.MIMETYPE_VIDEO_AVC;
protected static final String HEVC_MIME = MediaFormat.MIMETYPE_VIDEO_HEVC;
protected static final String SDCARD_DIR =
Environment.getExternalStorageDirectory().getAbsolutePath();
static final String mInpPrefix = WorkDir.getMediaDirString();
// Default timeout for MediaCodec buffer dequeue - 200 ms.
protected static final long DEFAULT_DEQUEUE_TIMEOUT_US = 200000;
// Default timeout for MediaEncoderAsync - 30 sec.
protected static final long DEFAULT_ENCODE_TIMEOUT_MS = 30000;
// Default sync frame interval in frames
private static final int SYNC_FRAME_INTERVAL = 30;
// Video bitrate types - values must match OMX_VIDEO_CONTROLRATETYPE from OMX_Video.h
protected static final int VIDEO_ControlRateVariable = 1;
protected static final int VIDEO_ControlRateConstant = 2;
// NV12 color format supported by QCOM codec, but not declared in MediaCodec -
// see /hardware/qcom/media/mm-core/inc/OMX_QCOMExtns.h
private static final int COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m = 0x7FA30C04;
// Allowable color formats supported by codec - in order of preference.
private static final int[] mSupportedColorList = {
CodecCapabilities.COLOR_FormatYUV420Planar,
CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
COLOR_QCOM_FORMATYUV420PackedSemiPlanar32m
};
// Scaled image cache list - contains scale factors for which up-scaled frames
// have already been calculated and written to a YUV file.
ArrayList<Integer> mScaledImages = new ArrayList<Integer>();
private Resources mResources;
@Override
public void setContext(Context context) {
super.setContext(context);
mResources = mContext.getResources();
}
/**
* Video codec properties generated by getVideoCodecProperties() function.
*/
private class CodecProperties {
CodecProperties(String codecName, int colorFormat) {
this.codecName = codecName;
this.colorFormat = colorFormat;
}
public final String codecName; // OpenMAX component name for the video codec.
public final int colorFormat; // Color format supported by the codec.
}
/**
* Function to find a video codec.
*
* Iterates through the list of available codecs and tries to find a
* video codec that supports either the YUV420 planar or NV12 color format.
* If the forceGoogleCodec parameter is set to true, the function always
* returns a Google video codec.
* If it is set to false, the function looks for a platform-specific video
* codec first and falls back to a Google video codec if none exists.
*
* @param isEncoder True if an encoder is requested, false for a decoder.
* @param format Media format that the codec must support.
* @param forceGoogleCodec Forces the use of a Google codec.
*/
private CodecProperties getVideoCodecProperties(
boolean isEncoder,
MediaFormat format,
boolean forceGoogleCodec) throws Exception {
CodecProperties codecProperties = null;
String mime = format.getString(MediaFormat.KEY_MIME);
// Loop through the list of codec components in case platform specific codec
// is requested.
MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
for (MediaCodecInfo codecInfo : mcl.getCodecInfos()) {
if (isEncoder != codecInfo.isEncoder()) {
continue;
}
Log.v(TAG, codecInfo.getName());
// TODO: remove the test's dependence on Google codecs
// Skip vendor codecs when a Google (software) codec is explicitly requested.
if (codecInfo.isVendor() && forceGoogleCodec) {
continue;
}
for (String type : codecInfo.getSupportedTypes()) {
if (!type.equalsIgnoreCase(mime)) {
continue;
}
CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(type);
if (!capabilities.isFormatSupported(format)) {
continue;
}
// Get candidate codec properties.
Log.v(TAG, "Found candidate codec " + codecInfo.getName());
for (int colorFormat: capabilities.colorFormats) {
Log.v(TAG, " Color: 0x" + Integer.toHexString(colorFormat));
}
// Check supported color formats.
for (int supportedColorFormat : mSupportedColorList) {
for (int codecColorFormat : capabilities.colorFormats) {
if (codecColorFormat == supportedColorFormat) {
codecProperties = new CodecProperties(codecInfo.getName(),
codecColorFormat);
Log.v(TAG, "Found target codec " + codecProperties.codecName +
". Color: 0x" + Integer.toHexString(codecColorFormat));
// return first vendor codec (hopefully HW) found
if (codecInfo.isVendor()) {
return codecProperties;
}
}
}
}
}
}
if (codecProperties == null) {
Log.i(TAG, "no suitable " + (forceGoogleCodec ? "google " : "")
+ (isEncoder ? "encoder " : "decoder ") + "found for " + format);
}
return codecProperties;
}
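// A minimal alternative sketch (not used by this test): the platform can pick
// a codec name for a format directly via MediaCodecList, although that API
// cannot express the ordered color-format preference implemented above:
//
//     MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
//     String encoderName = list.findEncoderForFormat(format);
//     String decoderName = list.findDecoderForFormat(format);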
/**
* Parameters for encoded video stream.
*/
protected class EncoderOutputStreamParameters {
// Name of the raw YUV420 input file. When this parameter is null,
// the resource named by the inputResource parameter is used instead.
public String inputYuvFilename;
// Name of scaled YUV420 input file.
public String scaledYuvFilename;
// Name of the raw input resource (YUV420). Used only if the
// inputYuvFilename parameter is null.
public String inputResource;
// Name of the IVF file to write the encoded bitstream to.
public String outputIvfFilename;
// Mime type of the encoded content.
public String codecMimeType;
// Forces the use of a Google video encoder.
boolean forceGoogleEncoder;
// Number of frames to encode.
int frameCount;
// Frame rate of input file in frames per second.
int frameRate;
// Encoded frame width.
public int frameWidth;
// Encoded frame height.
public int frameHeight;
// Encoding bitrate array in bits/second for every frame. If the array length
// is shorter than the total number of frames, the last value is re-used for
// all remaining frames. For constant bitrate encoding, a single-element
// array can be used with its first element set to the target bitrate value.
public int[] bitrateSet;
// Encoding bitrate type - VBR or CBR
public int bitrateType;
// Number of temporal layers
public int temporalLayers;
// Desired key frame interval - codec is asked to generate key frames
// at a period defined by this parameter.
public int syncFrameInterval;
// Optional parameter - forced key frame interval. Used to
// explicitly request the codec to generate key frames using
// MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME parameter.
public int syncForceFrameInterval;
// Buffer dequeue timeout in microseconds.
long timeoutDequeue;
// Whether the encoder should run in a looper thread.
boolean runInLooperThread;
// Whether to use NdkMediaCodec instead of the SDK MediaCodec.
boolean useNdk;
}
private String getCodecSuffix(String codecMimeType) {
switch(codecMimeType) {
case VP8_MIME:
return "vp8";
case VP9_MIME:
return "vp9";
case AVC_MIME:
return "avc";
case HEVC_MIME:
return "hevc";
default:
Log.w(TAG, "getCodecSuffix got an unexpected codecMimeType.");
}
return "video";
}
/**
* Generates an array of default parameters for encoder output streams based
* on the given upscaling values.
*/
protected ArrayList<EncoderOutputStreamParameters> getDefaultEncodingParameterList(
String inputYuvName,
String outputIvfBaseName,
String codecMimeType,
int encodeSeconds,
int[] resolutionScales,
int frameWidth,
int frameHeight,
int frameRate,
int bitrateMode,
int[] bitrates,
boolean syncEncoding) {
assertTrue(resolutionScales.length == bitrates.length);
int numCodecs = resolutionScales.length;
ArrayList<EncoderOutputStreamParameters> outputParameters =
new ArrayList<EncoderOutputStreamParameters>(numCodecs);
for (int i = 0; i < numCodecs; i++) {
EncoderOutputStreamParameters params = new EncoderOutputStreamParameters();
if (inputYuvName != null) {
params.inputYuvFilename = SDCARD_DIR + File.separator + inputYuvName;
} else {
params.inputYuvFilename = null;
}
params.scaledYuvFilename = SDCARD_DIR + File.separator +
outputIvfBaseName + resolutionScales[i] + ".yuv";
params.inputResource = "football_qvga.yuv";
params.codecMimeType = codecMimeType;
String codecSuffix = getCodecSuffix(codecMimeType);
params.outputIvfFilename = SDCARD_DIR + File.separator +
outputIvfBaseName + resolutionScales[i] + "_" + codecSuffix + ".ivf";
params.forceGoogleEncoder = false;
params.frameCount = encodeSeconds * frameRate;
params.frameRate = frameRate;
params.frameWidth = Math.min(frameWidth * resolutionScales[i], 1280);
params.frameHeight = Math.min(frameHeight * resolutionScales[i], 720);
params.bitrateSet = new int[1];
params.bitrateSet[0] = bitrates[i];
params.bitrateType = bitrateMode;
params.temporalLayers = 0;
params.syncFrameInterval = SYNC_FRAME_INTERVAL;
params.syncForceFrameInterval = 0;
if (syncEncoding) {
params.timeoutDequeue = DEFAULT_DEQUEUE_TIMEOUT_US;
params.runInLooperThread = false;
} else {
params.timeoutDequeue = 0;
params.runInLooperThread = true;
}
outputParameters.add(params);
}
return outputParameters;
}
protected EncoderOutputStreamParameters getDefaultEncodingParameters(
String inputYuvName,
String outputIvfBaseName,
String codecMimeType,
int encodeSeconds,
int frameWidth,
int frameHeight,
int frameRate,
int bitrateMode,
int bitrate,
boolean syncEncoding) {
int[] scaleValues = { 1 };
int[] bitrates = { bitrate };
return getDefaultEncodingParameterList(
inputYuvName,
outputIvfBaseName,
codecMimeType,
encodeSeconds,
scaleValues,
frameWidth,
frameHeight,
frameRate,
bitrateMode,
bitrates,
syncEncoding).get(0);
}
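// Hypothetical usage sketch from a subclass test (all values illustrative):
//
//     EncoderOutputStreamParameters params = getDefaultEncodingParameters(
//             null /* read from inputResource */, "video_encode", VP8_MIME,
//             9 /* seconds */, 320, 240, 30 /* fps */,
//             VIDEO_ControlRateVariable, 400000 /* bps */, true /* sync */);
//     ArrayList<MediaCodec.BufferInfo> bufferInfos = encode(params);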
/**
* Converts (interleaves) YUV420 planar to NV12.
* Assumes packed, macroblock-aligned frame with no cropping
* (visible/coded row length == stride).
*/
private static byte[] YUV420ToNV(int width, int height, byte[] yuv) {
byte[] nv = new byte[yuv.length];
// Y plane we just copy.
System.arraycopy(yuv, 0, nv, 0, width * height);
// U & V plane we interleave.
int u_offset = width * height;
int v_offset = u_offset + u_offset / 4;
int nv_offset = width * height;
for (int i = 0; i < width * height / 4; i++) {
nv[nv_offset++] = yuv[u_offset++];
nv[nv_offset++] = yuv[v_offset++];
}
return nv;
}
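// Layout sketch for a hypothetical 4x2 frame (bytes in memory order):
//     YUV420 planar: Y0..Y7 | U0 U1 | V0 V1
//     NV12:          Y0..Y7 | U0 V0 U1 V1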
/**
* Converts (de-interleaves) NV12 to YUV420 planar.
* Stride may be greater than width, slice height may be greater than height.
*/
private static byte[] NV12ToYUV420(int width, int height,
int stride, int sliceHeight, byte[] nv12) {
byte[] yuv = new byte[width * height * 3 / 2];
// Y plane we just copy.
for (int i = 0; i < height; i++) {
System.arraycopy(nv12, i * stride, yuv, i * width, width);
}
// U & V plane - de-interleave.
int u_offset = width * height;
int v_offset = u_offset + u_offset / 4;
int nv_offset;
for (int i = 0; i < height / 2; i++) {
nv_offset = stride * (sliceHeight + i);
for (int j = 0; j < width / 2; j++) {
yuv[u_offset++] = nv12[nv_offset++];
yuv[v_offset++] = nv12[nv_offset++];
}
}
return yuv;
}
/**
* Packs YUV420 frame by moving it to a smaller size buffer with stride and slice
* height equal to the crop window.
*/
private static byte[] PackYUV420(int left, int top, int width, int height,
int stride, int sliceHeight, byte[] src) {
byte[] dst = new byte[width * height * 3 / 2];
// Y copy.
for (int i = 0; i < height; i++) {
System.arraycopy(src, (i + top) * stride + left, dst, i * width, width);
}
// U and V copy.
int u_src_offset = stride * sliceHeight;
int v_src_offset = u_src_offset + u_src_offset / 4;
int u_dst_offset = width * height;
int v_dst_offset = u_dst_offset + u_dst_offset / 4;
// Downsample and align to floor-2 for crop origin.
left /= 2;
top /= 2;
for (int i = 0; i < height / 2; i++) {
System.arraycopy(src, u_src_offset + (i + top) * (stride / 2) + left,
dst, u_dst_offset + i * (width / 2), width / 2);
System.arraycopy(src, v_src_offset + (i + top) * (stride / 2) + left,
dst, v_dst_offset + i * (width / 2), width / 2);
}
return dst;
}
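// Chroma note: YUV420 subsamples U/V by 2 in each dimension, which is why the
// crop origin is floor-halved above and each chroma row copies width / 2 bytes
// from a source row of stride / 2 bytes.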
private static void imageUpscale1To2(byte[] src, int srcByteOffset, int srcStride,
byte[] dst, int dstByteOffset, int dstWidth, int dstHeight) {
for (int i = 0; i < dstHeight/2 - 1; i++) {
int dstOffset0 = 2 * i * dstWidth + dstByteOffset;
int dstOffset1 = dstOffset0 + dstWidth;
int srcOffset0 = i * srcStride + srcByteOffset;
int srcOffset1 = srcOffset0 + srcStride;
int pixel00 = (int)src[srcOffset0++] & 0xff;
int pixel10 = (int)src[srcOffset1++] & 0xff;
for (int j = 0; j < dstWidth/2 - 1; j++) {
int pixel01 = (int)src[srcOffset0++] & 0xff;
int pixel11 = (int)src[srcOffset1++] & 0xff;
dst[dstOffset0++] = (byte)pixel00;
dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
dst[dstOffset1++] = (byte)((pixel00 + pixel01 + pixel10 + pixel11 + 2) / 4);
pixel00 = pixel01;
pixel10 = pixel11;
}
// last column
dst[dstOffset0++] = (byte)pixel00;
dst[dstOffset0++] = (byte)pixel00;
dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
dst[dstOffset1++] = (byte)((pixel00 + pixel10 + 1) / 2);
}
// last row
int dstOffset0 = (dstHeight - 2) * dstWidth + dstByteOffset;
int dstOffset1 = dstOffset0 + dstWidth;
int srcOffset0 = (dstHeight/2 - 1) * srcStride + srcByteOffset;
int pixel00 = (int)src[srcOffset0++] & 0xff;
for (int j = 0; j < dstWidth/2 - 1; j++) {
int pixel01 = (int)src[srcOffset0++] & 0xff;
dst[dstOffset0++] = (byte)pixel00;
dst[dstOffset0++] = (byte)((pixel00 + pixel01 + 1) / 2);
dst[dstOffset1++] = (byte)pixel00;
dst[dstOffset1++] = (byte)((pixel00 + pixel01 + 1) / 2);
pixel00 = pixel01;
}
// the very last pixel - bottom right
dst[dstOffset0++] = (byte)pixel00;
dst[dstOffset0++] = (byte)pixel00;
dst[dstOffset1++] = (byte)pixel00;
dst[dstOffset1++] = (byte)pixel00;
}
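// Interpolation sketch: each interior source pixel p00 expands into a 2x2
// output block built from rounded bilinear averages with its right (p01),
// lower (p10) and diagonal (p11) neighbors:
//     dst[2i][2j]     = p00
//     dst[2i][2j+1]   = (p00 + p01 + 1) / 2
//     dst[2i+1][2j]   = (p00 + p10 + 1) / 2
//     dst[2i+1][2j+1] = (p00 + p01 + p10 + p11 + 2) / 4
// The last row and column replicate the edge pixel in the direction where no
// lower/right neighbor exists.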
/**
* Up-scales an image.
* The scale factor is defined by the ratio of destination to source width.
* Only 1:2 and 1:4 up-scaling is supported for now.
* For a 640x480 -> 1280x720 conversion, only the top 640x360 part of the
* original image is scaled.
*/
private static byte[] imageScale(byte[] src, int srcWidth, int srcHeight,
int dstWidth, int dstHeight) throws Exception {
int srcYSize = srcWidth * srcHeight;
int dstYSize = dstWidth * dstHeight;
byte[] dst = null;
if (dstWidth == 2 * srcWidth && dstHeight <= 2 * srcHeight) {
// 1:2 upscale
dst = new byte[dstWidth * dstHeight * 3 / 2];
imageUpscale1To2(src, 0, srcWidth,
dst, 0, dstWidth, dstHeight); // Y
imageUpscale1To2(src, srcYSize, srcWidth / 2,
dst, dstYSize, dstWidth / 2, dstHeight / 2); // U
imageUpscale1To2(src, srcYSize * 5 / 4, srcWidth / 2,
dst, dstYSize * 5 / 4, dstWidth / 2, dstHeight / 2); // V
} else if (dstWidth == 4 * srcWidth && dstHeight <= 4 * srcHeight) {
// 1:4 upscale - in two steps
int midWidth = 2 * srcWidth;
int midHeight = 2 * srcHeight;
byte[] midBuffer = imageScale(src, srcWidth, srcHeight, midWidth, midHeight);
dst = imageScale(midBuffer, midWidth, midHeight, dstWidth, dstHeight);
} else {
throw new RuntimeException("Can not find proper scaling function");
}
return dst;
}
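// Worked example: 640x480 -> 1280x720 satisfies dstWidth == 2 * srcWidth and
// dstHeight (720) <= 2 * srcHeight (960), so the 1:2 path runs and only the
// top 640x360 of the source contributes to the output, as noted above.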
private void cacheScaledImage(
String srcYuvFilename, String srcResource, int srcFrameWidth, int srcFrameHeight,
String dstYuvFilename, int dstFrameWidth, int dstFrameHeight) throws Exception {
InputStream srcStream = OpenFileOrResource(srcYuvFilename, srcResource);
FileOutputStream dstFile = new FileOutputStream(dstYuvFilename, false);
int srcFrameSize = srcFrameWidth * srcFrameHeight * 3 / 2;
byte[] srcFrame = new byte[srcFrameSize];
byte[] dstFrame = null;
Log.d(TAG, "Scale to " + dstFrameWidth + " x " + dstFrameHeight + ". -> " + dstYuvFilename);
while (true) {
int bytesRead = srcStream.read(srcFrame);
if (bytesRead != srcFrame.length) {
break;
}
if (dstFrameWidth == srcFrameWidth && dstFrameHeight == srcFrameHeight) {
dstFrame = srcFrame;
} else {
dstFrame = imageScale(srcFrame, srcFrameWidth, srcFrameHeight,
dstFrameWidth, dstFrameHeight);
}
dstFile.write(dstFrame);
}
srcStream.close();
dstFile.close();
}
/**
* A basic check of whether an encoded stream is decodable.
*
* The most basic confirmation we can get about a frame
* being properly encoded is trying to decode it.
* (Especially in realtime mode, encoder output is non-
* deterministic, so a more thorough check such as an
* MD5 sum comparison wouldn't work.)
*
* Indeed, MediaCodec will raise an IllegalStateException
* whenever the video decoder fails to decode a frame, and
* this test uses that fact to verify the bitstream.
*
* @param inputIvfFilename The name of the IVF file containing the encoded bitstream.
* @param outputYuvFilename The name of the output YUV file (optional).
* @param codecMimeType Mime type of the encoded content.
* @param frameRate Frame rate of the input file in frames per second.
* @param forceGoogleDecoder Forces the use of a Google video decoder.
* @param codecConfigs Codec config buffers to be added to the format.
*/
protected ArrayList<MediaCodec.BufferInfo> decode(
String inputIvfFilename,
String outputYuvFilename,
String codecMimeType,
int frameRate,
boolean forceGoogleDecoder,
ArrayList<ByteBuffer> codecConfigs) throws Exception {
ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
// Open input/output.
IvfReader ivf = new IvfReader(inputIvfFilename);
int frameWidth = ivf.getWidth();
int frameHeight = ivf.getHeight();
int frameCount = ivf.getFrameCount();
int frameStride = frameWidth;
int frameSliceHeight = frameHeight;
int cropLeft = 0;
int cropTop = 0;
int cropWidth = frameWidth;
int cropHeight = frameHeight;
assertTrue(frameWidth > 0);
assertTrue(frameHeight > 0);
assertTrue(frameCount > 0);
// Create decoder.
MediaFormat format = MediaFormat.createVideoFormat(
codecMimeType, ivf.getWidth(), ivf.getHeight());
CodecProperties properties = getVideoCodecProperties(
false /* encoder */, format, forceGoogleDecoder);
if (properties == null) {
ivf.close();
return null;
}
int frameColorFormat = properties.colorFormat;
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
int csdIndex = 0;
for (ByteBuffer config : codecConfigs) {
format.setByteBuffer("csd-" + csdIndex, config);
++csdIndex;
}
FileOutputStream yuv = null;
if (outputYuvFilename != null) {
yuv = new FileOutputStream(outputYuvFilename, false);
}
Log.d(TAG, "Creating decoder " + properties.codecName +
". Color format: 0x" + Integer.toHexString(frameColorFormat) +
". " + frameWidth + " x " + frameHeight);
Log.d(TAG, " Format: " + format);
Log.d(TAG, " In: " + inputIvfFilename + ". Out:" + outputYuvFilename);
MediaCodec decoder = MediaCodec.createByCodecName(properties.codecName);
decoder.configure(format,
null, // surface
null, // crypto
0); // flags
decoder.start();
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
// decode loop
int inputFrameIndex = 0;
int outputFrameIndex = 0;
long inPresentationTimeUs = 0;
long outPresentationTimeUs = 0;
boolean sawOutputEOS = false;
boolean sawInputEOS = false;
while (!sawOutputEOS) {
if (!sawInputEOS) {
int inputBufIndex = decoder.dequeueInputBuffer(DEFAULT_DEQUEUE_TIMEOUT_US);
if (inputBufIndex >= 0) {
byte[] frame = ivf.readFrame(inputFrameIndex);
if (inputFrameIndex == frameCount - 1) {
Log.d(TAG, " Input EOS for frame # " + inputFrameIndex);
sawInputEOS = true;
}
inputBuffers[inputBufIndex].clear();
inputBuffers[inputBufIndex].put(frame);
inputBuffers[inputBufIndex].rewind();
inPresentationTimeUs = (inputFrameIndex * 1000000L) / frameRate;
decoder.queueInputBuffer(
inputBufIndex,
0, // offset
frame.length,
inPresentationTimeUs,
sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
inputFrameIndex++;
}
}
int result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = decoder.getOutputBuffers();
} else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Process format change
format = decoder.getOutputFormat();
frameWidth = format.getInteger(MediaFormat.KEY_WIDTH);
frameHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
frameColorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
Log.d(TAG, "Decoder output format change. Color: 0x" +
Integer.toHexString(frameColorFormat));
Log.d(TAG, "Format: " + format.toString());
// Parse stride and slice height from undocumented format keys
if (format.containsKey("stride")) {
frameStride = format.getInteger("stride");
} else {
frameStride = frameWidth;
}
if (format.containsKey("slice-height")) {
frameSliceHeight = format.getInteger("slice-height");
} else {
frameSliceHeight = frameHeight;
}
Log.d(TAG, "Frame stride and slice height: " + frameStride +
" x " + frameSliceHeight);
frameStride = Math.max(frameWidth, frameStride);
frameSliceHeight = Math.max(frameHeight, frameSliceHeight);
// Parse the crop window describing the valid region of the decoded frame data.
if (format.containsKey("crop-left")) {
cropLeft = format.getInteger("crop-left");
}
if (format.containsKey("crop-top")) {
cropTop = format.getInteger("crop-top");
}
if (format.containsKey("crop-right")) {
cropWidth = format.getInteger("crop-right") - cropLeft + 1;
} else {
cropWidth = frameWidth;
}
if (format.containsKey("crop-bottom")) {
cropHeight = format.getInteger("crop-bottom") - cropTop + 1;
} else {
cropHeight = frameHeight;
}
Log.d(TAG, "Frame crop window origin: " + cropLeft + " x " + cropTop
+ ", size: " + cropWidth + " x " + cropHeight);
cropWidth = Math.min(frameWidth - cropLeft, cropWidth);
cropHeight = Math.min(frameHeight - cropTop, cropHeight);
}
result = decoder.dequeueOutputBuffer(bufferInfo, DEFAULT_DEQUEUE_TIMEOUT_US);
}
if (result >= 0) {
int outputBufIndex = result;
outPresentationTimeUs = bufferInfo.presentationTimeUs;
Log.v(TAG, "Writing buffer # " + outputFrameIndex +
". Size: " + bufferInfo.size +
". InTime: " + (inPresentationTimeUs + 500)/1000 +
". OutTime: " + (outPresentationTimeUs + 500)/1000);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
sawOutputEOS = true;
Log.d(TAG, " Output EOS for frame # " + outputFrameIndex);
}
if (bufferInfo.size > 0) {
// Save decoder output to yuv file.
if (yuv != null) {
byte[] frame = new byte[bufferInfo.size];
outputBuffers[outputBufIndex].position(bufferInfo.offset);
outputBuffers[outputBufIndex].get(frame, 0, bufferInfo.size);
// Convert NV12 to YUV420 if necessary.
if (frameColorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
frame = NV12ToYUV420(frameWidth, frameHeight,
frameStride, frameSliceHeight, frame);
}
int writeLength = Math.min(cropWidth * cropHeight * 3 / 2, frame.length);
// Pack frame if necessary.
if (writeLength < frame.length &&
(frameStride > cropWidth || frameSliceHeight > cropHeight)) {
frame = PackYUV420(cropLeft, cropTop, cropWidth, cropHeight,
frameStride, frameSliceHeight, frame);
}
yuv.write(frame, 0, writeLength);
}
outputFrameIndex++;
// Update statistics - store presentation time delay in offset
long presentationTimeUsDelta = inPresentationTimeUs - outPresentationTimeUs;
MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
bufferInfoCopy.set((int)presentationTimeUsDelta, bufferInfo.size,
outPresentationTimeUs, bufferInfo.flags);
bufferInfos.add(bufferInfoCopy);
}
decoder.releaseOutputBuffer(outputBufIndex, false);
}
}
decoder.stop();
decoder.release();
ivf.close();
if (yuv != null) {
yuv.close();
}
return bufferInfos;
}
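// Hypothetical round-trip sketch (params illustrative): encode a stream, then
// decode it discarding the raw output, keeping only per-frame statistics:
//
//     ArrayList<ByteBuffer> csd = new ArrayList<ByteBuffer>();
//     ArrayList<MediaCodec.BufferInfo> encoded = encode(params, csd);
//     ArrayList<MediaCodec.BufferInfo> decoded = decode(
//             params.outputIvfFilename, null /* no YUV output */,
//             params.codecMimeType, params.frameRate,
//             false /* any decoder */, csd);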
/**
* Helper function that returns an InputStream for either a fully specified
* filename (if set) or a resource name within the test assets (if the
* filename is not set).
*/
private InputStream OpenFileOrResource(String filename, final String resource)
throws Exception {
if (filename != null) {
Preconditions.assertTestFileExists(filename);
return new FileInputStream(filename);
}
Preconditions.assertTestFileExists(mInpPrefix + resource);
return new FileInputStream(mInpPrefix + resource);
}
/**
* Results of frame encoding.
*/
protected class MediaEncoderOutput {
public long inPresentationTimeUs;
public long outPresentationTimeUs;
public boolean outputGenerated;
public int flags;
public byte[] buffer;
}
protected class MediaEncoderAsyncHelper {
private final EncoderOutputStreamParameters mStreamParams;
private final CodecProperties mProperties;
private final ArrayList<MediaCodec.BufferInfo> mBufferInfos;
private final IvfWriter mIvf;
private final ArrayList<ByteBuffer> mCodecConfigs;
private final byte[] mSrcFrame;
private InputStream mYuvStream;
private int mInputFrameIndex;
MediaEncoderAsyncHelper(
EncoderOutputStreamParameters streamParams,
CodecProperties properties,
ArrayList<MediaCodec.BufferInfo> bufferInfos,
IvfWriter ivf,
ArrayList<ByteBuffer> codecConfigs)
throws Exception {
mStreamParams = streamParams;
mProperties = properties;
mBufferInfos = bufferInfos;
mIvf = ivf;
mCodecConfigs = codecConfigs;
int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
mSrcFrame = new byte[srcFrameSize];
mYuvStream = OpenFileOrResource(
streamParams.inputYuvFilename, streamParams.inputResource);
}
public byte[] getInputFrame() {
// Check EOS
if (mStreamParams.frameCount == 0
|| (mStreamParams.frameCount > 0
&& mInputFrameIndex >= mStreamParams.frameCount)) {
Log.d(TAG, "---Sending EOS empty frame for frame # " + mInputFrameIndex);
return null;
}
try {
int bytesRead = mYuvStream.read(mSrcFrame);
if (bytesRead == -1) {
// rewind to beginning of file
mYuvStream.close();
mYuvStream = OpenFileOrResource(
mStreamParams.inputYuvFilename, mStreamParams.inputResource);
bytesRead = mYuvStream.read(mSrcFrame);
}
} catch (Exception e) {
Log.e(TAG, "Failed to read YUV file.");
return null;
}
mInputFrameIndex++;
// Convert YUV420 to NV12 if necessary
if (mProperties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
return YUV420ToNV(mStreamParams.frameWidth, mStreamParams.frameHeight,
mSrcFrame);
} else {
return mSrcFrame;
}
}
public boolean saveOutputFrame(MediaEncoderOutput out) {
if (out.outputGenerated) {
if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
Log.d(TAG, "Storing codec config separately");
ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
csdBuffer.rewind();
mCodecConfigs.add(csdBuffer);
out.buffer = new byte[0];
}
if (out.buffer.length > 0) {
// Save frame
try {
mIvf.writeFrame(out.buffer, out.outPresentationTimeUs);
} catch (Exception e) {
Log.d(TAG, "Failed to write frame");
return true;
}
// Update statistics - store presentation time delay in offset
long presentationTimeUsDelta = out.inPresentationTimeUs -
out.outPresentationTimeUs;
MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
out.outPresentationTimeUs, out.flags);
mBufferInfos.add(bufferInfoCopy);
}
// Detect output EOS
if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "----Output EOS ");
return true;
}
}
return false;
}
}
/**
* Video encoder wrapper class.
* Allows running the encoder either in the caller's thread or in a looper
* thread using buffer-dequeue-ready notification callbacks.
*
* The feedInput() function is used to send a raw video frame to the encoder
* input. When the encoder is configured to run in async mode, the function
* runs in a looper thread.
* An encoded frame can be retrieved by calling the getOutput() function.
*/
protected class MediaEncoderAsync extends Thread {
private int mId;
private MediaCodecWrapper mCodec;
private ByteBuffer[] mInputBuffers;
private ByteBuffer[] mOutputBuffers;
private int mInputFrameIndex;
private int mOutputFrameIndex;
private int mInputBufIndex;
private int mFrameRate;
private long mTimeout;
private MediaCodec.BufferInfo mBufferInfo;
private long mInPresentationTimeUs;
private long mOutPresentationTimeUs;
private boolean mAsync;
// Flag indicating if input frame was consumed by the encoder in feedInput() call.
private boolean mConsumedInput;
// Result of frame encoding returned by getOutput() call.
private MediaEncoderOutput mOutput;
// Object used to signal that the looper thread has started and the Handler
// instance associated with it has been allocated.
private final Object mThreadEvent = new Object();
// Object used to signal that MediaCodec buffer dequeue notification callback
// was received.
private final Object mCallbackEvent = new Object();
private Handler mHandler;
private boolean mCallbackReceived;
private MediaEncoderAsyncHelper mHelper;
private final Object mCompletionEvent = new Object();
private boolean mCompleted;
private boolean mInitialSyncFrameReceived;
private MediaCodec.Callback mCallback = new MediaCodec.Callback() {
@Override
public void onInputBufferAvailable(MediaCodec codec, int index) {
if (mHelper == null) {
Log.e(TAG, "async helper not available");
return;
}
byte[] encFrame = mHelper.getInputFrame();
boolean inputEOS = (encFrame == null);
int encFrameLength = 0;
int flags = 0;
if (inputEOS) {
flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
} else {
encFrameLength = encFrame.length;
ByteBuffer byteBuffer = mCodec.getInputBuffer(index);
byteBuffer.put(encFrame);
byteBuffer.rewind();
mInPresentationTimeUs = (mInputFrameIndex * 1000000L) / mFrameRate;
Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
". InTime: " + (mInPresentationTimeUs + 500)/1000);
mInputFrameIndex++;
}
mCodec.queueInputBuffer(
index,
0, // offset
encFrameLength, // size
mInPresentationTimeUs,
flags);
}
@Override
public void onOutputBufferAvailable(MediaCodec codec,
int index, MediaCodec.BufferInfo info) {
if (mHelper == null) {
Log.e(TAG, "async helper not available");
return;
}
MediaEncoderOutput out = new MediaEncoderOutput();
out.buffer = new byte[info.size];
ByteBuffer outputBuffer = mCodec.getOutputBuffer(index);
outputBuffer.get(out.buffer, 0, info.size);
mOutPresentationTimeUs = info.presentationTimeUs;
String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
logStr += " CONFIG. ";
}
if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
logStr += " KEY. ";
if (!mInitialSyncFrameReceived) {
mInitialSyncFrameReceived = true;
}
}
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
logStr += " EOS. ";
}
logStr += " Size: " + info.size;
logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
Log.v(TAG, logStr);
if (!mInitialSyncFrameReceived
&& (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
throw new RuntimeException("Non codec_config_frame before first sync.");
}
if (info.size > 0) {
mOutputFrameIndex++;
out.inPresentationTimeUs = mInPresentationTimeUs;
out.outPresentationTimeUs = mOutPresentationTimeUs;
}
mCodec.releaseOutputBuffer(index, false);
out.flags = info.flags;
out.outputGenerated = true;
if (mHelper.saveOutputFrame(out)) {
// output EOS
signalCompletion();
}
}
@Override
public void onError(MediaCodec codec, CodecException e) {
Log.e(TAG, "onError: " + e
+ ", transient " + e.isTransient()
+ ", recoverable " + e.isRecoverable()
+ ", error " + e.getErrorCode());
}
@Override
public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
Log.i(TAG, "onOutputFormatChanged: " + format.toString());
}
};
private synchronized void requestStart() throws Exception {
mHandler = null;
start();
// Wait for Handler allocation
synchronized (mThreadEvent) {
while (mHandler == null) {
mThreadEvent.wait();
}
}
}
public void setAsyncHelper(MediaEncoderAsyncHelper helper) {
mHelper = helper;
}
@Override
public void run() {
Looper.prepare();
synchronized (mThreadEvent) {
mHandler = new Handler();
mThreadEvent.notify();
}
Looper.loop();
}
private void runCallable(final Callable<?> callable) throws Exception {
if (mAsync) {
final Exception[] exception = new Exception[1];
final CountDownLatch countDownLatch = new CountDownLatch(1);
mHandler.post( new Runnable() {
@Override
public void run() {
try {
callable.call();
} catch (Exception e) {
exception[0] = e;
} finally {
countDownLatch.countDown();
}
}
} );
// Wait for task completion
countDownLatch.await();
if (exception[0] != null) {
throw exception[0];
}
} else {
callable.call();
}
}
private synchronized void requestStop() throws Exception {
mHandler.post( new Runnable() {
@Override
public void run() {
// This will run on the Looper thread
Log.v(TAG, "MediaEncoder looper quitting");
Looper.myLooper().quitSafely();
}
} );
// Wait for completion
join();
mHandler = null;
}
private void createCodecInternal(final String name,
final MediaFormat format, final long timeout, boolean useNdk) throws Exception {
mBufferInfo = new MediaCodec.BufferInfo();
mFrameRate = format.getInteger(MediaFormat.KEY_FRAME_RATE);
mTimeout = timeout;
mInputFrameIndex = 0;
mOutputFrameIndex = 0;
mInPresentationTimeUs = 0;
mOutPresentationTimeUs = 0;
if (useNdk) {
mCodec = new NdkMediaCodec(name);
} else {
mCodec = new SdkMediaCodec(MediaCodec.createByCodecName(name), mAsync);
}
if (mAsync) {
mCodec.setCallback(mCallback);
}
mCodec.configure(format, MediaCodec.CONFIGURE_FLAG_ENCODE);
mCodec.start();
// Get the cached input/output buffers only in sync mode
if (!mAsync) {
mInputBuffers = mCodec.getInputBuffers();
mOutputBuffers = mCodec.getOutputBuffers();
}
}
public void createCodec(int id, final String name, final MediaFormat format,
final long timeout, boolean async, final boolean useNdk) throws Exception {
mId = id;
mAsync = async;
if (mAsync) {
requestStart(); // start looper thread
}
runCallable( new Callable<Void>() {
@Override
public Void call() throws Exception {
createCodecInternal(name, format, timeout, useNdk);
return null;
}
} );
}
private void feedInputInternal(final byte[] encFrame, final boolean inputEOS) {
mConsumedInput = false;
// Feed input
mInputBufIndex = mCodec.dequeueInputBuffer(mTimeout);
if (mInputBufIndex >= 0) {
ByteBuffer inputBuffer = mCodec.getInputBuffer(mInputBufIndex);
inputBuffer.clear();
inputBuffer.put(encFrame);
inputBuffer.rewind();
int encFrameLength = encFrame.length;
int flags = 0;
if (inputEOS) {
encFrameLength = 0;
flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
}
if (!inputEOS) {
Log.v(TAG, "Enc" + mId + ". Frame in # " + mInputFrameIndex +
". InTime: " + (mInPresentationTimeUs + 500)/1000);
mInPresentationTimeUs = (mInputFrameIndex * 1000000) / mFrameRate;
mInputFrameIndex++;
}
mCodec.queueInputBuffer(
mInputBufIndex,
0, // offset
encFrameLength, // size
mInPresentationTimeUs,
flags);
mConsumedInput = true;
} else {
Log.v(TAG, "In " + mId + " - TRY_AGAIN_LATER");
}
mCallbackReceived = false;
}
public boolean feedInput(final byte[] encFrame, final boolean inputEOS) throws Exception {
runCallable( new Callable<Void>() {
@Override
public Void call() throws Exception {
feedInputInternal(encFrame, inputEOS);
return null;
}
} );
return mConsumedInput;
}
private void getOutputInternal() {
mOutput = new MediaEncoderOutput();
mOutput.inPresentationTimeUs = mInPresentationTimeUs;
mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
mOutput.outputGenerated = false;
// Get output from the encoder
int result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED ||
result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
mOutputBuffers = mCodec.getOutputBuffers();
} else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
Log.d(TAG, "Format changed: " + mCodec.getOutputFormatString());
}
result = mCodec.dequeueOutputBuffer(mBufferInfo, mTimeout);
}
if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.v(TAG, "Out " + mId + " - TRY_AGAIN_LATER");
}
if (result >= 0) {
int outputBufIndex = result;
mOutput.buffer = new byte[mBufferInfo.size];
ByteBuffer outputBuffer = mCodec.getOutputBuffer(outputBufIndex);
outputBuffer.position(mBufferInfo.offset);
outputBuffer.get(mOutput.buffer, 0, mBufferInfo.size);
mOutPresentationTimeUs = mBufferInfo.presentationTimeUs;
String logStr = "Enc" + mId + ". Frame # " + mOutputFrameIndex;
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
logStr += " CONFIG. ";
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
logStr += " KEY. ";
if (!mInitialSyncFrameReceived) {
mInitialSyncFrameReceived = true;
}
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
logStr += " EOS. ";
}
logStr += " Size: " + mBufferInfo.size;
logStr += ". InTime: " + (mInPresentationTimeUs + 500)/1000 +
". OutTime: " + (mOutPresentationTimeUs + 500)/1000;
Log.v(TAG, logStr);
if (!mInitialSyncFrameReceived
&& (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
throw new RuntimeException("Non codec_config_frame before first sync.");
}
if (mBufferInfo.size > 0) {
mOutputFrameIndex++;
mOutput.outPresentationTimeUs = mOutPresentationTimeUs;
}
mCodec.releaseOutputBuffer(outputBufIndex, false);
mOutput.flags = mBufferInfo.flags;
mOutput.outputGenerated = true;
}
mCallbackReceived = false;
}
public MediaEncoderOutput getOutput() throws Exception {
runCallable( new Callable<Void>() {
@Override
public Void call() throws Exception {
getOutputInternal();
return null;
}
} );
return mOutput;
}
public void forceSyncFrame() throws Exception {
final Bundle syncFrame = new Bundle();
syncFrame.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
runCallable( new Callable<Void>() {
@Override
public Void call() throws Exception {
mCodec.setParameters(syncFrame);
return null;
}
} );
}
public void updateBitrate(int bitrate) throws Exception {
final Bundle bitrateUpdate = new Bundle();
bitrateUpdate.putInt(MediaCodec.PARAMETER_KEY_VIDEO_BITRATE, bitrate);
runCallable( new Callable<Void>() {
@Override
public Void call() throws Exception {
mCodec.setParameters(bitrateUpdate);
return null;
}
} );
}
public void waitForBufferEvent() throws Exception {
Log.v(TAG, "----Enc" + mId + " waiting for bufferEvent");
if (mAsync) {
synchronized (mCallbackEvent) {
if (!mCallbackReceived) {
mCallbackEvent.wait(1000); // wait 1 sec for a callback
// throw an exception if callback was not received
if (!mCallbackReceived) {
throw new RuntimeException("MediaCodec callback was not received");
}
}
}
} else {
Thread.sleep(5);
}
Log.v(TAG, "----Waiting for bufferEvent done");
}
public void waitForCompletion(long timeoutMs) throws Exception {
synchronized (mCompletionEvent) {
long timeoutExpiredMs = System.currentTimeMillis() + timeoutMs;
while (!mCompleted) {
mCompletionEvent.wait(timeoutExpiredMs - System.currentTimeMillis());
if (System.currentTimeMillis() >= timeoutExpiredMs) {
throw new RuntimeException("encoding has timed out!");
}
}
}
}
public void signalCompletion() {
synchronized (mCompletionEvent) {
mCompleted = true;
mCompletionEvent.notify();
}
}
public void deleteCodec() throws Exception {
runCallable( new Callable<Void>() {
@Override
public Void call() throws Exception {
mCodec.stop();
mCodec.release();
return null;
}
} );
if (mAsync) {
requestStop(); // Stop looper thread
}
}
}
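// Minimal sync-mode usage sketch for MediaEncoderAsync (values illustrative;
// in async mode a MediaEncoderAsyncHelper drives the codec via callbacks
// instead):
//
//     MediaEncoderAsync enc = new MediaEncoderAsync();
//     enc.createCodec(0, codecName, format, DEFAULT_DEQUEUE_TIMEOUT_US,
//             false /* sync mode */, false /* SDK codec */);
//     boolean consumed = enc.feedInput(srcFrame, false /* not EOS */);
//     MediaEncoderOutput out = enc.getOutput();
//     enc.deleteCodec();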
/**
* @see #encode(EncoderOutputStreamParameters, ArrayList)
*/
protected ArrayList<MediaCodec.BufferInfo> encode(
EncoderOutputStreamParameters streamParams) throws Exception {
return encode(streamParams, new ArrayList<ByteBuffer>());
}
/**
* Video encoding loop supporting the encoding of a single stream, with an
* option to run in a looper thread and use buffer-ready notification callbacks.
*
* The output stream is described by the streamParams parameters.
*
* MediaCodec will raise an IllegalStateException
* whenever the video encoder fails to encode a frame.
*
* The color format of the input file should be YUV420, and frameWidth and
* frameHeight should be supplied correctly, as the raw input file doesn't
* include any header data.
*
* @param streamParams Structure with encoder parameters
* @param codecConfigs List to be filled with codec config buffers
* @return An array of encoded frame information, one entry per frame.
*/
protected ArrayList<MediaCodec.BufferInfo> encode(
EncoderOutputStreamParameters streamParams,
ArrayList<ByteBuffer> codecConfigs) throws Exception {
ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
Log.d(TAG, "Source resolution: "+streamParams.frameWidth + " x " +
streamParams.frameHeight);
int bitrate = streamParams.bitrateSet[0];
// Create minimal media format signifying desired output.
MediaFormat format = MediaFormat.createVideoFormat(
streamParams.codecMimeType, streamParams.frameWidth,
streamParams.frameHeight);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
CodecProperties properties = getVideoCodecProperties(
true, format, streamParams.forceGoogleEncoder);
if (properties == null) {
return null;
}
// Open input/output
InputStream yuvStream = OpenFileOrResource(
streamParams.inputYuvFilename, streamParams.inputResource);
IvfWriter ivf = new IvfWriter(
streamParams.outputIvfFilename, streamParams.codecMimeType,
streamParams.frameWidth, streamParams.frameHeight);
// Create a media format signifying desired output.
if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
}
if (streamParams.temporalLayers > 0) {
format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer
}
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
streamParams.frameRate;
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
// Create encoder
Log.d(TAG, "Creating encoder " + properties.codecName +
". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
streamParams.frameWidth + " x " + streamParams.frameHeight +
". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
". Key frame:" + syncFrameInterval * streamParams.frameRate +
". Force keyFrame: " + streamParams.syncForceFrameInterval);
Log.d(TAG, " Format: " + format);
Log.d(TAG, " Output ivf:" + streamParams.outputIvfFilename);
MediaEncoderAsync codec = new MediaEncoderAsync();
codec.createCodec(0, properties.codecName, format,
streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);
// encode loop
boolean sawInputEOS = false; // no more data
boolean consumedInputEOS = false; // EOS flag is consumed by the encoder
boolean sawOutputEOS = false;
boolean inputConsumed = true;
int inputFrameIndex = 0;
int lastBitrate = bitrate;
int srcFrameSize = streamParams.frameWidth * streamParams.frameHeight * 3 / 2;
byte[] srcFrame = new byte[srcFrameSize];
while (!sawOutputEOS) {
// Read and feed input frame
if (!consumedInputEOS) {
// Read new input buffers - if previous input was consumed and no EOS
if (inputConsumed && !sawInputEOS) {
int bytesRead = yuvStream.read(srcFrame);
// Check EOS
if (streamParams.frameCount > 0 && inputFrameIndex >= streamParams.frameCount) {
sawInputEOS = true;
Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
}
if (!sawInputEOS && bytesRead == -1) {
if (streamParams.frameCount == 0) {
sawInputEOS = true;
Log.d(TAG, "---Sending EOS empty frame for frame # " + inputFrameIndex);
} else {
yuvStream.close();
yuvStream = OpenFileOrResource(
streamParams.inputYuvFilename, streamParams.inputResource);
bytesRead = yuvStream.read(srcFrame);
}
}
// Force sync frame if syncForceFrameInterval is set.
if (!sawInputEOS && inputFrameIndex > 0 &&
streamParams.syncForceFrameInterval > 0 &&
(inputFrameIndex % streamParams.syncForceFrameInterval) == 0) {
Log.d(TAG, "---Requesting sync frame # " + inputFrameIndex);
codec.forceSyncFrame();
}
// Dynamic bitrate change.
if (!sawInputEOS && streamParams.bitrateSet.length > inputFrameIndex) {
int newBitrate = streamParams.bitrateSet[inputFrameIndex];
if (newBitrate != lastBitrate) {
Log.d(TAG, "--- Requesting new bitrate " + newBitrate +
" for frame " + inputFrameIndex);
codec.updateBitrate(newBitrate);
lastBitrate = newBitrate;
}
}
// Convert YUV420 to NV12 if necessary
if (properties.colorFormat != CodecCapabilities.COLOR_FormatYUV420Planar) {
srcFrame = YUV420ToNV(streamParams.frameWidth, streamParams.frameHeight,
srcFrame);
}
}
inputConsumed = codec.feedInput(srcFrame, sawInputEOS);
if (inputConsumed) {
inputFrameIndex++;
consumedInputEOS = sawInputEOS;
}
}
// Get output from the encoder
MediaEncoderOutput out = codec.getOutput();
if (out.outputGenerated) {
// Detect output EOS
if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "----Output EOS ");
sawOutputEOS = true;
}
if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
Log.d(TAG, "Storing codec config separately");
ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
csdBuffer.rewind();
codecConfigs.add(csdBuffer);
out.buffer = new byte[0];
}
if (out.buffer.length > 0) {
// Save frame
ivf.writeFrame(out.buffer, out.outPresentationTimeUs);
// Update statistics - store presentation time delay in offset
long presentationTimeUsDelta = out.inPresentationTimeUs -
out.outPresentationTimeUs;
MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
out.outPresentationTimeUs, out.flags);
bufferInfos.add(bufferInfoCopy);
}
}
// If codec is not ready to accept input/output - wait for buffer ready callback
if ((!inputConsumed || consumedInputEOS) && !out.outputGenerated) {
codec.waitForBufferEvent();
}
}
codec.deleteCodec();
ivf.close();
yuvStream.close();
return bufferInfos;
}
/**
* Video encoding loop that runs in a looper thread and uses buffer-ready
* callbacks.
*
* The output stream is described by the streamParams parameters.
*
* MediaCodec will raise an IllegalStateException
* whenever the video encoder fails to encode a frame.
*
* The color format of the input file should be YUV420, and frameWidth and
* frameHeight should be supplied correctly, as the raw input file doesn't
* include any header data.
*
* @param streamParams Structure with encoder parameters
* @param codecConfigs List to be filled with codec config buffers
* @return An array of encoded frame information, one entry per frame.
*/
protected ArrayList<MediaCodec.BufferInfo> encodeAsync(
EncoderOutputStreamParameters streamParams,
ArrayList<ByteBuffer> codecConfigs) throws Exception {
if (!streamParams.runInLooperThread) {
throw new RuntimeException("encodeAsync should run with a looper thread!");
}
ArrayList<MediaCodec.BufferInfo> bufferInfos = new ArrayList<MediaCodec.BufferInfo>();
Log.d(TAG, "Source resolution: "+streamParams.frameWidth + " x " +
streamParams.frameHeight);
int bitrate = streamParams.bitrateSet[0];
// Create minimal media format signifying desired output.
MediaFormat format = MediaFormat.createVideoFormat(
streamParams.codecMimeType, streamParams.frameWidth,
streamParams.frameHeight);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
CodecProperties properties = getVideoCodecProperties(
true, format, streamParams.forceGoogleEncoder);
if (properties == null) {
return null;
}
// Open input/output
IvfWriter ivf = new IvfWriter(
streamParams.outputIvfFilename, streamParams.codecMimeType,
streamParams.frameWidth, streamParams.frameHeight);
// Create a media format signifying desired output.
if (streamParams.bitrateType == VIDEO_ControlRateConstant) {
format.setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
}
if (streamParams.temporalLayers > 0) {
format.setInteger("ts-layers", streamParams.temporalLayers); // 1 temporal layer
}
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
format.setInteger(MediaFormat.KEY_FRAME_RATE, streamParams.frameRate);
int syncFrameInterval = (streamParams.syncFrameInterval + streamParams.frameRate/2) /
streamParams.frameRate;
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
// Create encoder
Log.d(TAG, "Creating encoder " + properties.codecName +
". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
streamParams.frameWidth + " x " + streamParams.frameHeight +
". Bitrate: " + bitrate + " Bitrate type: " + streamParams.bitrateType +
". Fps:" + streamParams.frameRate + ". TS Layers: " + streamParams.temporalLayers +
". Key frame:" + syncFrameInterval * streamParams.frameRate +
". Force keyFrame: " + streamParams.syncForceFrameInterval);
Log.d(TAG, " Format: " + format);
Log.d(TAG, " Output ivf:" + streamParams.outputIvfFilename);
MediaEncoderAsync codec = new MediaEncoderAsync();
MediaEncoderAsyncHelper helper = new MediaEncoderAsyncHelper(
streamParams, properties, bufferInfos, ivf, codecConfigs);
codec.setAsyncHelper(helper);
codec.createCodec(0, properties.codecName, format,
streamParams.timeoutDequeue, streamParams.runInLooperThread, streamParams.useNdk);
codec.waitForCompletion(DEFAULT_ENCODE_TIMEOUT_MS);
codec.deleteCodec();
ivf.close();
return bufferInfos;
}
/**
* Video encoding loop supporting the encoding of multiple streams at a time.
* Each output stream is described by an entry in encodingParams, allowing
* simultaneous encoding at various resolutions and bitrates, with an option
* to control key frames and dynamic bitrate for each output stream
* independently.
*
* MediaCodec will raise an IllegalStateException
* whenever a video encoder fails to encode a frame.
*
* The color format of the input files should be YUV420, and frameWidth and
* frameHeight should be supplied correctly, as raw input files don't
* include any header data.
*
* @param srcFrameWidth Frame width of the input YUV file
* @param srcFrameHeight Frame height of the input YUV file
* @param encodingParams Encoder parameters
* @param codecConfigs List to be filled with codec config buffers
* @return A 2D array of encoded frame information, one list per stream and
* one entry per frame.
*/
protected ArrayList<ArrayList<MediaCodec.BufferInfo>> encodeSimulcast(
int srcFrameWidth,
int srcFrameHeight,
ArrayList<EncoderOutputStreamParameters> encodingParams,
ArrayList<ArrayList<ByteBuffer>> codecConfigs) throws Exception {
int numEncoders = encodingParams.size();
// Create arrays of input/output streams, formats, bitrates, etc.
ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos =
new ArrayList<ArrayList<MediaCodec.BufferInfo>>(numEncoders);
InputStream[] yuvStream = new InputStream[numEncoders];
IvfWriter[] ivf = new IvfWriter[numEncoders];
FileOutputStream[] yuvScaled = new FileOutputStream[numEncoders];
MediaFormat[] format = new MediaFormat[numEncoders];
MediaEncoderAsync[] codec = new MediaEncoderAsync[numEncoders];
int[] inputFrameIndex = new int[numEncoders];
boolean[] sawInputEOS = new boolean[numEncoders];
boolean[] consumedInputEOS = new boolean[numEncoders];
boolean[] inputConsumed = new boolean[numEncoders];
boolean[] bufferConsumed = new boolean[numEncoders];
boolean[] sawOutputEOS = new boolean[numEncoders];
byte[][] srcFrame = new byte[numEncoders][];
boolean sawOutputEOSTotal = false;
boolean bufferConsumedTotal = false;
CodecProperties[] codecProperties = new CodecProperties[numEncoders];
numEncoders = 0;
for (EncoderOutputStreamParameters params : encodingParams) {
int i = numEncoders;
Log.d(TAG, "Source resolution: " + params.frameWidth + " x " +
params.frameHeight);
int bitrate = params.bitrateSet[0];
// Create minimal media format signifying desired output.
format[i] = MediaFormat.createVideoFormat(
params.codecMimeType, params.frameWidth,
params.frameHeight);
format[i].setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
CodecProperties properties = getVideoCodecProperties(
true, format[i], params.forceGoogleEncoder);
if (properties == null) {
continue;
}
// Check if scaled image was created
int scale = params.frameWidth / srcFrameWidth;
if (!mScaledImages.contains(scale)) {
// resize image
cacheScaledImage(params.inputYuvFilename, params.inputResource,
srcFrameWidth, srcFrameHeight,
params.scaledYuvFilename, params.frameWidth, params.frameHeight);
mScaledImages.add(scale);
}
// Create buffer info storage
bufferInfos.add(new ArrayList<MediaCodec.BufferInfo>());
// Create YUV reader
yuvStream[i] = new FileInputStream(params.scaledYuvFilename);
// Create IVF writer
ivf[i] = new IvfWriter(
params.outputIvfFilename, params.codecMimeType,
params.frameWidth, params.frameHeight);
// Frame buffer
int frameSize = params.frameWidth * params.frameHeight * 3 / 2;
srcFrame[i] = new byte[frameSize];
// Create a media format signifying desired output.
if (params.bitrateType == VIDEO_ControlRateConstant) {
format[i].setInteger("bitrate-mode", VIDEO_ControlRateConstant); // set CBR
}
if (params.temporalLayers > 0) {
format[i].setInteger("ts-layers", params.temporalLayers); // 1 temporal layer
}
format[i].setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
format[i].setInteger(MediaFormat.KEY_FRAME_RATE, params.frameRate);
int syncFrameInterval = (params.syncFrameInterval + params.frameRate/2) /
params.frameRate; // in sec
format[i].setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, syncFrameInterval);
// Create encoder
Log.d(TAG, "Creating encoder #" + i +" : " + properties.codecName +
". Color format: 0x" + Integer.toHexString(properties.colorFormat)+ " : " +
params.frameWidth + " x " + params.frameHeight +
". Bitrate: " + bitrate + " Bitrate type: " + params.bitrateType +
". Fps:" + params.frameRate + ". TS Layers: " + params.temporalLayers +
". Key frame:" + syncFrameInterval * params.frameRate +
". Force keyFrame: " + params.syncForceFrameInterval);
Log.d(TAG, " Format: " + format[i]);
Log.d(TAG, " Output ivf:" + params.outputIvfFilename);
// Create encoder
codec[i] = new MediaEncoderAsync();
codec[i].createCodec(i, properties.codecName, format[i],
params.timeoutDequeue, params.runInLooperThread, params.useNdk);
codecProperties[i] = new CodecProperties(properties.codecName, properties.colorFormat);
inputConsumed[i] = true;
++numEncoders;
}
if (numEncoders == 0) {
Log.i(TAG, "no suitable encoders found for any of the streams");
return null;
}
while (!sawOutputEOSTotal) {
// Feed input buffer to all encoders
for (int i = 0; i < numEncoders; i++) {
bufferConsumed[i] = false;
if (consumedInputEOS[i]) {
continue;
}
EncoderOutputStreamParameters params = encodingParams.get(i);
// Read new input buffers - if previous input was consumed and no EOS
if (inputConsumed[i] && !sawInputEOS[i]) {
int bytesRead = yuvStream[i].read(srcFrame[i]);
// Check EOS
if (params.frameCount > 0 && inputFrameIndex[i] >= params.frameCount) {
sawInputEOS[i] = true;
Log.d(TAG, "---Enc" + i +
". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
}
if (!sawInputEOS[i] && bytesRead == -1) {
if (params.frameCount == 0) {
sawInputEOS[i] = true;
Log.d(TAG, "---Enc" + i +
". Sending EOS empty frame for frame # " + inputFrameIndex[i]);
} else {
yuvStream[i].close();
yuvStream[i] = new FileInputStream(params.scaledYuvFilename);
bytesRead = yuvStream[i].read(srcFrame[i]);
}
}
// Convert YUV420 to NV12 if necessary
if (codecProperties[i].colorFormat !=
CodecCapabilities.COLOR_FormatYUV420Planar) {
srcFrame[i] =
YUV420ToNV(params.frameWidth, params.frameHeight, srcFrame[i]);
}
}
inputConsumed[i] = codec[i].feedInput(srcFrame[i], sawInputEOS[i]);
if (inputConsumed[i]) {
inputFrameIndex[i]++;
consumedInputEOS[i] = sawInputEOS[i];
bufferConsumed[i] = true;
}
}
// Get output from all encoders
for (int i = 0; i < numEncoders; i++) {
if (sawOutputEOS[i]) {
continue;
}
MediaEncoderOutput out = codec[i].getOutput();
if (out.outputGenerated) {
bufferConsumed[i] = true;
// Detect output EOS
if ((out.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.d(TAG, "----Enc" + i + ". Output EOS ");
sawOutputEOS[i] = true;
}
if ((out.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
Log.d(TAG, "----Enc" + i + ". Storing codec config separately");
ByteBuffer csdBuffer = ByteBuffer.allocate(out.buffer.length).put(out.buffer);
csdBuffer.rewind();
codecConfigs.get(i).add(csdBuffer);
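// Clear the payload so the codec config is not also written to the IVF file as a frame.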
out.buffer = new byte[0];
}
if (out.buffer.length > 0) {
// Save frame
ivf[i].writeFrame(out.buffer, out.outPresentationTimeUs);
// Update statistics - store presentation time delay in offset
long presentationTimeUsDelta = out.inPresentationTimeUs -
out.outPresentationTimeUs;
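// BufferInfo.offset is repurposed to carry the input-to-output latency;
// maxPresentationTimeDifference() below reads it back from there.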
MediaCodec.BufferInfo bufferInfoCopy = new MediaCodec.BufferInfo();
bufferInfoCopy.set((int)presentationTimeUsDelta, out.buffer.length,
out.outPresentationTimeUs, out.flags);
bufferInfos.get(i).add(bufferInfoCopy);
}
}
}
// If no encoder consumed an input buffer or produced output this iteration, wait for a buffer ready callback
bufferConsumedTotal = false;
for (boolean bufferConsumedCurrent : bufferConsumed) {
bufferConsumedTotal |= bufferConsumedCurrent;
}
if (!bufferConsumedTotal) {
// Pick the encoder to wait for
for (int i = 0; i < numEncoders; i++) {
if (!bufferConsumed[i] && !sawOutputEOS[i]) {
codec[i].waitForBufferEvent();
break;
}
}
}
// Check if EOS happened for all encoders
sawOutputEOSTotal = true;
for (boolean sawOutputEOSStream : sawOutputEOS) {
sawOutputEOSTotal &= sawOutputEOSStream;
}
}
for (int i = 0; i < numEncoders; i++) {
codec[i].deleteCodec();
ivf[i].close();
yuvStream[i].close();
if (yuvScaled[i] != null) {
yuvScaled[i].close();
}
}
return bufferInfos;
}
/**
* Some encoding statistics.
*/
protected class VideoEncodingStatistics {
VideoEncodingStatistics() {
mBitrates = new ArrayList<Integer>();
mFrames = new ArrayList<Integer>();
mKeyFrames = new ArrayList<Integer>();
mMinimumKeyFrameInterval = Integer.MAX_VALUE;
}
public ArrayList<Integer> mBitrates; // Bitrate values for each second of the encoded stream.
public ArrayList<Integer> mFrames; // Number of frames in each second of the encoded stream.
public int mAverageBitrate; // Average stream bitrate.
public ArrayList<Integer> mKeyFrames; // Stores the position of key frames in a stream.
public int mAverageKeyFrameInterval; // Average key frame interval.
public int mMaximumKeyFrameInterval; // Maximum key frame interval.
public int mMinimumKeyFrameInterval; // Minimum key frame interval.
}
/**
* Calculates the average bitrate and key frame intervals for an encoded stream.
* On return, the mBitrates field holds the bitrate for every second of the
* encoded stream, mAverageBitrate holds the average stream bitrate,
* mKeyFrames holds the positions of key frames in the encoded stream, and
* mAverageKeyFrameInterval holds the average key frame interval in frames.
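* For example, with key frames at indices 0, 30 and 60, mKeyFrames = [0, 30, 60]
* and mAverageKeyFrameInterval = (60 - 0) / 2 = 30 frames.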
*/
protected VideoEncodingStatistics computeEncodingStatistics(int encoderId,
ArrayList<MediaCodec.BufferInfo> bufferInfos) {
VideoEncodingStatistics statistics = new VideoEncodingStatistics();
int totalSize = 0;
int frames = 0;
int framesPerSecond = 0;
int totalFrameSizePerSecond = 0;
int maxFrameSize = 0;
int currentSecond;
int nextSecond = 0;
String keyFrameList = " IFrame List: ";
String bitrateList = " Bitrate list: ";
String framesList = " FPS list: ";
for (int j = 0; j < bufferInfos.size(); j++) {
MediaCodec.BufferInfo info = bufferInfos.get(j);
currentSecond = (int)(info.presentationTimeUs / 1000000);
boolean lastFrame = (j == bufferInfos.size() - 1);
if (!lastFrame) {
nextSecond = (int)(bufferInfos.get(j+1).presentationTimeUs / 1000000);
}
totalSize += info.size;
totalFrameSizePerSecond += info.size;
maxFrameSize = Math.max(maxFrameSize, info.size);
framesPerSecond++;
frames++;
// Update the bitrate statistics if the next frame will
// be for the next second
if (lastFrame || nextSecond > currentSecond) {
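// The bucket spans one second, so bytes * 8 is directly bits per second.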
int currentBitrate = totalFrameSizePerSecond * 8;
bitrateList += (currentBitrate + " ");
framesList += (framesPerSecond + " ");
statistics.mBitrates.add(currentBitrate);
statistics.mFrames.add(framesPerSecond);
totalFrameSizePerSecond = 0;
framesPerSecond = 0;
}
// Update key frame statistics.
if ((info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
statistics.mKeyFrames.add(j);
keyFrameList += (j + " ");
}
}
// Duration of the stream from the last presentation time stamp, rounded to the nearest second.
int duration = (int)(bufferInfos.get(bufferInfos.size() - 1).presentationTimeUs / 1000);
duration = (duration + 500) / 1000;
// Guard against division by zero for streams shorter than half a second.
duration = Math.max(duration, 1);
statistics.mAverageBitrate = (int)(((long)totalSize * 8) / duration);
Log.d(TAG, "Statistics for encoder # " + encoderId);
// Calculate average key frame interval in frames.
int keyFrames = statistics.mKeyFrames.size();
if (keyFrames > 1) {
statistics.mAverageKeyFrameInterval =
statistics.mKeyFrames.get(keyFrames - 1) - statistics.mKeyFrames.get(0);
statistics.mAverageKeyFrameInterval =
Math.round((float)statistics.mAverageKeyFrameInterval / (keyFrames - 1));
for (int j = 1; j < keyFrames; j++) {
int keyFrameInterval =
statistics.mKeyFrames.get(j) - statistics.mKeyFrames.get(j - 1);
statistics.mMaximumKeyFrameInterval =
Math.max(statistics.mMaximumKeyFrameInterval, keyFrameInterval);
statistics.mMinimumKeyFrameInterval =
Math.min(statistics.mMinimumKeyFrameInterval, keyFrameInterval);
}
Log.d(TAG, " Key frame intervals: Max: " + statistics.mMaximumKeyFrameInterval +
". Min: " + statistics.mMinimumKeyFrameInterval +
". Avg: " + statistics.mAverageKeyFrameInterval);
}
Log.d(TAG, " Frames: " + frames + ". Duration: " + duration +
". Total size: " + totalSize + ". Key frames: " + keyFrames);
Log.d(TAG, keyFrameList);
Log.d(TAG, bitrateList);
Log.d(TAG, framesList);
Log.d(TAG, " Bitrate average: " + statistics.mAverageBitrate);
Log.d(TAG, " Maximum frame size: " + maxFrameSize);
return statistics;
}
protected VideoEncodingStatistics computeEncodingStatistics(
ArrayList<MediaCodec.BufferInfo> bufferInfos) {
return computeEncodingStatistics(0, bufferInfos);
}
protected ArrayList<VideoEncodingStatistics> computeSimulcastEncodingStatistics(
ArrayList<ArrayList<MediaCodec.BufferInfo>> bufferInfos) {
int numCodecs = bufferInfos.size();
ArrayList<VideoEncodingStatistics> statistics = new ArrayList<VideoEncodingStatistics>();
for (int i = 0; i < numCodecs; i++) {
VideoEncodingStatistics currentStatistics =
computeEncodingStatistics(i, bufferInfos.get(i));
statistics.add(currentStatistics);
}
return statistics;
}
/**
* Calculates the maximum encoder/decoder latency, in milliseconds, from a buffer
* info array (generated by either the encoder or the decoder) in which the
* per-frame presentation time delta was stored in the offset field.
*/
protected int maxPresentationTimeDifference(ArrayList<MediaCodec.BufferInfo> bufferInfos) {
int maxValue = 0;
for (MediaCodec.BufferInfo bufferInfo : bufferInfos) {
maxValue = Math.max(maxValue, bufferInfo.offset);
}
maxValue = (maxValue + 500) / 1000; // us -> ms, rounded to the nearest millisecond
return maxValue;
}
/**
* Decoding PSNR statistics.
*/
protected class VideoDecodingStatistics {
VideoDecodingStatistics() {
mMinimumPSNR = Integer.MAX_VALUE;
}
public double mAveragePSNR;
public double mMinimumPSNR;
}
/**
* Calculates the PSNR value between two equally sized planes of video data.
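* PSNR = 10 * log10(255^2 / MSE), where MSE is the mean squared error between
* the two planes; identical planes produce an infinite PSNR.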
*/
private double computePSNR(byte[] data0, byte[] data1) {
long squareError = 0;
assertTrue(data0.length == data1.length);
int length = data0.length;
for (int i = 0; i < length; i++) {
int diff = ((int)data0[i] & 0xff) - ((int)data1[i] & 0xff);
squareError += diff * diff;
}
double meanSquareError = (double)squareError / length;
double psnr = 10 * Math.log10((double)255 * 255 / meanSquareError);
return psnr;
}
/**
* Calculates the average and minimum PSNR values between
* the sets of reference and decoded video frames.
* Runs PSNR calculation for the full duration of the decoded data.
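* If the decoded stream is longer than the reference, the reference
* stream is looped, matching the looped input used during encoding.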
*/
protected VideoDecodingStatistics computeDecodingStatistics(
String referenceYuvFilename,
String referenceYuvRaw,
String decodedYuvFilename,
int width,
int height) throws Exception {
VideoDecodingStatistics statistics = new VideoDecodingStatistics();
InputStream referenceStream =
OpenFileOrResource(referenceYuvFilename, referenceYuvRaw);
InputStream decodedStream = new FileInputStream(decodedYuvFilename);
int ySize = width * height;
int uvSize = width * height / 4;
byte[] yRef = new byte[ySize];
byte[] yDec = new byte[ySize];
byte[] uvRef = new byte[uvSize];
byte[] uvDec = new byte[uvSize];
int frames = 0;
double averageYPSNR = 0;
double averageUPSNR = 0;
double averageVPSNR = 0;
double minimumYPSNR = Integer.MAX_VALUE;
double minimumUPSNR = Integer.MAX_VALUE;
double minimumVPSNR = Integer.MAX_VALUE;
int minimumPSNRFrameIndex = 0;
while (true) {
// Calculate Y PSNR.
int bytesReadRef = referenceStream.read(yRef);
int bytesReadDec = decodedStream.read(yDec);
if (bytesReadDec == -1) {
break;
}
if (bytesReadRef == -1) {
// Reference stream exhausted - wrap around to the beginning
referenceStream.close();
referenceStream =
OpenFileOrResource(referenceYuvFilename, referenceYuvRaw);
bytesReadRef = referenceStream.read(yRef);
}
double curYPSNR = computePSNR(yRef, yDec);
averageYPSNR += curYPSNR;
minimumYPSNR = Math.min(minimumYPSNR, curYPSNR);
double curMinimumPSNR = curYPSNR;
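// The chroma reads below are assumed to succeed whenever the luma read did,
// since all three planes of a frame are stored contiguously in the files.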
// Calculate U PSNR.
bytesReadRef = referenceStream.read(uvRef);
bytesReadDec = decodedStream.read(uvDec);
double curUPSNR = computePSNR(uvRef, uvDec);
averageUPSNR += curUPSNR;
minimumUPSNR = Math.min(minimumUPSNR, curUPSNR);
curMinimumPSNR = Math.min(curMinimumPSNR, curUPSNR);
// Calculate V PSNR.
bytesReadRef = referenceStream.read(uvRef);
bytesReadDec = decodedStream.read(uvDec);
double curVPSNR = computePSNR(uvRef, uvDec);
averageVPSNR += curVPSNR;
minimumVPSNR = Math.min(minimumVPSNR, curVPSNR);
curMinimumPSNR = Math.min(curMinimumPSNR, curVPSNR);
// Frame index for minimum PSNR value - help to detect possible distortions
if (curMinimumPSNR < statistics.mMinimumPSNR) {
statistics.mMinimumPSNR = curMinimumPSNR;
minimumPSNRFrameIndex = frames;
}
String logStr = String.format(Locale.US, "PSNR #%d: Y: %.2f. U: %.2f. V: %.2f",
frames, curYPSNR, curUPSNR, curVPSNR);
Log.v(TAG, logStr);
frames++;
}
averageYPSNR /= frames;
averageUPSNR /= frames;
averageVPSNR /= frames;
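// Weight luma four times heavier than each chroma plane: in YUV420 the Y plane
// holds four times as many samples as U or V.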
statistics.mAveragePSNR = (4 * averageYPSNR + averageUPSNR + averageVPSNR) / 6;
Log.d(TAG, "PSNR statistics for " + frames + " frames.");
String logStr = String.format(Locale.US,
"Average PSNR: Y: %.1f. U: %.1f. V: %.1f. Average: %.1f",
averageYPSNR, averageUPSNR, averageVPSNR, statistics.mAveragePSNR);
Log.d(TAG, logStr);
logStr = String.format(Locale.US,
"Minimum PSNR: Y: %.1f. U: %.1f. V: %.1f. Overall: %.1f at frame %d",
minimumYPSNR, minimumUPSNR, minimumVPSNR,
statistics.mMinimumPSNR, minimumPSNRFrameIndex);
Log.d(TAG, logStr);
referenceStream.close();
decodedStream.close();
return statistics;
}
}