Blank frames after using MediaCodec + MediaMuxer on Android
I am trying to create an .mp4 video from .png images in my application like this: decode a bitmap from the .png file → convert the RGB bitmap to YUV420SP → pass the byte array to the codec input and queue it → dequeue the encoded output frame → write it to the .mp4 file using the muxer → repeat with the next image → stop and release the muxer. The output .mp4 file cannot be played on Android; in VLC it plays with the correct length (16 seconds: 4 seconds for each of 4 images), but the frames are blank — only the VLC logo is displayed. I couldn't figure out what is wrong over the course of a few days; this latest code is a combination of several Stack Overflow answers to similar questions, but I haven't found enough information to make it work. Below are the relevant snippets.
This is where it all starts:
encoder = new AvcEncoder(this);
Bitmap bitmap = BitmapFactory.decodeFile(path);
byte[] data = getNV21(bitmap.getWidth(), bitmap.getHeight(), bitmap);
encoder.offerEncoder(data);
...
muxer.stop();
muxer.release();
This method is called from AvcEncoder whenever an encoded frame leaves the codec:
@Override
public void frameReceived(byte[] outData, int i, int length) {
ByteBuffer buf = ByteBuffer.wrap(outData);
bufferInfo.set(0, outData.length, duration * frameNo++ * 1000000, MediaCodec.BUFFER_FLAG_SYNC_FRAME);
muxer.writeSampleData(videoTrackIndex, buf, bufferInfo);
}
Here is how each bitmap is converted to the YUV byte array:
byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
int[] argb = new int[inputWidth * inputHeight];
scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
scaled.recycle();
return yuv;
}
void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
final int frameSize = width * height;
int yIndex = 0;
int uvIndex = frameSize;
int a, R, G, B, Y, U, V;
int index = 0;
for (int j = 0; j < height; j++) {
for (int i = 0; i < width; i++) {
a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
R = (argb[index] & 0xff0000) >> 16;
G = (argb[index] & 0xff00) >> 8;
B = (argb[index] & 0xff) >> 0;
// well known RGB to YUV algorithm
Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
// NV21 has a plane of Y and interleaved planes of VU each sampled by a factor of 2
// meaning for every 4 Y pixels there are 1 V and 1 U. Note the sampling is every other
// pixel AND every other scanline.
yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
if (j % 2 == 0 && index % 2 == 0) {
yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
}
index++;
}
}
}
And the AvcEncoder class:
public class AvcEncoder {
private EncodedFrameListener frameListener;
private MediaCodec mediaCodec;
private byte[] sps;
private byte[] pps;
//private ParameterSetsListener parameterSetsListener;
public AvcEncoder(EncodedFrameListener frameListener) {
this.frameListener = frameListener;
MediaFormat mediaFormat;
mediaFormat = MediaFormat.createVideoFormat("video/avc", 720, 720);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);
mediaCodec = MediaCodec.createEncoderByType("video/avc");
mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.start();
}
public MediaFormat getOutputFormat(){
return mediaCodec.getOutputFormat();
}
public void close() throws IOException {
mediaCodec.stop();
mediaCodec.release();
}
public void offerEncoder(byte[] input) {
try {
ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
if (inputBufferIndex >= 0) {
ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(input);
mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length, 0, 0);
}
MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, -1);
while (outputBufferIndex >= 0) {
ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
byte[] outData = new byte[bufferInfo.size];
outputBuffer.get(outData);
if (sps != null && pps != null) {
ByteBuffer frameBuffer = ByteBuffer.wrap(outData);
frameBuffer.putInt(bufferInfo.size - 4);
frameListener.frameReceived(outData, 0, outData.length);
} else {
ByteBuffer spsPpsBuffer = ByteBuffer.wrap(outData);
if (spsPpsBuffer.getInt() == 0x00000001) {
System.out.println("parsing sps/pps");
} else {
System.out.println("something is amiss?");
}
int ppsIndex = 0;
while(!(spsPpsBuffer.get() == 0x00 && spsPpsBuffer.get() == 0x00 && spsPpsBuffer.get() == 0x00 && spsPpsBuffer.get() == 0x01)) {
}
ppsIndex = spsPpsBuffer.position();
sps = new byte[ppsIndex - 8];
System.arraycopy(outData, 4, sps, 0, sps.length);
pps = new byte[outData.length - ppsIndex];
System.arraycopy(outData, ppsIndex, pps, 0, pps.length);
/*if (null != parameterSetsListener) {
parameterSetsListener.avcParametersSetsEstablished(sps, pps);
}*/
}
mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
} catch (Throwable t) {
t.printStackTrace();
}
}
}
source to share
No one has answered this question yet
Check out similar questions: