This article uses MediaCodec to encode YUV data, producing H.264 (AVC) output. MediaMuxer is then used to mux the video track and an audio track into an MP4 container. Video is typically encoded as H.264 (AVC) and audio as AAC.
First, create a MediaCodec instance and configure its encoding parameters. The configuration looks like this:
MediaFormat mediaFormat = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, width, height);
mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 6);
mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
encoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
encoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
encoder.start();
MediaFormat creation: the video format is created with MediaFormat.createVideoFormat, which takes the MIME type, width, and height; the color format, bit rate, frame rate, and I-frame interval are then set on it.
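COLOR_FormatYUV420Flexible is widely supported, but the concrete layout an encoder expects (for example NV12 vs. I420) varies by device. Below is a minimal sketch of listing the color formats the device's AVC encoders report before configuring; the class and method names (CodecCapabilityUtil, printAvcEncoderColorFormats) are illustrative, not part of the original article:

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;

public class CodecCapabilityUtil {
    private static final String TAG = "CodecCapabilityUtil";

    // Log the color formats reported by every AVC encoder on the device (API 21+).
    public static void printAvcEncoderColorFormats() {
        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo info : list.getCodecInfos()) {
            if (!info.isEncoder()) {
                continue;
            }
            for (String type : info.getSupportedTypes()) {
                if (!MediaFormat.MIMETYPE_VIDEO_AVC.equalsIgnoreCase(type)) {
                    continue;
                }
                MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
                for (int colorFormat : caps.colorFormats) {
                    Log.d(TAG, info.getName() + " supports color format " + colorFormat);
                }
            }
        }
    }
}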
During encoding, each YUV frame is submitted to the encoder and the encoded output is drained:

private void encode(byte[] yuv, long presentationTimeUs) {
    int inputBufferIndex = mEncoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
    if (inputBufferIndex == -1) {
        return;
    }
    ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufferIndex);
    inputBuffer.put(yuv);
    mEncoder.queueInputBuffer(inputBufferIndex, 0, yuv.length, presentationTimeUs, 0);
    while (!mStop) {
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mEncoder.dequeueOutputBuffer(bufferInfo, DEFAULT_TIMEOUT_US);
        if (outputBufferIndex >= 0) {
            ByteBuffer outputBuffer = mEncoder.getOutputBuffer(outputBufferIndex);
            if (mVideoTrackIndex == -1) {
                mVideoTrackIndex = writeHeadInfo(outputBuffer, bufferInfo);
            }
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                mMediaMuxer.writeSampleData(mVideoTrackIndex, outputBuffer, bufferInfo);
            }
            mEncoder.releaseOutputBuffer(outputBufferIndex, false);
            break;
        }
    }
}

After a frame is encoded, the output is written to the MediaMuxer. Note that for H.264 the codec-specific data (csd, i.e. the SPS/PPS) must be set on the track format first, otherwise muxing may fail. Here is an example of writing this header information:
private int writeHeadInfo(ByteBuffer outputBuffer, MediaCodec.BufferInfo bufferInfo) {
    byte[] csd = new byte[bufferInfo.size];
    outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
    outputBuffer.position(bufferInfo.offset);
    outputBuffer.get(csd);
    ByteBuffer sps = null;
    ByteBuffer pps = null;
    // Scan backwards for the 0x00 0x00 0x00 0x01 start code that separates the SPS and PPS.
    for (int i = bufferInfo.size - 1; i > 3; i--) {
        if (csd[i] == 1 && csd[i - 1] == 0 && csd[i - 2] == 0 && csd[i - 3] == 0) {
            sps = ByteBuffer.allocate(i - 3);
            pps = ByteBuffer.allocate(bufferInfo.size - (i - 3));
            sps.put(csd, 0, i - 3).position(0);
            pps.put(csd, i - 3, bufferInfo.size - (i - 3)).position(0);
        }
    }
    MediaFormat outputFormat = mEncoder.getOutputFormat();
    if (sps != null && pps != null) {
        outputFormat.setByteBuffer("csd-0", sps);
        outputFormat.setByteBuffer("csd-1", pps);
    }
    int videoTrackIndex = mMediaMuxer.addTrack(outputFormat);
    mMediaMuxer.start();
    return videoTrackIndex;
}
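As an alternative to splitting the SPS/PPS out of the first encoded buffer by hand, the track can also be added when dequeueOutputBuffer returns MediaCodec.INFO_OUTPUT_FORMAT_CHANGED, because at that point getOutputFormat() already carries csd-0/csd-1. A minimal sketch of such a drain loop, assuming the same mEncoder, mMediaMuxer, mVideoTrackIndex, mStop, and DEFAULT_TIMEOUT_US members used above:

// Drain loop that relies on INFO_OUTPUT_FORMAT_CHANGED instead of parsing SPS/PPS manually.
private void drainEncoder() {
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    while (!mStop) {
        int index = mEncoder.dequeueOutputBuffer(bufferInfo, DEFAULT_TIMEOUT_US);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // The output format now contains csd-0 (SPS) and csd-1 (PPS); add the track here.
            mVideoTrackIndex = mMediaMuxer.addTrack(mEncoder.getOutputFormat());
            mMediaMuxer.start();
        } else if (index >= 0) {
            ByteBuffer outputBuffer = mEncoder.getOutputBuffer(index);
            // Skip codec config buffers; the muxer already received them via the track format.
            if (bufferInfo.size > 0 && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                mMediaMuxer.writeSampleData(mVideoTrackIndex, outputBuffer, bufferInfo);
            }
            mEncoder.releaseOutputBuffer(index, false);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break;
            }
        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            break;
        }
    }
}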
After encoding finishes, the encoder and the MediaMuxer must be released properly:

mEncoder.stop();
mEncoder.release();
mMediaMuxer.release();
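If frames may still be buffered inside the encoder, it is common to signal end-of-stream and drain the remaining output before the stop()/release() calls above. A short sketch, assuming the same members as before:

// Queue an empty buffer flagged end-of-stream so the encoder flushes any pending frames.
int inputIndex = mEncoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
if (inputIndex >= 0) {
    mEncoder.queueInputBuffer(inputIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
}
// Then keep dequeuing output (as in encode() above) until a buffer carrying
// MediaCodec.BUFFER_FLAG_END_OF_STREAM has been written, and only then call stop()/release().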
Here is the complete encoder class:
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Log;

import java.io.IOException;
import java.nio.ByteBuffer;

public class VideoEncoder {
    private static final String TAG = "VideoEncoder";
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;
    private static final long DEFAULT_TIMEOUT_US = 10000;

    private MediaCodec mEncoder;
    private MediaMuxer mMediaMuxer;
    private int mVideoTrackIndex;
    private boolean mStop = false;

    public void init(String outPath, int width, int height) throws IOException {
        mStop = false;
        mVideoTrackIndex = -1;
        mMediaMuxer = new MediaMuxer(outPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
        mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, width * height * 6);
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
        mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mEncoder.start();
    }

    public void release() {
        mStop = true;
        if (mEncoder != null) {
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
        if (mMediaMuxer != null) {
            mMediaMuxer.release();
            mMediaMuxer = null;
        }
    }

    public void encode(byte[] yuv, long presentationTimeUs) {
        if (mEncoder == null || mMediaMuxer == null) {
            Log.e(TAG, "mEncoder or mMediaMuxer is null");
            return;
        }
        if (yuv == null) {
            Log.e(TAG, "input yuv data is null");
            return;
        }
        int inputBufferIndex = mEncoder.dequeueInputBuffer(DEFAULT_TIMEOUT_US);
        if (inputBufferIndex == -1) {
            Log.e(TAG, "no valid input buffer available");
            return;
        }
        // Copy the YUV frame into the codec input buffer and queue it with its timestamp.
        ByteBuffer inputBuffer = mEncoder.getInputBuffer(inputBufferIndex);
        inputBuffer.put(yuv);
        mEncoder.queueInputBuffer(inputBufferIndex, 0, yuv.length, presentationTimeUs, 0);
        // Drain one encoded buffer and hand it to the muxer.
        while (!mStop) {
            MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
            int outputBufferIndex = mEncoder.dequeueOutputBuffer(bufferInfo, DEFAULT_TIMEOUT_US);
            if (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = mEncoder.getOutputBuffer(outputBufferIndex);
                if (mVideoTrackIndex == -1) {
                    // First output buffer: extract SPS/PPS, add the video track, start the muxer.
                    mVideoTrackIndex = writeHeadInfo(outputBuffer, bufferInfo);
                }
                if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                    mMediaMuxer.writeSampleData(mVideoTrackIndex, outputBuffer, bufferInfo);
                }
                mEncoder.releaseOutputBuffer(outputBufferIndex, false);
                break;
            }
        }
    }

    private int writeHeadInfo(ByteBuffer outputBuffer, MediaCodec.BufferInfo bufferInfo) {
        byte[] csd = new byte[bufferInfo.size];
        outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
        outputBuffer.position(bufferInfo.offset);
        outputBuffer.get(csd);
        ByteBuffer sps = null;
        ByteBuffer pps = null;
        for (int i = bufferInfo.size - 1; i > 3; i--) {
            if (csd[i] == 1 && csd[i - 1] == 0 && csd[i - 2] == 0 && csd[i - 3] == 0) {
                sps = ByteBuffer.allocate(i - 3);
                pps = ByteBuffer.allocate(bufferInfo.size - (i - 3));
                sps.put(csd, 0, i - 3).position(0);
                pps.put(csd, i - 3, bufferInfo.size - (i - 3)).position(0);
            }
        }
        MediaFormat outputFormat = mEncoder.getOutputFormat();
        if (sps != null && pps != null) {
            outputFormat.setByteBuffer("csd-0", sps);
            outputFormat.setByteBuffer("csd-1", pps);
        }
        int videoTrackIndex = mMediaMuxer.addTrack(outputFormat);
        mMediaMuxer.start();
        return videoTrackIndex;
    }
}

Here is an example of using this encoder:
// mVideoDecoder and mVideoEncoder are member fields of the enclosing class,
// so they can be assigned from inside the callback.
mVideoDecoder = new VideoDecoder();
mVideoDecoder.setOutputFormat(VideoDecoder.COLOR_FORMAT_NV12);
mVideoEncoder = null;
new Thread(new Runnable() {
    @Override
    public void run() {
        mVideoDecoder.decode("/sdcard/test.mp4", new VideoDecoder.DecodeCallback() {
            @Override
            public void onDecode(byte[] yuv, int width, int height, int frameCount, long presentationTimeUs) {
                if (mVideoEncoder == null) {
                    mVideoEncoder = new VideoEncoder();
                    try {
                        mVideoEncoder.init("/sdcard/test_out.mp4", width, height);
                    } catch (IOException e) {
                        e.printStackTrace();
                        return;
                    }
                }
                mVideoEncoder.encode(yuv, presentationTimeUs);
            }

            @Override
            public void onFinish() {
                if (mVideoEncoder != null) {
                    mVideoEncoder.release();
                }
            }

            @Override
            public void onStop() {
                if (mVideoEncoder != null) {
                    mVideoEncoder.release();
                }
            }
        });
    }
}).start();

Audio can be encoded in the same way (typically to AAC) and added to the MediaMuxer as a second track, which the muxer then mixes with the video. Decoding, encoding, and the MediaMuxer write operations should run on a worker thread, as in the example above, to avoid blocking the UI. With this flow, YUV data can be encoded to H.264 and muxed together with audio into an MP4 file, which covers common video encoding and transmission needs.
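The article does not show the audio path itself. Below is a minimal sketch of configuring an AAC encoder whose output would be added to the same MediaMuxer; the sample rate, channel count, bit rate, and the mAudioTrackIndex field name are illustrative assumptions, and the same MediaCodec/MediaFormat/MediaCodecInfo imports as above are assumed:

// Hypothetical sketch: an AAC encoder feeding a second track of the shared muxer.
MediaFormat audioFormat = MediaFormat.createAudioFormat(MediaFormat.MIMETYPE_AUDIO_AAC, 44100, 2);
audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, 128000);
audioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 16384);
MediaCodec audioEncoder = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
audioEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
audioEncoder.start();

// PCM frames are queued into audioEncoder the same way YUV frames are queued above.
// When dequeueOutputBuffer returns MediaCodec.INFO_OUTPUT_FORMAT_CHANGED, add the track:
//     mAudioTrackIndex = mMediaMuxer.addTrack(audioEncoder.getOutputFormat());
// Start the muxer only after both the video and audio tracks have been added, then write
// each encoded audio buffer with mMediaMuxer.writeSampleData(mAudioTrackIndex, buffer, info).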
Reposted from: http://xvru.baihongyu.com/