要在Android中合并两个视频,可以使用MediaExtractor、MediaCodec和MediaMuxer等类来实现。注意:下面的示例是将两个视频按先后顺序拼接成一个文件;真正的"并排"(画中画/左右分屏)合并需要先解码、再通过OpenGL/Surface合成画面并重新编码,复杂得多。顺序拼接的代码示例如下:
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.os.Environment;

import java.io.IOException;
import java.nio.ByteBuffer;
public class VideoMergeUtil {
// Intended H.264/AVC MIME type for the output track.
// NOTE(review): declared but never read by the visible code — presumably meant
// for an encoder that was never wired up. Confirm before removing.
private static final String OUTPUT_VIDEO_MIME_TYPE = "video/avc";
// File name of the merged output, written under external storage.
private static final String OUTPUT_VIDEO_FILE_NAME = "merged_video.mp4";
// NOTE(review): the three encoder parameters below are declared but the visible
// code never passes them to a MediaCodec encoder configuration.
private static final int OUTPUT_VIDEO_BIT_RATE = 2000000;
private static final int OUTPUT_VIDEO_FRAME_RATE = 30;
private static final int OUTPUT_VIDEO_IFRAME_INTERVAL = 5;
/**
 * Concatenates the video tracks of two MP4 files, one after the other, into a
 * single output file on external storage ({@code merged_video.mp4}).
 *
 * <p>Implementation note: this is a pass-through stream copy — compressed
 * samples are read with {@link MediaExtractor} and written directly to
 * {@link MediaMuxer}, with the second clip's timestamps shifted past the end of
 * the first. The original version decoded samples with {@link MediaCodec} and
 * muxed the decoder's RAW output into an AVC track, which produces an invalid
 * file; no decode/re-encode is needed (or valid) for simple concatenation.
 *
 * <p>NOTE(review): a clean stream copy assumes both inputs share a compatible
 * codec/profile/resolution — TODO confirm for the intended callers. True
 * side-by-side composition would require decode → Surface/GL compose → encode.
 *
 * @param videoFilePath1 path of the first input video (its track format is
 *                       used for the output track)
 * @param videoFilePath2 path of the second input video, appended after the first
 */
public static void mergeVideos(String videoFilePath1, String videoFilePath2) {
    MediaExtractor extractor1 = new MediaExtractor();
    MediaExtractor extractor2 = new MediaExtractor();
    MediaMuxer muxer = null;
    try {
        extractor1.setDataSource(videoFilePath1);
        extractor2.setDataSource(videoFilePath2);

        int videoTrackIndex1 = getMediaTrackIndex(extractor1, "video/");
        int videoTrackIndex2 = getMediaTrackIndex(extractor2, "video/");
        MediaFormat videoFormat1 = extractor1.getTrackFormat(videoTrackIndex1);

        extractor1.selectTrack(videoTrackIndex1);
        extractor2.selectTrack(videoTrackIndex2);

        muxer = new MediaMuxer(
                Environment.getExternalStorageDirectory() + "/" + OUTPUT_VIDEO_FILE_NAME,
                MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        // The first clip's format describes the output track (see codec
        // compatibility note in the javadoc).
        int outputTrackIndex = muxer.addTrack(videoFormat1);
        muxer.start();

        // Size the sample buffer from the track's declared max input size when
        // available; 1 MiB is a conservative fallback for compressed frames.
        int bufferSize = videoFormat1.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)
                ? videoFormat1.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE)
                : 1 << 20;
        ByteBuffer sampleBuffer = ByteBuffer.allocate(bufferSize);
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

        // Copy clip 1 as-is, then clip 2 shifted past the last written
        // timestamp plus one nominal frame interval so timestamps never collide.
        long firstClipEndUs = copyVideoTrack(extractor1, muxer, outputTrackIndex,
                sampleBuffer, info, 0L);
        long frameGapUs = 1_000_000L / OUTPUT_VIDEO_FRAME_RATE;
        copyVideoTrack(extractor2, muxer, outputTrackIndex,
                sampleBuffer, info, firstClipEndUs + frameGapUs);

        muxer.stop();
    } catch (IOException e) {
        // Preserved from the original interface: best-effort merge, no rethrow.
        e.printStackTrace();
    } finally {
        // Release native resources even when setDataSource/addTrack/start throws.
        if (muxer != null) {
            muxer.release();
        }
        extractor1.release();
        extractor2.release();
    }
}

/**
 * Copies every remaining sample of the extractor's selected track into the
 * muxer, shifting each presentation timestamp by {@code ptsOffsetUs}.
 *
 * @return the presentation time (µs, offset included) of the last sample
 *         written, or {@code ptsOffsetUs} if the track had no samples
 */
private static long copyVideoTrack(MediaExtractor extractor, MediaMuxer muxer,
        int trackIndex, ByteBuffer sampleBuffer, MediaCodec.BufferInfo info,
        long ptsOffsetUs) {
    long lastPtsUs = ptsOffsetUs;
    while (true) {
        int sampleSize = extractor.readSampleData(sampleBuffer, 0);
        if (sampleSize < 0) {
            break; // end of stream
        }
        info.offset = 0;
        info.size = sampleSize;
        info.presentationTimeUs = extractor.getSampleTime() + ptsOffsetUs;
        // Carry the sync-frame flag through so seeking works in the output.
        info.flags = (extractor.getSampleFlags() & MediaExtractor.SAMPLE_FLAG_SYNC) != 0
                ? MediaCodec.BUFFER_FLAG_KEY_FRAME
                : 0;
        muxer.writeSampleData(trackIndex, sampleBuffer, info);
        lastPtsUs = info.presentationTimeUs;
        extractor.advance();
    }
    return lastPtsUs;
}
private static int getMediaTrackIndex(MediaExtractor extractor, String mimeType) {
int trackCount = extractor.getTrackCount();
for (int i = 0;