Android Screen Mirroring, Part 2: Decoding and Display with MediaCodec
2019-12-07
DON_1007
Before building the actual phone-to-phone mirroring feature, let's first get decoding and display working on a single device. From Android Screen Mirroring, Part 1: Screen Capture + MediaCodec Encoding, we already have the H.264 video stream produced by capturing and encoding the screen. In this part we complete MediaCodec decoding and display in three steps:
- 1. Save the captured, encoded screen data to a local file
- 2. Parse the local file to recover the H.264 video stream
- 3. Feed the H.264 data to MediaCodec for decoding and display
1. Saving the captured, encoded data to a local file
int count = 0;
ScreenCapture.OnCaptureVideoCallback mVideoCallback = new ScreenCapture.OnCaptureVideoCallback() {
    @Override
    public void onCaptureVideoCallback(byte[] buf) {
        count++;
        // Prefix each encoded frame with a 4-byte length header so the
        // file can be parsed back frame by frame later.
        byte[] bytes = new byte[buf.length + 4];
        byte[] head = CodecUtils.intToBytes(buf.length);
        //Log.i(TAG, "onCaptureVideoCallback frameLen " + buf.length + "/" + count);
        System.arraycopy(head, 0, bytes, 0, head.length);
        System.arraycopy(buf, 0, bytes, head.length, buf.length);
        writeVideo(bytes);
    }
};
...
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (resultCode != RESULT_OK || requestCode != REQUEST_CODE)
        return;
    mediaProjection = mMediaProjectionManager.getMediaProjection(resultCode, data);
    if (mediaProjection == null) {
        return;
    }
    // Capture at the device's full screen resolution.
    DisplayMetrics dm = getResources().getDisplayMetrics();
    mScreenCapture = new ScreenCapture(dm.widthPixels, dm.heightPixels, mediaProjection);
    mScreenCapture.setOnCaptureVideoCallback(mVideoCallback);
    mScreenCapture.startCapture();
}
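For completeness (this was covered in Part 1): the resultCode and data consumed above come from launching the screen-capture permission dialog with MediaProjectionManager.createScreenCaptureIntent(). A minimal recap, where the REQUEST_CODE value is an arbitrary assumption:

private static final int REQUEST_CODE = 1; // any app-chosen request code
private MediaProjectionManager mMediaProjectionManager;

private void requestCapturePermission() {
    mMediaProjectionManager =
            (MediaProjectionManager) getSystemService(Context.MEDIA_PROJECTION_SERVICE);
    // Shows the system capture-consent dialog; the result arrives in onActivityResult.
    startActivityForResult(mMediaProjectionManager.createScreenCaptureIntent(), REQUEST_CODE);
}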
Once screen capture starts, OnCaptureVideoCallback delivers the encoded H.264 data. We store it in a file, and to make parsing easier later, we prepend 4 bytes to each frame recording that frame's length.
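The article never shows CodecUtils. The two helpers it relies on, intToBytes here and bytesToInt in the file reader of section 2, just convert between an int and 4 bytes; a minimal sketch, assuming big-endian order (the two only need to agree with each other):

public class CodecUtils {

    // Encode an int as 4 big-endian bytes: the frame-length header.
    public static byte[] intToBytes(int value) {
        return new byte[]{
                (byte) (value >>> 24),
                (byte) (value >>> 16),
                (byte) (value >>> 8),
                (byte) value
        };
    }

    // Decode the 4-byte big-endian header back into an int.
    public static int bytesToInt(byte[] bytes) {
        return ((bytes[0] & 0xFF) << 24)
                | ((bytes[1] & 0xFF) << 16)
                | ((bytes[2] & 0xFF) << 8)
                | (bytes[3] & 0xFF);
    }
}

The file writing itself: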
private String videoPath = "sdcard/mc_video.h264";
private OutputStream mVideoStream;
...
private void writeVideo(byte[] bytes) {
    //Log.i(TAG, "writeVideo");
    if (mVideoStream == null) {
        // Lazily (re)create the dump file on the first frame.
        File videoFile = new File(videoPath);
        if (videoFile.exists()) {
            videoFile.delete();
        }
        try {
            videoFile.createNewFile();
            mVideoStream = new FileOutputStream(videoFile);
        } catch (Exception e) {
            Log.w(TAG, e);
        }
    }
    try {
        mVideoStream.write(bytes);
    } catch (Exception e) {
        Log.w(TAG, e);
    }
}
private void stopWriteVideo() {
    if (mVideoStream != null) {
        try {
            mVideoStream.flush();
            mVideoStream.close();
        } catch (Exception e) {
            Log.w(TAG, e);
        }
        mVideoStream = null;
    }
}
This produces an mc_video.h264 file in the root of external storage (writing there requires the WRITE_EXTERNAL_STORAGE permission). Note that because of our 4-byte length prefixes this is not a standard Annex-B stream, so ordinary players won't open it directly; it is only meant to be parsed back by the reader below.
2. Parsing the local file to recover the H.264 video stream
public class AVCFileReader extends Thread {
    private String TAG = getClass().getSimpleName();
    // Path of the dump file
    private String path;
    // Set once the whole file has been consumed
    private boolean isFinish = false;
    private AVCDecoder mDecoder;

    public AVCFileReader(String path) {
        this.path = path;
    }

    public void setDecoder(AVCDecoder decoder) {
        mDecoder = decoder;
    }

    @Override
    public void run() {
        super.run();
        File file = new File(path);
        if (!file.exists()) {
            return;
        }
        // DataInputStream.readFully() guards against short reads; a bare
        // FileInputStream.read() may return fewer bytes than requested.
        try (DataInputStream dis = new DataInputStream(new FileInputStream(file))) {
            // When saving, the first 4 bytes of every frame record its length.
            byte[] frameLength = new byte[4];
            // Length of the current frame
            int frameLen;
            // Buffer for each frame read from the file
            byte[] readData;
            int count = 0;
            while (!isFinish) {
                if (dis.available() > 0) {
                    dis.readFully(frameLength);
                    frameLen = CodecUtils.bytesToInt(frameLength);
                    // Log.i(TAG, "frameLen " + frameLen);
                    readData = new byte[frameLen];
                    dis.readFully(readData);
                    count++;
                    onFrame(readData, 0, readData.length);
                    try {
                        // Each frame is handed to the decoder as soon as it is read,
                        // with no timestamps (PTS/DTS) to pace decoding and display,
                        // so sleep to approximate the frame rate: 1000 / 60 ≈ 16 ms.
                        Thread.sleep(16);
                    } catch (Exception e) {
                        Log.w(TAG, e);
                    }
                } else {
                    // End of file reached
                    isFinish = true;
                }
            }
            //Log.i(TAG, "frameLen finish " + count);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void onFrame(byte[] frame, int offset, int length) {
        //Log.i(TAG, "onFrame " + offset + "/" + length);
        mDecoder.onFrame(frame, offset, length);
    }
}
Because we prepended 4 bytes recording each frame's length when saving, pulling out each frame is trivial. And since every frame is handed to the decoder as soon as it is read, with no timestamps (PTS/DTS) to pace decoding and display, the reading thread sleeps between frames as a crude playback clock so the video doesn't race ahead. Each frame read is handed to an AVCDecoder object (built in the next section) for decoding and display; wiring the two together is sketched below.
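A minimal sketch of the glue code, assuming it runs once the SurfaceView's surface exists (see section 3); the article shows the pieces but not this exact wiring:

AVCDecoder decoder = new AVCDecoder(mSurfaceView);            // needs a live Surface
AVCFileReader reader = new AVCFileReader("sdcard/mc_video.h264");
reader.setDecoder(decoder);
reader.start();  // reads frame by frame and calls decoder.onFrame(...)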
3. Decoding and display with MediaCodec
To display decoded frames we need a Surface. A Surface can be obtained from a SurfaceView or a TextureView; here a SurfaceView is recommended.
1) Add a SurfaceView to the layout
<SurfaceView
    android:id="@+id/videoSurface"
    android:layout_width="720px"
    android:layout_height="1280px"
    android:layout_gravity="center_horizontal"
    android:visibility="visible" />
Load the SurfaceView and wait for its surface to be created:
private void initView() {
    ...
    mSurfaceView = (SurfaceView) findViewById(R.id.videoSurface);
    mSurfaceHolder = mSurfaceView.getHolder();
    mSurfaceHolder.addCallback(new SurfaceHolder.Callback() {
        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            // The surface now exists; only from this point on can it be
            // handed to MediaCodec for decoding and display.
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
        }
    });
}
2) Create the AVCDecoder
AVCDecoder creates the decoder internally and needs a usable Surface, so we construct it only after the SurfaceView's surface has been created:
avcDecoder = new AVCDecoder(mSurfaceView);
public class AVCDecoder {
    private String TAG = "MC_AVCDecoder";
    private MediaCodec mCodec;
    private MediaFormat mCodecFormat;
    private final static String MIME_TYPE = "video/avc";
    private final static int VIDEO_WIDTH = 720;
    private final static int VIDEO_HEIGHT = 1280;
    private SurfaceView mSurfaceView;

    public final static int DECODE_ASYNC = 0;
    public final static int DECODE_SYNC = 1;
    public final static int DECODE_SYNC_DEPRECATED = 2;
    private int mDecodeType = DECODE_ASYNC;

    // Frames and free input-buffer indices cross threads (file reader, codec
    // callback, feeder thread), so use java.util.concurrent.LinkedBlockingQueue
    // instead of a plain, non-thread-safe LinkedList.
    private LinkedBlockingQueue<VideoFrame> mFrameList = new LinkedBlockingQueue<>();
    private LinkedBlockingQueue<Integer> mInputIndexList = new LinkedBlockingQueue<>();

    public AVCDecoder(SurfaceView surfaceView) {
        mSurfaceView = surfaceView;
        initDecoder();
    }

    public void initDecoder() {
        try {
            mCodec = MediaCodec.createDecoderByType(MIME_TYPE);
            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
            mCodecFormat = format;
            if (mDecodeType == DECODE_ASYNC) {
                // Asynchronous mode: the callback must be set before configure().
                mCodec.setCallback(new MediaCodec.Callback() {
                    @Override
                    public void onInputBufferAvailable(MediaCodec codec, int index) {
                        // Remember the free input buffer; the feeder thread fills it.
                        mInputIndexList.add(index);
                    }

                    @Override
                    public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) {
                        // render = true draws the decoded frame onto the Surface.
                        mCodec.releaseOutputBuffer(index, true);
                    }

                    @Override
                    public void onError(MediaCodec codec, MediaCodec.CodecException e) {
                        Log.w(TAG, "onError", e);
                    }

                    @Override
                    public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
                        Log.i(TAG, "onOutputFormatChanged");
                    }
                });
                queueInputBuffer();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private void queueInputBuffer() {
        // Feeder thread: pairs a pending frame with a free input buffer.
        // take() blocks until both are available instead of busy-spinning.
        new Thread(new Runnable() {
            @Override
            public void run() {
                while (true) {
                    try {
                        VideoFrame frame = mFrameList.take();
                        Integer index = mInputIndexList.take();
                        ByteBuffer inputBuffer = mCodec.getInputBuffer(index);
                        inputBuffer.clear();
                        inputBuffer.put(frame.buf, frame.offset, frame.length);
                        // Log.i(TAG, "queueInputBuffer " + frame.offset + "/" + frame.length);
                        mCodec.queueInputBuffer(index, 0, frame.length, 0, 0);
                    } catch (InterruptedException e) {
                        return;
                    }
                }
            }
        }).start();
    }
    ...
}
Frames are received in onFrame:
public void onFrame(byte[] buf, int offset, int length) {
    // The first frame out of the encoder carries the SPS and PPS; the decoder
    // must be configured with them before it can work.
    if (CodecUtils.getFrameType(buf) == CodecUtils.NAL_SPS) {
        int ppsPosition = getPPSPosition(buf);
        if (ppsPosition > 0) {
            byte[] sps = new byte[ppsPosition];
            System.arraycopy(buf, 0, sps, 0, sps.length);
            byte[] pps = new byte[buf.length - sps.length];
            System.arraycopy(buf, sps.length, pps, 0, pps.length);
            Log.i(TAG, "queueInputBuffer " + Arrays.toString(sps) + "/" + Arrays.toString(pps));
            mCodecFormat.setByteBuffer("csd-0", ByteBuffer.wrap(sps));
            mCodecFormat.setByteBuffer("csd-1", ByteBuffer.wrap(pps));
            mCodec.configure(mCodecFormat, mSurfaceView.getHolder().getSurface(),
                    null, 0);
            mCodec.start();
        }
    }
    switch (mDecodeType) {
        case DECODE_ASYNC:
            decodeAsync(buf, offset, length);
            break;
        case DECODE_SYNC:
            decodeSync(buf, offset, length);
            break;
        case DECODE_SYNC_DEPRECATED:
            decodeDeprecated(buf, offset, length);
            break;
    }
}
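Neither getPPSPosition nor CodecUtils.getFrameType is shown in the article. Minimal reconstructions of both, assuming Annex-B framing with 4-byte start codes (00 00 00 01) as produced by MediaCodec encoders:

// In CodecUtils (hypothetical): the NAL unit type lives in the low 5 bits
// of the byte right after the 4-byte start code; SPS is type 7.
public static final int NAL_SPS = 7;

public static int getFrameType(byte[] buf) {
    if (buf == null || buf.length < 5) return -1;
    return buf[4] & 0x1F;
}

// In AVCDecoder (hypothetical): offset of the second start code,
// i.e. where the PPS NAL begins in an SPS+PPS buffer.
private int getPPSPosition(byte[] buf) {
    for (int i = 4; i + 3 < buf.length; i++) {
        if (buf[i] == 0 && buf[i + 1] == 0 && buf[i + 2] == 0 && buf[i + 3] == 1) {
            return i;
        }
    }
    return -1; // no second start code found
}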
private void decodeAsync(byte[] buf, int offset, int length) {
    // Just queue the frame; the feeder thread pushes it into the codec.
    VideoFrame frame = new VideoFrame();
    frame.buf = buf;
    frame.offset = offset;
    frame.length = length;
    mFrameList.add(frame);
    // Log.i(TAG, "decodeAsync " + mFrameList.size());
}
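VideoFrame also isn't listed in the article; it is just a plain holder for the fields used above:

// Assumed definition, matching the fields referenced in the code.
public class VideoFrame {
    public byte[] buf;
    public int offset;
    public int length;
}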
The decoder can only start working after the SPS and PPS have been set.
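As an aside, MediaCodec also accepts the SPS/PPS as a regular input buffer flagged BUFFER_FLAG_CODEC_CONFIG instead of csd-0/csd-1. A sketch of that alternative for synchronous mode only (dequeueInputBuffer must not be called in asynchronous mode), not what this article does:

// Queue codec-specific data right after start(), before any video frames.
private void queueCodecConfig(byte[] spsPps) {
    int index = mCodec.dequeueInputBuffer(-1); // block until a buffer is free
    ByteBuffer in = mCodec.getInputBuffer(index);
    in.clear();
    in.put(spsPps);
    mCodec.queueInputBuffer(index, 0, spsPps.length, 0,
            MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
}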
As with encoding, asynchronous operation is the mode the system recommends since Android 5.0, but a synchronous mode is still provided.
private void decodeSync(byte[] buf, int offset, int length) {
    int inputBufferIndex = mCodec.dequeueInputBuffer(100);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = mCodec.getInputBuffer(inputBufferIndex);
        inputBuffer.clear();
        inputBuffer.put(buf, offset, length);
        // Presentation timestamp in microseconds, as MediaCodec expects.
        mCodec.queueInputBuffer(inputBufferIndex, 0, length, System.nanoTime() / 1000, 0);
    } else {
        // No free input buffer within the timeout; this frame is dropped.
        return;
    }
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 100);
    while (outputBufferIndex >= 0) {
        // render = true draws the decoded frame onto the Surface.
        mCodec.releaseOutputBuffer(outputBufferIndex, true);
        outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 0);
    }
}
The pre-Android 5.0 synchronous style, which fetches the buffer arrays up front, has been deprecated as well.
private void decodeDeprecated(byte[] buf, int offset, int length) {
    // Before API 21, input buffers were obtained as an array up front.
    ByteBuffer[] inputBuffers = mCodec.getInputBuffers();
    int inputBufferIndex = mCodec.dequeueInputBuffer(100);
    if (inputBufferIndex >= 0) {
        ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
        inputBuffer.clear();
        inputBuffer.put(buf, offset, length);
        mCodec.queueInputBuffer(inputBufferIndex, 0, length, System.nanoTime() / 1000, 0);
    } else {
        return;
    }
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 100);
    while (outputBufferIndex >= 0) {
        mCodec.releaseOutputBuffer(outputBufferIndex, true);
        outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 0);
    }
}
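One thing the article doesn't cover is teardown: a MediaCodec holds a hardware decoder instance and should be stopped and released when you're done with it. A minimal sketch of such a cleanup method (not in the original code):

public void release() {
    if (mCodec != null) {
        try {
            mCodec.stop();
        } catch (IllegalStateException e) {
            Log.w(TAG, e);
        }
        mCodec.release(); // frees the underlying hardware codec
        mCodec = null;
    }
}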
The complete pipeline: screen capture and encoding → save to file → read from file → decode and display.
The final result:
(Animated GIF: the decoded screen recording playing back on the device)
That wraps up video encoding, decoding, and display for now; next up is implementing audio recording and playback.