MediaProjection is an API introduced in Android 5.0 that lets an application capture and record what is shown on the device screen.
MediaCodec is the low-level audio/video codec framework provided by the Android platform and a core part of Android's low-level multimedia stack. It is typically used together with MediaExtractor, MediaSync, MediaMuxer, MediaCrypto, MediaDrm, Image, Surface, and AudioTrack.
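Different devices expose different encoder capabilities (the ScreenEncoder class further below hard-codes 1920x1080, which not every device can encode). Before settling on an encode size it can be worth querying MediaCodecList. A minimal sketch, assuming a small helper of our own (CodecCapabilityCheck and supportsAvcSize are illustrative names, not part of this project):

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;

public class CodecCapabilityCheck {
    // Returns true if any AVC (H.264) encoder on this device supports the given frame size.
    public static boolean supportsAvcSize(int width, int height) {
        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo info : list.getCodecInfos()) {
            if (!info.isEncoder()) continue;
            for (String type : info.getSupportedTypes()) {
                if (!type.equalsIgnoreCase(MediaFormat.MIMETYPE_VIDEO_AVC)) continue;
                MediaCodecInfo.VideoCapabilities caps =
                        info.getCapabilitiesForType(type).getVideoCapabilities();
                if (caps != null && caps.isSizeSupported(width, height)) {
                    return true;
                }
            }
        }
        return false;
    }
}

For example, checking CodecCapabilityCheck.supportsAvcSize(1920, 1080) before building the MediaFormat lets you fall back to 1280x720 on devices that cannot encode 1080p.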
1. Put CMakeLists.txt in the project's app directory.
2. Put the cpp folder in the project's main directory.
3. In the app module's build.gradle, add the following inside the android block:

externalNativeBuild {
    cmake {
        path "CMakeLists.txt"
    }
}

and the following inside defaultConfig:

externalNativeBuild {
    cmake {
        cppFlags "-frtti -fexceptions"
    }
}

Then sync and build the project.
4. Create RtmpUtil
package com.swz.screen.rtmp;

import android.text.TextUtils;

public class RtmpUtil {

    private OnConnectionListener onConnectionListener;

    // Load push.cpp (the "push" native library) and bind the connect/push methods below
    static {
        System.loadLibrary("push");
    }

    private native void n_init(String url);

    private native void n_pushSPSPPS(byte[] sps, int spsLen, byte[] pps, int ppsLen);

    private native void n_pushVideoData(byte[] data, int dataLen, boolean keyFrame);

    private native void n_pushAudioData(byte[] data, int dataLen);

    private native void n_stop();

    // RTMP connection callback; the native side reports connection state through CallJava.cpp
    public interface OnConnectionListener {
        void onConnecting();

        void onConnectSuccess();

        void onConnectFail(String msg);
    }

    public void setOnConnectionListener(OnConnectionListener onConnectionListener) {
        this.onConnectionListener = onConnectionListener;
    }

    public void initConnection(String url) {
        if (TextUtils.isEmpty(url)) return;
        n_init(url);
    }

    public void pushSPSPPS(byte[] sps, byte[] pps) {
        if (sps == null || pps == null) return;
        n_pushSPSPPS(sps, sps.length, pps, pps.length);
    }

    public void pushVideoData(byte[] data, boolean keyFrame) {
        if (data == null) return;
        n_pushVideoData(data, data.length, keyFrame);
        //Log.e("swz", data.length + "keyFrame:" + keyFrame);
    }

    public void pushAudioData(byte[] data) {
        if (data == null) return;
        n_pushAudioData(data, data.length);
    }

    public void stop() {
        n_stop();
    }
}
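A rough sketch of how this class might be wired up from application code (this is a fragment; the RTMP URL and log tags are placeholders, not part of the project):

// Illustrative wiring of RtmpUtil; the URL below is a placeholder.
RtmpUtil rtmpUtil = new RtmpUtil();
rtmpUtil.setOnConnectionListener(new RtmpUtil.OnConnectionListener() {
    @Override
    public void onConnecting() {
        Log.d("swz", "RTMP connecting");
    }

    @Override
    public void onConnectSuccess() {
        Log.d("swz", "RTMP connected, safe to start pushing frames");
    }

    @Override
    public void onConnectFail(String msg) {
        Log.e("swz", "RTMP connect failed: " + msg);
    }
});
rtmpUtil.initConnection("rtmp://example.com/live/stream"); // placeholder URL
// ...push SPS/PPS and encoded frames while streaming, then:
rtmpUtil.stop();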
5. Encode with MediaCodec
package com.swz.screen.util;
import android.hardware.display.DisplayManager;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.projection.MediaProjection;
import android.os.Build;
import android.util.Log;
import android.view.Surface;
import androidx.annotation.RequiresApi;
import com.swz.screen.rtmp.RtmpUtil;
import java.io.IOException;
import java.nio.ByteBuffer;
public class ScreenEncoder extends Thread {

    // Different devices support different maximum encoding resolutions
    private static final int VIDEO_WIDTH = 1920;
    private static final int VIDEO_HEIGHT = 1080;
    private static final int SCREEN_FRAME_RATE = 30;
    private static final int SCREEN_FRAME_INTERVAL = 1;
    private static final long SOCKET_TIME_OUT = 10;

    private final MediaProjection mMediaProjection;
    private final RtmpUtil mRtmpUtil;
    private MediaCodec mMediaCodec;
    private boolean mPlaying = true;
    private Surface surface;
    // Timestamp (ms) of the last encoded frame, used by the frame re-send workaround below
    private long l;
    private boolean sendFrame;
    // Copy of the last key frame, kept for the (commented-out) re-send workaround
    private byte[] mBytes;

    public ScreenEncoder(RtmpUtil rtmpUtil, MediaProjection mediaProjection) {
        mRtmpUtil = rtmpUtil;
        mMediaProjection = mediaProjection;
    }
    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    public void startEncode() {
        MediaFormat mediaFormat =
                MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, VIDEO_WIDTH, VIDEO_HEIGHT);
        mediaFormat.setInteger(
                MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        // Bit rate (bits per second)
        mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, VIDEO_WIDTH * VIDEO_HEIGHT);
        // Frame rate
        mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, SCREEN_FRAME_RATE);
        // Key-frame (I-frame) interval in seconds
        mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, SCREEN_FRAME_INTERVAL);
        // Video rotation
        //mediaFormat.setInteger(MediaFormat.KEY_ROTATION, 180);
        try {
            mMediaCodec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
            mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            surface = mMediaCodec.createInputSurface();
            //surface.setFrameRate(CHANGE_FRAME_RATE_ALWAYS, FRAME_RATE_COMPATIBILITY_DEFAULT);
            mMediaProjection.createVirtualDisplay(
                    "screen",
                    VIDEO_WIDTH,
                    VIDEO_HEIGHT,
                    1, // screen density (dpi)
                    DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
                    surface,
                    null,
                    null);
        } catch (IOException e) {
            e.printStackTrace();
        }
        start();
    }
    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void run() {
        mPlaying = true;
        mMediaCodec.start();
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        while (mPlaying) {
            // TODO MediaCodec quirk: when the screen content is static the encoder produces far
            //  fewer output buffers, so frames may need to be duplicated to keep the stream alive
            if (System.currentTimeMillis() - l > 100) {
                sendFrame = true;
            } else {
                sendFrame = false;
            }
            int outPutBufferId = mMediaCodec.dequeueOutputBuffer(bufferInfo, SOCKET_TIME_OUT);
            if (outPutBufferId >= 0) {
                ByteBuffer byteBuffer = null;
                try {
                    byteBuffer = mMediaCodec.getOutputBuffer(outPutBufferId);
                } catch (IllegalStateException e) {
                    e.printStackTrace();
                }
                if (byteBuffer == null) {
                    continue;
                }
                encodeData(byteBuffer, bufferInfo);
                mMediaCodec.releaseOutputBuffer(outPutBufferId, false);
            } else {
                //Log.e("swz", "outPutBufferId - " + outPutBufferId);
                //Log.e("swz", "sendFrame - " + sendFrame);
                /*if (sendFrame && mBytes != null) {
                    mRtmpUtil.pushVideoData(mBytes, false);
                }*/
            }
        }
        // Clean up on the encoder thread once the loop has exited
        if (mMediaCodec != null) {
            mMediaCodec.release();
        }
        if (mMediaProjection != null) {
            mMediaProjection.stop();
        }
    }
    private void encodeData(ByteBuffer byteBuffer, MediaCodec.BufferInfo bufferInfo) {
        byte[] bytes = new byte[bufferInfo.size];
        byteBuffer.get(bytes);
        l = System.currentTimeMillis();
        // flags is a bit field, so test the key-frame bit instead of comparing for equality
        boolean keyFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
        mRtmpUtil.pushVideoData(bytes, keyFrame);
        /*if (keyFrame) {
            mBytes = new byte[bufferInfo.size];
            System.arraycopy(bytes, 0, mBytes, 0, bytes.length);
        }*/
        Log.e("swz", "pushVideoData -" + ByteUtil.bytesToHexString(bytes));
    }
    public void stopEncode() {
        // Only flip the flag here; the encoder thread releases the codec and stops the
        // projection when the run() loop exits, avoiding a release from another thread
        // while dequeueOutputBuffer may still be in progress.
        mPlaying = false;
    }
}
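One thing the loop above never does is call pushSPSPPS. With an AVC encoder, the SPS and PPS arrive either in an output buffer flagged BUFFER_FLAG_CODEC_CONFIG or in the csd-0/csd-1 buffers of the format reported after INFO_OUTPUT_FORMAT_CHANGED. Whether the native push.cpp instead parses them out of the frame data itself is not shown in this article, so treat the following as a sketch of one possible way to forward them from the dequeue loop:

// Sketch: inside run(), handle the format-changed case before the "id >= 0" case.
int outPutBufferId = mMediaCodec.dequeueOutputBuffer(bufferInfo, SOCKET_TIME_OUT);
if (outPutBufferId == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    MediaFormat outputFormat = mMediaCodec.getOutputFormat();
    ByteBuffer spsBuffer = outputFormat.getByteBuffer("csd-0"); // SPS (Annex-B start code included)
    ByteBuffer ppsBuffer = outputFormat.getByteBuffer("csd-1"); // PPS (Annex-B start code included)
    if (spsBuffer != null && ppsBuffer != null) {
        byte[] sps = new byte[spsBuffer.remaining()];
        byte[] pps = new byte[ppsBuffer.remaining()];
        spsBuffer.get(sps);
        ppsBuffer.get(pps);
        mRtmpUtil.pushSPSPPS(sps, pps);
    }
} else if (outPutBufferId >= 0) {
    // ...existing encodeData() / releaseOutputBuffer() handling from the class above...
}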
Finally, call it from MainActivity:
package com.swz.screen;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.media.projection.MediaProjectionManager;
import android.os.Build;
import android.os.Bundle;
import com.swz.screen.service.ScreenService;
public class MainActivity extends AppCompatActivity {

    private MediaProjectionManager mediaProjectionManager;
    private static final int PROJECTION_REQUEST_CODE = 0x01;
    private Intent service;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
        startProjection();
    }

    // Ask the user for permission to start screen capture
    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    private void startProjection() {
        Intent intent = mediaProjectionManager.createScreenCaptureIntent();
        startActivityForResult(intent, PROJECTION_REQUEST_CODE);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (resultCode != RESULT_OK) {
            return;
        }
        if (requestCode == PROJECTION_REQUEST_CODE) {
            // Hand the projection grant over to a foreground service that does the capture
            service = new Intent(this, ScreenService.class);
            service.putExtra("code", resultCode);
            service.putExtra("data", data);
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
                startForegroundService(service);
            } else {
                startService(service);
            }
        }
    }
}
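ScreenService itself is not listed in this article. A minimal sketch of what it could look like, based on the extras MainActivity passes in (the notification details, icon, and the RTMP URL are assumptions of mine, not the original code):

package com.swz.screen.service;

import android.app.Notification;
import android.app.NotificationChannel;
import android.app.NotificationManager;
import android.app.Service;
import android.content.Intent;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Build;
import android.os.IBinder;
import androidx.annotation.RequiresApi;
import com.swz.screen.rtmp.RtmpUtil;
import com.swz.screen.util.ScreenEncoder;

public class ScreenService extends Service {

    private ScreenEncoder screenEncoder;

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        if (intent == null) return START_NOT_STICKY;
        // A foreground notification is required before the projection can be used on newer Android versions
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            NotificationChannel channel =
                    new NotificationChannel("screen", "Screen capture", NotificationManager.IMPORTANCE_LOW);
            getSystemService(NotificationManager.class).createNotificationChannel(channel);
            Notification notification = new Notification.Builder(this, "screen")
                    .setContentTitle("Screen streaming")
                    .setSmallIcon(android.R.drawable.ic_menu_camera) // placeholder icon
                    .build();
            startForeground(1, notification);
        }

        // Rebuild the MediaProjection from the result MainActivity forwarded
        int code = intent.getIntExtra("code", -1);
        Intent data = intent.getParcelableExtra("data");
        MediaProjectionManager manager =
                (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
        MediaProjection mediaProjection = manager.getMediaProjection(code, data);

        RtmpUtil rtmpUtil = new RtmpUtil();
        rtmpUtil.initConnection("rtmp://example.com/live/stream"); // placeholder URL

        screenEncoder = new ScreenEncoder(rtmpUtil, mediaProjection);
        screenEncoder.startEncode();
        return START_NOT_STICKY;
    }

    @Override
    public void onDestroy() {
        if (screenEncoder != null) {
            screenEncoder.stopEncode();
        }
        super.onDestroy();
    }
}

On Android 10 and above the service should also declare android:foregroundServiceType="mediaProjection" in the manifest, and the foreground notification must be shown before getMediaProjection() is used.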