Note: audio streaming is not implemented.
The screen-recording-to-live-streaming flow
- Streaming side: obtain screen-capture permission and start recording the screen
- Streaming side: start a worker thread and drain the encoder to get the raw H.264 stream
- Streaming side: run an RTSP server on the device and push the stream through it
- Playback side: take the rtsp://ip address and play it in VLC
What is RTSP?
RTSP works in a client-server fashion. It is a multimedia playback-control protocol that lets the user control real-time data streamed over the Internet while it plays, e.g. pause/resume, rewind and fast-forward. For this reason RTSP is also known as the "Internet VCR remote-control protocol". For more detail, see a dedicated RTSP protocol reference.
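To make that concrete, a simplified RTSP session between a player and the server looks roughly like this (the address, port and session ID below are made up purely for illustration):
C -> S: OPTIONS rtsp://192.168.1.10:8086 RTSP/1.0    (ask which methods the server supports)
C -> S: DESCRIBE rtsp://192.168.1.10:8086 RTSP/1.0   (server answers with an SDP describing the H.264 track)
C -> S: SETUP ...                                    (negotiate the RTP/RTCP transport ports)
        S -> C: RTSP/1.0 200 OK, Session: 12345678
C -> S: PLAY ...                                     (media then flows over RTP)
C -> S: PAUSE / TEARDOWN                             (playback control, end of session)
Every request is answered with a status line such as "RTSP/1.0 200 OK" plus a CSeq header matching the request, which is exactly what the Response.send() method shown further below produces.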
How do we record the screen?
Use the MediaProjection API introduced in Android 5.0: request screen-capture permission through MediaProjectionManager, and start recording the screen once the user grants it.
Steps:
1. Get a MediaProjectionManager instance
mMediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
2. Request permission
Intent captureIntent = mMediaProjectionManager.createScreenCaptureIntent();
startActivityForResult(captureIntent, REQUEST_CODE);
3. Receive the grant result in onActivityResult
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode != REQUEST_CODE || resultCode != RESULT_OK) {
        return; // the user refused, or this result belongs to another request
    }
    try {
        MediaProjection mediaProjection = mMediaProjectionManager.getMediaProjection(resultCode, data);
        if (mediaProjection == null) {
            T.showShort(ServerActivity.this, "Something went wrong: MediaProjection@1");
            RunState.getInstance().setRun(false);
            return;
        }
        // hand mediaProjection to the recording thread (see step 4)
    } catch (Exception e) {
        e.printStackTrace(); // don't swallow the exception silently
    }
}
4. Create the screen-recording thread
public class ScreenRecordThread extends Thread {
    private final static String TAG = "ScreenRecord";
    private Surface mSurface;
    private Context mContext;
    private VirtualDisplay mVirtualDisplay;
    private MediaProjection mMediaProjection;
    private VideoMediaCodec mVideoMediaCodec;
    private WindowManager wm;
    private int windowWidth;
    private int windowHeight;
    private int screenDensity;

    public ScreenRecordThread(Context context, MediaProjection mp, H264DataCollecter mH264Collecter) {
        this.mContext = context;
        this.mMediaProjection = mp;
        wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE);
        windowWidth = DisplayUtils.getDisplayW(context);
        windowHeight = DisplayUtils.getDisplayH(context);
        DisplayMetrics displayMetrics = new DisplayMetrics();
        wm.getDefaultDisplay().getMetrics(displayMetrics);
        screenDensity = displayMetrics.densityDpi;
        mVideoMediaCodec = new VideoMediaCodec(wm, context, mH264Collecter);
    }

    @Override
    public void run() {
        DisplayMetrics displayMetrics = new DisplayMetrics();
        wm.getDefaultDisplay().getMetrics(displayMetrics);
        screenDensity = displayMetrics.densityDpi;
        Log.e("wm-dpi", windowWidth + "-" + windowHeight + "-" + screenDensity);
        // Configure the H.264 encoder and obtain its input Surface
        mVideoMediaCodec.prepare();
        mSurface = mVideoMediaCodec.getSurface();
        // Mirror the screen into the encoder's Surface through a VirtualDisplay
        mVirtualDisplay = mMediaProjection.createVirtualDisplay(TAG + "-display",
                windowWidth, windowHeight, Constant.VIDEO_DPI,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
                mSurface, null, null);
        // Start draining encoded frames (loops until isRun is cleared)
        mVideoMediaCodec.isRun(true);
        mVideoMediaCodec.getBuffer();
    }

    /**
     * Stop recording and release the encoder and the virtual display.
     **/
    public void release() {
        mVideoMediaCodec.release();
        if (mVirtualDisplay != null) {
            mVirtualDisplay.release();
        }
    }
}
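A minimal sketch of how steps 3 and 4 might be wired together once permission is granted; the field name mScreenRecordThread and starting the thread directly from onActivityResult are assumptions of this sketch, not something the original code shows:
// Inside onActivityResult, after mediaProjection has been obtained successfully (assumed wiring)
ScreenRecordThread mScreenRecordThread =
        new ScreenRecordThread(this, mediaProjection, mH264Collecter); // mH264Collecter implements H264DataCollecter
mScreenRecordThread.start(); // run() prepares the encoder, creates the VirtualDisplay and starts draining frames

// ... and when streaming should stop:
// mScreenRecordThread.release();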
5. Drain the encoder to get the raw H.264 stream
public void getBuffer() {
    try {
        while (isRun) {
            if (mEncoder == null)
                break;
            if (startTime == 0) {
                startTime = mBufferInfo.presentationTimeUs * 1000;
            }
            // Ask the encoder for a sync (key) frame once per second
            if (System.currentTimeMillis() - timeStamp >= 1000) {
                timeStamp = System.currentTimeMillis();
                Bundle params = new Bundle();
                params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
                mEncoder.setParameters(params);
            }
            int outputBufferIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // The encoder reports its final format once: extract SPS/PPS and wrap them
                // in an AVCDecoderConfigurationRecord inside an FLV video tag
                MediaFormat outputFormat = mEncoder.getOutputFormat();
                byte[] AVCDecoderConfigurationRecord = Packager.H264Packager.generateAVCDecoderConfigurationRecord(outputFormat);
                int packetLen = Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH +
                        AVCDecoderConfigurationRecord.length;
                byte[] finalBuff = new byte[packetLen];
                Packager.FLVPackager.fillFlvVideoTag(finalBuff,
                        0,
                        true,
                        true,
                        AVCDecoderConfigurationRecord.length);
                System.arraycopy(AVCDecoderConfigurationRecord, 0,
                        finalBuff, Packager.FLVPackager.FLV_VIDEO_TAG_LENGTH, AVCDecoderConfigurationRecord.length);
                H264Data data = new H264Data(finalBuff, 1, 10);
                if (mH264Collecter != null) {
                    mH264Collecter.collect(data);
                }
            }
            while (outputBufferIndex >= 0) {
                ByteBuffer outputBuffer = mEncoder.getOutputBuffer(outputBufferIndex);
                byte[] outData = new byte[mBufferInfo.size];
                outputBuffer.get(outData);
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // Codec config buffer (SPS/PPS): remember it so it can be prepended to key frames
                    configbyte = outData;
                } else if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0) {
                    // Key frame: send config bytes + frame so a client can join mid-stream
                    byte[] keyframe = new byte[mBufferInfo.size + configbyte.length];
                    System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
                    System.arraycopy(outData, 0, keyframe, configbyte.length, outData.length);
                    H264Data data = new H264Data(keyframe, 1, mBufferInfo.presentationTimeUs * 1000);
                    if (mH264Collecter != null) {
                        mH264Collecter.collect(data);
                    }
                } else {
                    // Ordinary (non-key) frame
                    H264Data data = new H264Data(outData, 2, mBufferInfo.presentationTimeUs * 1000);
                    if (mH264Collecter != null) {
                        mH264Collecter.collect(data);
                    }
                }
                // Encoder output is never rendered to a Surface, so pass false here
                mEncoder.releaseOutputBuffer(outputBufferIndex, false);
                outputBufferIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    try {
        mEncoder.stop();
        mEncoder.release();
        mEncoder = null;
    } catch (Exception e) {
        e.printStackTrace();
    }
}
This completes the pipeline from screen recording to a raw H.264 stream.
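H264Data and H264DataCollecter are project classes whose source is not shown above. Judging from how they are used in getBuffer(), a plausible minimal shape would be the following sketch (the field names are assumptions):
// Assumed minimal shapes; the real project classes may differ
public class H264Data {
    public final byte[] data; // raw H.264 bytes (config record, key frame or ordinary frame)
    public final int type;    // 1 = config/key frame, 2 = non-key frame, as used in getBuffer()
    public final long pts;    // presentation timestamp in nanoseconds

    public H264Data(byte[] data, int type, long pts) {
        this.data = data;
        this.type = type;
        this.pts = pts;
    }
}

public interface H264DataCollecter {
    void collect(H264Data data); // called on the recording thread for every encoded buffer
}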
How do we publish the stream?
1. Depend on an RTSP library
Open-source RTSP project: libstreaming
Steps:
1. bindService and call start() on the RTSP server
// Bind to the RtspServer service; it starts listening once connected
bindService(new Intent(this, RtspServer.class), mRtspServiceConnection, Context.BIND_AUTO_CREATE);

private ServiceConnection mRtspServiceConnection = new ServiceConnection() {
    @Override
    public void onServiceConnected(ComponentName name, IBinder service) {
        mRtspServer = ((RtspServer.LocalBinder) service).getService();
        mRtspServer.addCallbackListener(mRtspCallbackListener);
        mRtspServer.start(); // opens the server socket and waits for players
    }

    @Override
    public void onServiceDisconnected(ComponentName name) {}
};
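If you use libstreaming's RtspServer as a bound Service like this, it also needs to be declared in AndroidManifest.xml. A sketch, assuming the stock libstreaming package name (adjust it if you copied and modified the sources):
<!-- Assumed stock package name from the libstreaming project -->
<service android:name="net.majorkernelpanic.streaming.rtsp.RtspServer" />
Stock libstreaming listens on RTSP port 8086 by default (configurable through its preferences), so the playback URL is typically rtsp://<phone-ip>:8086/ unless the project changed it.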
2. Collect the encoded frames and hand them to the RTSP server
@Override
public void collect(H264Data data) {
    // Every encoded frame goes into the shared buffer that the RTSP worker threads consume
    DataUtil.getInstance().putData(data);
}
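DataUtil is the hand-off point between the encoder thread and the RTSP worker threads, and its source is not shown here. A sketch of one way it could be implemented, assuming it is a singleton around a bounded blocking queue (class layout and capacity are assumptions):
// Assumed sketch of DataUtil: a singleton blocking queue between encoder and RTSP sender
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

public class DataUtil {
    private static final DataUtil INSTANCE = new DataUtil();
    // bounded, so a slow network drops frames instead of exhausting memory
    private final BlockingQueue<H264Data> queue = new ArrayBlockingQueue<>(100);

    public static DataUtil getInstance() { return INSTANCE; }

    public void putData(H264Data data) {
        // drop the oldest frame if the consumer cannot keep up
        if (!queue.offer(data)) {
            queue.poll();
            queue.offer(data);
        }
    }

    public H264Data takeData() throws InterruptedException {
        return queue.take(); // blocks until the encoder produces the next frame
    }
}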
//RtspServer
public void run() {
    Log.i(TAG, "RTSP server listening on port " + mServer.getLocalPort());
    // Accept loop: every connecting player gets its own WorkerThread
    while (!Thread.interrupted()) {
        try {
            new WorkerThread(mServer.accept()).start();
        } catch (SocketException e) {
            // the server socket was closed: stop accepting
            break;
        } catch (IOException e) {
            Log.e(TAG, e.getMessage());
            continue;
        }
    }
    Log.i(TAG, "RTSP server stopped !");
}
//Response
public void send(OutputStream output) throws IOException {
    // Echo the client's CSeq header back in the response, as RTSP requires
    int seqid = -1;
try {
if (mRequest != null && mRequest.headers != null){
if (!TextUtils.isEmpty(mRequest.headers.get("cseq"))){
seqid = Integer.parseInt(mRequest.headers.get("cseq").replace(" ",""));
}
}
} catch (Exception e) {
Log.e(TAG,"Error parsing CSeq: "+(e.getMessage()!=null?e.getMessage():""));
}
String response = "RTSP/1.0 "+status+"\r\n" +
"Server: "+SERVER_NAME+"\r\n" +
(seqid>=0?("Cseq: " + seqid + "\r\n"):"") +
"Content-Length: " + content.length() + "\r\n" +
attributes +
"\r\n" +
content;
Log.d(TAG,response.replace("\r", ""));
output.write(response.getBytes());
}
}
For the full details, read the libstreaming source. This completes the encoding-and-publishing stage.
How do we play the stream?
Simple: use the open-source VLC project (libVLC). Prebuilt Android binaries already exist, so you can use them directly.
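One way to pull the prebuilt binaries in is through Gradle; the artifact coordinates and version below are assumptions, so check VideoLAN's current documentation before relying on them:
// app/build.gradle - coordinates and version are illustrative
implementation 'org.videolan.android:libvlc-all:3.1.12'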
Steps:
1. Layout
<SurfaceView
android:id="@+id/player_surface"
android:layout_width="match_parent"
android:layout_height="match_parent" />
2. Instantiate and configure
private LibVLC libVLC = null;
private MediaPlayer mediaPlayer;
private SurfaceHolder mSurfaceHolder;
SurfaceView mPlayView;

// Playback configuration
ArrayList<String> options = new ArrayList<>();
libVLC = new LibVLC(getApplication(), options);
mediaPlayer = new MediaPlayer(libVLC);
mSurfaceHolder = mPlayView.getHolder();
mSurfaceHolder.setFixedSize(DisplayUtil.getDisplayW(this), DisplayUtil.getDisplayH(this));
mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // deprecated, a no-op on API 11+
// Attach the SurfaceView to libVLC's video output
mediaPlayer.getVLCVout().setVideoSurface(mPlayView.getHolder().getSurface(), mSurfaceHolder);
mediaPlayer.getVLCVout().attachViews();
Media media = new Media(libVLC, Uri.parse(playUrl)); // playUrl: the rtsp:// address of the recording phone
mediaPlayer.setMedia(media);

@Override
protected void onPause() {
    super.onPause();
    if (mediaPlayer != null) {
        mediaPlayer.pause();
    }
}

@Override
protected void onResume() {
    super.onResume();
    if (mediaPlayer != null) {
        mediaPlayer.play();
    }
}

@Override
protected void onDestroy() {
    super.onDestroy();
    if (mediaPlayer != null) {
        mediaPlayer.release();
        mediaPlayer = null;
    }
}
3. Play
mediaPlayer.play();
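The options list in step 2 was left empty. For a live RTSP source you will usually want to shrink the network cache; a hedged example (the values are illustrative and should be tuned):
// Illustrative libVLC options for lower-latency live playback
ArrayList<String> options = new ArrayList<>();
options.add("--network-caching=300"); // buffer roughly 300 ms of network data
options.add("--rtsp-tcp");            // fall back to RTSP over TCP if UDP is lossy
libVLC = new LibVLC(getApplication(), options);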
Try out the APK
Thanks
Next article:
Implementing a paperless-meeting wrapper in three steps