【Live Streaming Tech Discussion, Chapter 2】
Video capture — but how exactly do we capture it? This beginner is hard at work...
Capture is the first stage of the whole push-streaming pipeline: it pulls raw video data from the system's capture devices and hands it to the next stage. Video capture involves two kinds of data: audio capture and image capture, each with a completely different input source and data format.
· Audio Capture
Audio data can either be combined with images to form video, or be captured and played back as pure audio; the latter plays an important role in mature scenarios such as online radio and voice broadcasting. Audio capture works by sampling the analog signal in the environment into raw PCM-encoded data, which is then compressed into a format such as MP3 for distribution. Common audio compression formats include MP3, AAC, HE-AAC, Opus, FLAC, Vorbis (Ogg), Speex, and AMR.
The main challenges in audio capture and encoding are latency sensitivity, stutter sensitivity, noise suppression (denoise), acoustic echo cancellation (AEC), voice activity detection (VAD), and the various mixing algorithms.
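To make the PCM capture step above concrete, here is a minimal sketch using Android's standard AudioRecord API. The class and variable names are mine; a real app would run the read loop on a dedicated thread and hand each buffer to an encoder (e.g. an AAC MediaCodec):

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
// Captures raw 16-bit mono PCM at 44.1 kHz from the microphone.
// Requires the RECORD_AUDIO permission.
public class PcmCaptureSketch {
    private static final int SAMPLE_RATE = 44100;
    public void captureOnce() {
        int minBuf = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
                AudioFormat.ENCODING_PCM_16BIT, minBuf * 2);
        byte[] pcm = new byte[minBuf];
        recorder.startRecording();
        int read = recorder.read(pcm, 0, pcm.length); // blocking read of one buffer
        // ... hand the first `read` bytes to an audio encoder here ...
        recorder.stop();
        recorder.release();
    }
}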
· Image Capture
Combining the captured pictures into a continuously played sequence of frames produces the part of a video we can actually see. Image capture works by shooting raw YUV-encoded data with a camera or similar device, which is then compressed into a format such as H.264 for distribution. Common video container formats include MP4, 3GP, AVI, MKV, WMV, MPG, VOB, FLV, SWF, MOV, RMVB, and WebM.
Because images make the strongest direct impression and are also comparatively large, they form the main body of video content. The main challenges in image capture and encoding are device fragmentation, latency sensitivity, stutter sensitivity, and the various image-processing operations such as beautification filters and watermarks.
The main video capture sources are camera capture, screen recording, and pushing a stream from a video file.
【The overview above is excerpted from:】
【See this link for details: http://news.sina.com.cn/o/2016-08-25/doc-ifxvitex8951990.shtml】
Image Capture
1. Camera Capture
Set setPreviewCallback on the Android Camera preview and implement the onPreviewFrame callback to grab every frame of the video stream in real time.
The implementation is as follows:
package com.example.rtpalyerone;
import java.io.IOException;
import java.util.List;
import android.app.Activity;
import android.content.Context;
import android.content.pm.ActivityInfo;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.os.PowerManager;
import android.os.PowerManager.WakeLock;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.Surface;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
public class MainActivity extends Activity {
    private SurfaceView _surface;
    private WakeLock wakeLock;
    public int _iDegrees;
    public boolean _bIsFront;
    public Camera _mCamera;
    private Integer _iCameraCodecType;
    private final int FRAMERATE_DEF = 20;
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
        getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON, WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_PORTRAIT);
        PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE);
        wakeLock = pm.newWakeLock(PowerManager.SCREEN_DIM_WAKE_LOCK, "my tag");
        init();
    }
    private void init() {
        _surface = (SurfaceView) findViewById(R.id.surface);
        //_surface.getHolder().setFixedSize(width, height)
        _surface.getHolder().setKeepScreenOn(true);
        // deprecated since API 11, but harmless; only needed on very old devices
        _surface.getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
        _surface.getHolder().addCallback(new SurceCallBack());
    }
    @Override
    protected void onResume() {
        super.onResume();
        wakeLock.acquire();
    }
    @Override
    protected void onPause() {
        super.onPause();
        wakeLock.release();
    }
    /**
     * Get the current display rotation in degrees.
     * @return 0, 90, 180 or 270
     */
    private int getDisplayRotation() {
        int i = getWindowManager().getDefaultDisplay().getRotation();
        switch (i) {
        case Surface.ROTATION_0:
            return 0;
        case Surface.ROTATION_90:
            return 90;
        case Surface.ROTATION_180:
            return 180;
        case Surface.ROTATION_270:
            return 270;
        }
        return 0;
    }
    /**
     * Compute the preview orientation the camera should use.
     * @param degrees current display rotation
     * @param cameraId id of the camera being opened
     * @return clockwise rotation to pass to setDisplayOrientation()
     */
    private int getDisplayOrientation(int degrees, int cameraId) {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(cameraId, info);
        int result = 0;
        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
            result = (info.orientation + degrees) % 360;
            result = (360 - result) % 360; // compensate for the front camera's mirroring
        } else {
            result = (info.orientation - degrees + 360) % 360;
        }
        return result;
    }
    /**
     * SurfaceView callback
     * @author Administrator
     */
    private class SurceCallBack implements SurfaceHolder.Callback {
        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            _iDegrees = getDisplayOrientation(getDisplayRotation(), 0);
            // Some devices (e.g. the Huawei i7) share one camera for front and back
            if (Camera.getNumberOfCameras() == 1) {
                _bIsFront = false;
                _mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_BACK);
            } else {
                _mCamera = Camera.open(Camera.CameraInfo.CAMERA_FACING_FRONT);
            }
            InitCamera();
        }
        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int width,
                int height) {
            _mCamera.autoFocus(new Camera.AutoFocusCallback() {
                @Override
                public void onAutoFocus(boolean success, Camera camera) {
                    if (success) {
                        InitCamera();
                        camera.cancelAutoFocus(); // without this call, autofocus will not re-trigger
                    }
                }
            });
        }
        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
        }
    }
    /**
     * Initialize the camera settings
     */
    private void InitCamera() {
        Camera.Parameters p = _mCamera.getParameters();
        Size previewSize = p.getPreviewSize();
        showlog("Original width: " + previewSize.width + ", height: " + previewSize.height);
        List<Size> previewSizeList = p.getSupportedPreviewSizes();
        List<Integer> previewFormats = p.getSupportedPreviewFormats();
        showlog("Listing all supported preview sizes");
        for (Camera.Size size : previewSizeList) {
            showlog("  w: " + size.width + ", h: " + size.height);
        }
        showlog("Listing all supported preview formats");
        Integer iNV21Flag = 0;
        Integer iYV12Flag = 0;
        for (Integer yuvFormat : previewFormats) {
            if (yuvFormat == android.graphics.ImageFormat.YV12) {
                iYV12Flag = android.graphics.ImageFormat.YV12;
            }
            if (yuvFormat == android.graphics.ImageFormat.NV21) {
                iNV21Flag = android.graphics.ImageFormat.NV21;
            }
        }
        // Prefer NV21, fall back to YV12
        if (iNV21Flag != 0) {
            _iCameraCodecType = iNV21Flag;
        } else if (iYV12Flag != 0) {
            _iCameraCodecType = iYV12Flag;
        }
        p.setPreviewFormat(_iCameraCodecType);
        p.setPreviewFrameRate(FRAMERATE_DEF);
        _mCamera.setDisplayOrientation(_iDegrees);
        p.setRotation(_iDegrees);
        _mCamera.setPreviewCallback(_previewCallback);
        _mCamera.setParameters(p);
        try {
            _mCamera.setPreviewDisplay(_surface.getHolder());
        } catch (IOException e) {
            e.printStackTrace();
        }
        _mCamera.cancelAutoFocus(); // without this call, autofocus will not trigger
        _mCamera.startPreview();
    }
    /**
     * Receives each raw frame captured by the camera
     */
    private Camera.PreviewCallback _previewCallback = new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] YUV, Camera currentCamera) {
            /*
             * YUV holds the raw frame data from the camera; it still needs
             * to be converted and encoded before it can be pushed out.
             */
        }
    };
    private void showlog(String logmsg) {
        Log.e("log", logmsg);
    }
}
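As the comment in onPreviewFrame says, the raw data still has to be converted before encoding. Most H.264 encoders expect I420 input while the old Camera API delivers NV21, so a conversion like the following is typically the first step (an illustrative helper, not part of the original sample; width/height must match the configured preview size):

// Converts an NV21 preview frame to I420 (Y plane, then U plane, then V plane).
public static byte[] nv21ToI420(byte[] nv21, int width, int height) {
    byte[] i420 = new byte[width * height * 3 / 2];
    int ySize = width * height;
    // The Y plane is identical in both layouts.
    System.arraycopy(nv21, 0, i420, 0, ySize);
    // NV21 stores interleaved V,U pairs after the Y plane;
    // I420 stores a full U plane followed by a full V plane.
    int uIndex = ySize;             // start of the U plane in I420
    int vIndex = ySize + ySize / 4; // start of the V plane in I420
    for (int i = ySize; i < nv21.length; i += 2) {
        i420[vIndex++] = nv21[i];     // V
        i420[uIndex++] = nv21[i + 1]; // U
    }
    return i420;
}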
Note: don't forget to declare the required permissions in AndroidManifest.xml:
<uses-permission android:name="android.permission.CAMERA"/>
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WAKE_LOCK" />
<uses-permission android:name="android.permission.READ_PHONE_STATE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
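One caveat this sample predates: on Android 6.0 (API 23) and above, CAMERA and RECORD_AUDIO are "dangerous" permissions, so if the app targets API 23+ the manifest entries alone are not enough and a runtime request is also needed, for example:

// In the Activity, e.g. at the top of onCreate() (API 23+ only):
if (checkSelfPermission(android.Manifest.permission.CAMERA)
        != android.content.pm.PackageManager.PERMISSION_GRANTED) {
    requestPermissions(new String[] {
            android.Manifest.permission.CAMERA,
            android.Manifest.permission.RECORD_AUDIO }, 1); // 1 = arbitrary request code
}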
2. Screen Recording
Because this approach relies on the MediaProjectionManager, VirtualDisplay, MediaCodec, and MediaMuxer APIs, the project only supports Android 5.0 and above.
How it works:
- The contents of the Display can be "projected" onto a VirtualDisplay.
- The VirtualDisplay is created from the MediaProjection obtained via the MediaProjectionManager.
- The VirtualDisplay renders its frames into a Surface, and that Surface is created by MediaCodec.
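Before the full listing, here is a compact sketch of that wiring (the method and variable names are mine; the MediaProjection is assumed to come from onActivityResult as shown below):

// Encoder Surface <- VirtualDisplay <- MediaProjection, in a dozen lines.
Surface startCapture(MediaProjection projection, int width, int height, int dpi)
        throws IOException {
    MediaFormat format = MediaFormat.createVideoFormat("video/avc", width, height);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
    MediaCodec encoder = MediaCodec.createEncoderByType("video/avc");
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Surface input = encoder.createInputSurface(); // the Surface is created by MediaCodec
    encoder.start();
    projection.createVirtualDisplay("sketch-display", width, height, dpi,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC, input, null, null);
    return input; // frames now flow: screen -> VirtualDisplay -> encoder
}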
The full code is as follows:
/*
* Copyright (c) 2014 Yrom Wang <http://www.yrom.net>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.rtpalyerone.screen;
import android.app.Activity;
import android.content.Intent;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import java.io.File;
import com.example.rtpalyerone.R;
public class MainActivity extends Activity implements View.OnClickListener {
private static final int REQUEST_CODE = 1;
private MediaProjectionManager mMediaProjectionManager;
private ScreenRecorder mRecorder;
private Button mButton;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.screen_main);
mButton = (Button) findViewById(R.id.button);
mButton.setOnClickListener(this);
//noinspection ResourceType
mMediaProjectionManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
// Ignore results that are not from our screen-capture request or were cancelled
if (requestCode != REQUEST_CODE || resultCode != RESULT_OK || data == null) {
return;
}
MediaProjection mediaProjection = mMediaProjectionManager.getMediaProjection(resultCode, data);
if (mediaProjection == null) {
Log.e("@@", "media projection is null");
return;
}
// video size
final int width = 720;
final int height = 1280;
File file = new File(Environment.getExternalStorageDirectory(),
"record-" + width + "x" + height + "-" + System.currentTimeMillis() + ".mp4");
final int bitrate = 6000000;
mRecorder = new ScreenRecorder(width, height, bitrate, 1, mediaProjection, file.getAbsolutePath());
mRecorder.start();
mButton.setText("Stop Recorder");
Toast.makeText(this, "Screen recorder is running...", Toast.LENGTH_SHORT).show();
moveTaskToBack(true);
}
@Override
public void onClick(View v) {
if (mRecorder != null) {
mRecorder.quit();
mRecorder = null;
mButton.setText("Restart recorder");
} else {
Intent captureIntent = mMediaProjectionManager.createScreenCaptureIntent();
startActivityForResult(captureIntent, REQUEST_CODE);
}
}
@Override
protected void onDestroy() {
super.onDestroy();
if(mRecorder != null){
mRecorder.quit();
mRecorder = null;
}
}
}
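A small detail worth flagging: the Activity above passes a hard-coded 1 as the dpi argument to ScreenRecorder, which works but is not what createVirtualDisplay really expects. The real screen density could be queried like this (a sketch, not part of the original sample):

// Inside the Activity, before constructing ScreenRecorder:
android.util.DisplayMetrics metrics = new android.util.DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
int dpi = metrics.densityDpi; // pass this instead of the hard-coded 1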
/*
* Copyright (c) 2014 Yrom Wang <http://www.yrom.net>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.rtpalyerone.screen;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.projection.MediaProjection;
import android.util.Log;
import android.view.Surface;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicBoolean;
/**
* @author Yrom
*/
public class ScreenRecorder extends Thread {
private static final String TAG = "ScreenRecorder";
private int mWidth;
private int mHeight;
private int mBitRate;
private int mDpi;
private String mDstPath;
private MediaProjection mMediaProjection;
// parameters for the encoder
private static final String MIME_TYPE = "video/avc"; // H.264 Advanced Video Coding
private static final int FRAME_RATE = 30; // 30 fps
private static final int IFRAME_INTERVAL = 10; // 10 seconds between I-frames
private static final int TIMEOUT_US = 10000;
private MediaCodec mEncoder;
private Surface mSurface;
private MediaMuxer mMuxer;
private boolean mMuxerStarted = false;
private int mVideoTrackIndex = -1;
private AtomicBoolean mQuit = new AtomicBoolean(false);
private MediaCodec.BufferInfo mBufferInfo = new MediaCodec.BufferInfo();
private VirtualDisplay mVirtualDisplay;
public ScreenRecorder(int width, int height, int bitrate, int dpi, MediaProjection mp, String dstPath) {
super(TAG);
mWidth = width;
mHeight = height;
mBitRate = bitrate;
mDpi = dpi;
mMediaProjection = mp;
mDstPath = dstPath;
}
public ScreenRecorder(MediaProjection mp) {
// 480p 2Mbps
this(640, 480, 2000000, 1, mp, "/sdcard/test.mp4");
}
/**
* stop task
*/
public final void quit() {
mQuit.set(true);
}
@Override
public void run() {
try {
try {
prepareEncoder();
mMuxer = new MediaMuxer(mDstPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
throw new RuntimeException(e);
}
mVirtualDisplay = mMediaProjection.createVirtualDisplay(TAG + "-display",
mWidth, mHeight, mDpi, DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC,
mSurface, null, null);
Log.d(TAG, "created virtual display: " + mVirtualDisplay);
recordVirtualDisplay();
} finally {
release();
}
}
private void recordVirtualDisplay() {
while (!mQuit.get()) {
int index = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
Log.i(TAG, "dequeue output buffer index=" + index);
if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
resetOutputFormat();
} else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
Log.d(TAG, "retrieving buffers timed out");
try {
// wait 10ms
Thread.sleep(10);
} catch (InterruptedException e) {
}
} else if (index >= 0) {
if (!mMuxerStarted) {
throw new IllegalStateException("muxer hasn't started: addTrack(format) was not called");
}
encodeToVideoTrack(index);
mEncoder.releaseOutputBuffer(index, false);
}
}
}
private void encodeToVideoTrack(int index) {
ByteBuffer encodedData = mEncoder.getOutputBuffer(index);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status.
// Ignore it.
Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size == 0) {
Log.d(TAG, "info.size == 0, drop it.");
encodedData = null;
} else {
Log.d(TAG, "got buffer, info: size=" + mBufferInfo.size
+ ", presentationTimeUs=" + mBufferInfo.presentationTimeUs
+ ", offset=" + mBufferInfo.offset);
}
if (encodedData != null) {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mVideoTrackIndex, encodedData, mBufferInfo);
Log.i(TAG, "sent " + mBufferInfo.size + " bytes to muxer...");
}
}
private void resetOutputFormat() {
// should happen before receiving buffers, and should only happen once
if (mMuxerStarted) {
throw new IllegalStateException("output format already changed!");
}
MediaFormat newFormat = mEncoder.getOutputFormat();
Log.i(TAG, "output format changed.\n new format: " + newFormat.toString());
mVideoTrackIndex = mMuxer.addTrack(newFormat);
mMuxer.start();
mMuxerStarted = true;
Log.i(TAG, "started media muxer, videoIndex=" + mVideoTrackIndex);
}
private void prepareEncoder() throws IOException {
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
Log.d(TAG, "created video format: " + format);
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mSurface = mEncoder.createInputSurface();
Log.d(TAG, "created input surface: " + mSurface);
mEncoder.start();
}
private void release() {
if (mEncoder != null) {
mEncoder.stop();
mEncoder.release();
mEncoder = null;
}
if (mVirtualDisplay != null) {
mVirtualDisplay.release();
}
if (mMediaProjection != null) {
mMediaProjection.stop();
}
if (mMuxer != null) {
mMuxer.stop();
mMuxer.release();
mMuxer = null;
}
}
}
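A closing note on shutdown: quit() only flips a flag, so frames still queued inside the encoder when the loop exits never reach the muxer. A cleaner variant (my sketch, not part of the original code) would signal end-of-stream on the input surface and drain until the encoder confirms it:

// Hypothetical addition to ScreenRecorder: flush the encoder before release().
private void drainToEndOfStream() {
    mEncoder.signalEndOfInputStream(); // valid because the encoder input is a Surface
    while (true) {
        int index = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
        if (index < 0) continue; // skip try-again / format-changed signals while draining
        boolean eos = (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
        encodeToVideoTrack(index);
        mEncoder.releaseOutputBuffer(index, false);
        if (eos) break; // every pending frame has now been written to the muxer
    }
}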