XMPP as WebRTC Signaling for Audio/Video Calls, Part 02: Creating the PeerConnection
2018-12-19 · 张俊峰0613
All of the PeerConnection-creation logic is encapsulated in PeerConnectionClient, and it must run on a single thread; the PeerConnection itself is produced by a PeerConnectionFactory.
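In practice the single-thread requirement is met by pushing every WebRTC call onto one executor, which is what the executor.execute(...) calls below do. The declaration shown here is only a sketch; the post shows the executor being used (and a commented-out Executors.newSingleThreadScheduledExecutor() call), not how it is actually declared.

// Single-threaded executor: every WebRTC operation in PeerConnectionClient is
// posted here, so the native objects are only ever touched from one thread.
// (This declaration is illustrative; the original class's own field is not shown.)
private final java.util.concurrent.ExecutorService executor =
        java.util.concurrent.Executors.newSingleThreadExecutor();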
1. Creating the PeerConnectionFactory
As mentioned earlier, PeerConnectionFactory is a factory class used to produce PeerConnection instances; it is also responsible for initializing the global state and the interaction with the underlying native layer.
/**
* Creates the PeerConnectionFactory.
* @param context
* @param localRender local video renderer
* @param renderEGLContext
* @param peerConnectionParameters
*/
public void createPeerConnectionFactory(
final Context context,
final VideoRenderer.Callbacks localRender,
final EglBase.Context renderEGLContext,
final PeerConnectionParameters peerConnectionParameters) {
this.peerConnectionParameters = peerConnectionParameters;
this.localRender = localRender;
videoCallEnabled = peerConnectionParameters.videoCallEnabled;
if(localRender==null)
videoCallEnabled = false;
// Reset variables to initial states.
factory = null;
preferIsac = false;
videoCapturerStopped = false;
isError = false;
mediaStream = null;
videoCapturer = null;
renderVideo = true;
localVideoTrack = null;
enableAudio = true;
localAudioTrack = null;
this.videoWidth = peerConnectionParameters.videoWidth;
this.videoHeight = peerConnectionParameters.videoHeight;
this.videoFps = peerConnectionParameters.videoFps;
statsTimer = new Timer();
//Used to store the remote peer's IceCandidates
queuedRemoteCandidates = new ConcurrentHashMap<String,LinkedList<IceCandidate>>();
if(isRTCClosed) return;
executor.execute(new Runnable() {
@Override
public void run() {
if(isRTCClosed) return;
try {
//Create the media constraints that will be used when the PeerConnection is created
createMediaConstraintsInternal();
createPeerConnectionFactoryInternal(context, renderEGLContext);
}
catch (Exception e){
reportError("Failed to create peer connection: " + e.getMessage());
return;
}
}
});
}
The media constraints are built mainly from the values carried in PeerConnectionParameters:
/**
* Creates the media constraints.
*/
private void createMediaConstraintsInternal() {
// Create peer connection constraints.
pcConstraints = new MediaConstraints();
//loopback: whether the client is calling itself
if (peerConnectionParameters.loopback) {
pcConstraints.optional.add(
new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "false"));
} else {
pcConstraints.optional.add(
new MediaConstraints.KeyValuePair(DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT, "true"));
}
pcConstraints.optional.add(new MediaConstraints.KeyValuePair(
"RtpDataChannels", "true"));
if (videoCallEnabled) {
videoWidth = peerConnectionParameters.videoWidth;
videoHeight = peerConnectionParameters.videoHeight;
videoFps = peerConnectionParameters.videoFps;
if (videoWidth == 0 || videoHeight == 0) {
videoWidth = HD_VIDEO_WIDTH;
videoHeight = HD_VIDEO_HEIGHT;
}
if (videoFps == 0) {
videoFps = 30;
}
Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps);
}
audioConstraints = new MediaConstraints();
if (peerConnectionParameters.noAudioProcessing) {
Log.d(TAG, "Disabling audio processing");
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false"));
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false"));
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false"));
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false"));
}
// Create SDP constraints.
sdpMediaConstraints = new MediaConstraints();
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveAudio", "true"));
if (videoCallEnabled || peerConnectionParameters.loopback) {
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveVideo", "true"));
} else {
sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(
"OfferToReceiveVideo", "false"));
}
}
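For reference, all of the settings read by the two methods above come from PeerConnectionParameters. The sketch below only lists the fields that this post actually references; the real class in the project almost certainly declares more options and a constructor.

// Inferred from the fields used above; the actual class in the project may differ.
public static class PeerConnectionParameters {
    public boolean videoCallEnabled;
    public boolean loopback;
    public boolean tracing;
    public int videoWidth;
    public int videoHeight;
    public int videoFps;
    public String videoCodec;              // "VP8", "VP9", "H264 Baseline" or "H264 High"
    public boolean videoCodecHwAcceleration;
    public boolean videoFlexfecEnabled;
    public String audioCodec;              // e.g. "ISAC"
    public boolean noAudioProcessing;
    public boolean useOpenSLES;
    public boolean disableBuiltInAEC;
    public boolean disableBuiltInAGC;
    public boolean disableBuiltInNS;
}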
Next, the PeerConnectionFactory instance and the MediaStream instance are created:
/**
* createPeerConnectionFactory
* @param context
* @param renderEGLContext
*/
private void createPeerConnectionFactoryInternal(Context context, final EglBase.Context renderEGLContext) {
PeerConnectionFactory.initializeInternalTracer();
if (peerConnectionParameters.tracing) {
PeerConnectionFactory.startInternalTracingCapture(
Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator
+ "webrtc-trace.txt");
}
Log.d(TAG, "Create peer connection factory. Use video: " +
peerConnectionParameters.videoCallEnabled);
isError = false;
String fieldTrials = "";
if (peerConnectionParameters.videoFlexfecEnabled) {
fieldTrials += VIDEO_FLEXFEC_FIELDTRIAL;
Log.d(TAG, "Enable FlexFEC field trial.");
}
fieldTrials += VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL;
preferredVideoCodec = VIDEO_CODEC_VP8;
if (videoCallEnabled && peerConnectionParameters.videoCodec != null) {
switch (peerConnectionParameters.videoCodec) {
case VIDEO_CODEC_VP8:
preferredVideoCodec = VIDEO_CODEC_VP8;
break;
case VIDEO_CODEC_VP9:
preferredVideoCodec = VIDEO_CODEC_VP9;
break;
case VIDEO_CODEC_H264_BASELINE:
preferredVideoCodec = VIDEO_CODEC_H264;
break;
case VIDEO_CODEC_H264_HIGH:
// TODO(magjed): Strip High from SDP when selecting Baseline instead of using field trial.
fieldTrials += VIDEO_H264_HIGH_PROFILE_FIELDTRIAL;
preferredVideoCodec = VIDEO_CODEC_H264;
break;
default:
preferredVideoCodec = VIDEO_CODEC_VP8;
}
}
// Initialize field trials.
Log.d(TAG, "Preferred video codec: " + preferredVideoCodec);
PeerConnectionFactory.initializeFieldTrials(fieldTrials);
preferIsac = false;
if (peerConnectionParameters.audioCodec != null
&& peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC)) {
preferIsac = true;
}
if (!peerConnectionParameters.useOpenSLES) {
Log.d(TAG, "Disable OpenSL ES audio even if device supports it");
WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(true /* enable */);
} else {
Log.d(TAG, "Allow OpenSL ES audio if device supports it");
WebRtcAudioManager.setBlacklistDeviceForOpenSLESUsage(false);
}
if (peerConnectionParameters.disableBuiltInAEC) {
Log.d(TAG, "Disable built-in AEC even if device supports it");
WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(true);
} else {
Log.d(TAG, "Enable built-in AEC if device supports it");
WebRtcAudioUtils.setWebRtcBasedAcousticEchoCanceler(false);
}
if (peerConnectionParameters.disableBuiltInAGC) {
Log.d(TAG, "Disable built-in AGC even if device supports it");
WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(true);
} else {
Log.d(TAG, "Enable built-in AGC if device supports it");
WebRtcAudioUtils.setWebRtcBasedAutomaticGainControl(false);
}
if (peerConnectionParameters.disableBuiltInNS) {
Log.d(TAG, "Disable built-in NS even if device supports it");
WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(true);
} else {
Log.d(TAG, "Enable built-in NS if device supports it");
WebRtcAudioUtils.setWebRtcBasedNoiseSuppressor(false);
}
WebRtcAudioRecord.setErrorCallback(new WebRtcAudioRecord.WebRtcAudioRecordErrorCallback() {
@Override
public void onWebRtcAudioRecordInitError(String errorMessage) {
Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage);
reportError(errorMessage);
}
@Override
public void onWebRtcAudioRecordStartError(
WebRtcAudioRecord.AudioRecordStartErrorCode errorCode, String errorMessage) {
Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage);
reportError(errorMessage);
}
@Override
public void onWebRtcAudioRecordError(String errorMessage) {
Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage);
reportError(errorMessage);
}
});
WebRtcAudioTrack.setErrorCallback(new WebRtcAudioTrack.WebRtcAudioTrackErrorCallback() {
@Override
public void onWebRtcAudioTrackInitError(String errorMessage) {
reportError(errorMessage);
}
@Override
public void onWebRtcAudioTrackStartError(String errorMessage) {
reportError(errorMessage);
}
@Override
public void onWebRtcAudioTrackError(String errorMessage) {
reportError(errorMessage);
}
});
PeerConnectionFactory.initializeAndroidGlobals(context, peerConnectionParameters.videoCodecHwAcceleration);
if (options != null) {
Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask);
}
//Create the PeerConnectionFactory
factory = new PeerConnectionFactory(options);
Log.d(TAG, "Peer connection factory created.");
//Create the local media stream
mediaStream = factory.createLocalMediaStream("ARDAMS");
if (videoCallEnabled) {
//Create the VideoCapturer
videoCapturer = createVideoCapturer();
if (videoCapturer == null) {
Log.e(TAG,"Failed to open camera");
}
else {
//Create the video track and add it to the media stream
mediaStream.addTrack(createVideoTrack(videoCapturer));
}
}
//Create the audio track and add it to the media stream.
//If videoCallEnabled == true, the mediaStream now carries two tracks.
mediaStream.addTrack(createAudioTrack());
if (videoCallEnabled) {
Log.d(TAG, "EGLContext: " + renderEGLContext);
//renderEGLContext is needed for hardware-accelerated video
factory.setVideoHwAccelerationOptions(renderEGLContext, renderEGLContext);
}
}
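From the calling side (an Activity, for instance) the factory is created once, before any call is placed. The following is only a usage sketch under assumed names: eglBase, localRenderer, R.id.local_view and peerConnectionParameters are placeholders, and the way the PeerConnectionClient instance is obtained depends on the project.

// Usage sketch with placeholder names; in this WebRTC build SurfaceViewRenderer
// implements VideoRenderer.Callbacks, so it can be passed as the local renderer.
EglBase eglBase = EglBase.create();
SurfaceViewRenderer localRenderer = (SurfaceViewRenderer) findViewById(R.id.local_view);
localRenderer.init(eglBase.getEglBaseContext(), null);

PeerConnectionClient client = new PeerConnectionClient();   // or however the project obtains it
client.createPeerConnectionFactory(
        getApplicationContext(),
        localRenderer,                    // local video (may be null for audio-only calls)
        eglBase.getEglBaseContext(),
        peerConnectionParameters);        // built from the user's call settings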
2. Creating the PeerConnection
The previous step created the PeerConnectionFactory instance; the PeerConnection itself can now be created:
/**
*
* @param peerName the remote party
* @param remoteRender renderer for the remote video stream
* @param iceServers list of IceServers
* @param isCallout true when placing a call, false when answering
* @param iceTransportsType
* @param events callbacks fired while the PeerConnection is created and used
*/
public void createPeerConnection(
final String peerName,
final VideoRenderer.Callbacks remoteRender,
final List<PeerConnection.IceServer> iceServers,
final boolean isCallout,
final PeerConnection.IceTransportsType iceTransportsType,
final PeerConnectionEvents events) {
Log.e(TAG,"createPeerConnection...");
this.peerName = peerName;
this.isCallout = isCallout;
//this.executor = Executors.newSingleThreadScheduledExecutor();
this.remoteRender = remoteRender;
isError = false;
this.events = events;
if (peerConnectionParameters == null) {
Log.e(TAG, "Creating peer connection without initializing factory.");
return;
}
executor.execute(new Runnable() {
@Override
public void run() {
createPeerConnectionInternal(mediaStream, iceServers, iceTransportsType);
peerConnections.put(peerName,peerConnection);
}
});
}
createPeerConnectionInternal creates the PeerConnection and adds the local media stream to it. Creating a PeerConnection takes three arguments: RTCConfiguration rtcConfig, MediaConstraints constraints, and Observer observer.
The constraints were already built when the factory was created, rtcConfig is assembled inside this method, and the observer still has to be implemented.
/**
*
* @param localmediaStream the local media stream
* @param iceServers list of IceServers
* @param iceTransportsType relayed or P2P
*/
private void createPeerConnectionInternal(MediaStream localmediaStream, List<PeerConnection.IceServer> iceServers, PeerConnection.IceTransportsType iceTransportsType) {
PeerConnection.RTCConfiguration rtcConfig =
new PeerConnection.RTCConfiguration(iceServers);
rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED;
if(PreferenceUtil.getInstance().getString("BundlePolicy","0").equals("0")){
rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.BALANCED;
}else if(PreferenceUtil.getInstance().getString("BundlePolicy","0").equals("1")) {
rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE;
}else if(PreferenceUtil.getInstance().getString("BundlePolicy","0").equals("2")){
rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXCOMPAT;
}
if(PreferenceUtil.getInstance().getString("RtcpMuxPolicy","0").equals("0")){
rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.NEGOTIATE;
}else if(PreferenceUtil.getInstance().getString("RtcpMuxPolicy","0").equals("1")){
rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE;
}
rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY;
rtcConfig.iceTransportsType = iceTransportsType;
rtcConfig.keyType = PeerConnection.KeyType.ECDSA;
Log.d(TAG, "createPeerConnection begin .");
//Create the peerConnection instance
peerConnection = factory.createPeerConnection(rtcConfig, pcConstraints, pcObserver);
Log.d(TAG, "createPeerConnection finish .");
if (dataChannelEnabled) {
DataChannel.Init init = new DataChannel.Init();
init.ordered = true;
init.negotiated = false;
init.maxRetransmits = -1;
init.maxRetransmitTimeMs = -1;
init.id = -1;
init.protocol = "";
dataChannel = peerConnection.createDataChannel("ApprtcDemo data", init);
}
Logging.enableTracing("logcat:", EnumSet.of(Logging.TraceLevel.TRACE_DEFAULT));
Logging.enableLogToDebugOutput(Logging.Severity.LS_ERROR);
mediaStream = localmediaStream;
peerConnection.addStream(mediaStream);//Add the local media stream to the peerConnection
Log.d(TAG, "Peer connection created.");
}
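Once the factory has been created, createPeerConnection is called when a call is placed or answered. Again only a sketch: the STUN URI, the peer name, remoteRenderer and events are placeholder values, not taken from the original project.

// Sketch of calling createPeerConnection; all concrete values are placeholders.
List<PeerConnection.IceServer> iceServers = new LinkedList<>();
iceServers.add(new PeerConnection.IceServer("stun:stun.l.google.com:19302"));

client.createPeerConnection(
        "peer@example.com",                        // remote party's identifier (e.g. an XMPP JID)
        remoteRenderer,                            // renderer for the remote video
        iceServers,
        true,                                      // isCallout: true = we are the caller
        PeerConnection.IceTransportsType.ALL,      // ALL allows both P2P and relayed candidates
        events);                                   // PeerConnectionEvents implementation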
3. Implementing PeerConnection.Observer
An inner class of PeerConnectionClient that receives the callbacks fired while the PeerConnection is created and while the connection is running:
private class PCObserver implements PeerConnection.Observer{
@Override
public void onSignalingChange(PeerConnection.SignalingState signalingState) {
Log.d(TAG,"SignalingState: " + signalingState);
}
@Override
public void onIceConnectionChange(final PeerConnection.IceConnectionState iceConnectionState) {
executor.execute(new Runnable() {
@Override
public void run() {
Log.d(TAG,"IceConnectionState: " + iceConnectionState);
if (iceConnectionState == PeerConnection.IceConnectionState.CONNECTED) {
events.onIceConnected();
} else if (iceConnectionState == PeerConnection.IceConnectionState.DISCONNECTED) {
events.onIceDisconnected();
} else if (iceConnectionState == PeerConnection.IceConnectionState.FAILED) {
}
}
});
}
@Override
public void onIceConnectionReceivingChange(final boolean b) {
Log.d(TAG,"IceConnectionReceiving changed to " + b);
}
@Override
public void onIceGatheringChange(final PeerConnection.IceGatheringState iceGatheringState) {
Log.d(TAG,"IceGatheringState: " + iceGatheringState);
}
@Override
public void onIceCandidate(final IceCandidate iceCandidate) {
executor.execute(new Runnable() {
@Override
public void run() {
Log.d(TAG,"onIceCandidate: ");
events.onIceCandidate(iceCandidate);
}
});
}
@Override
public void onIceCandidatesRemoved(final IceCandidate[] iceCandidates) {
executor.execute(new Runnable() {
@Override
public void run() {
Log.d(TAG,"onIceCandidatesRemoved: ");
events.onIceCandidatesRemoved(iceCandidates);
}
});
}
@Override
public void onAddStream(final MediaStream mediaStream) {
executor.execute(new Runnable() {
@Override
public void run() {
Log.d(TAG,"onAddStream " + peerConnection);
if (peerConnection == null || isError) {
return;
}
if (mediaStream.audioTracks.size() > 1 || mediaStream.videoTracks.size() > 1) {
Log.d(TAG,"Weird-looking stream: " + mediaStream);
return;
}
if(mediaStream.audioTracks.size() == 1){
remoteAudioTrack = mediaStream.audioTracks.get(0);
}
if (mediaStream.videoTracks.size() == 1) {
remoteVideoTrack = mediaStream.videoTracks.get(0);
remoteVideoTrack.setEnabled(true);
remoteVideoTrack.addRenderer(new VideoRenderer(remoteRender));
}
}
});
}
@Override
public void onRemoveStream(final MediaStream mediaStream) {
executor.execute(new Runnable() {
@Override
public void run() {
Log.d(TAG,"onRemoveStream");
remoteAudioTrack = null;
remoteVideoTrack = null;
}
});
}
@Override
public void onDataChannel(final DataChannel dataChannel) {
Log.d(TAG,"dc rev!");
}
@Override
public void onRenegotiationNeeded() {
Log.d(TAG,"onRenegotiationNeeded!");
}
@Override
public void onAddTrack(final RtpReceiver rtpReceiver, final MediaStream[] mediaStreams) {
Log.d(TAG,"onAddTrack!");
}
}
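The events object handed to createPeerConnection is how PCObserver reports state changes back to the signaling/UI layer. Judging only from the calls made inside PCObserver above, the interface contains at least the methods below; the real interface in the project very likely declares more (for SDP results, errors, and so on).

// Inferred from the callbacks invoked in PCObserver; the project's actual
// PeerConnectionEvents interface probably declares additional methods.
public interface PeerConnectionEvents {
    void onIceConnected();
    void onIceDisconnected();
    void onIceCandidate(IceCandidate candidate);
    void onIceCandidatesRemoved(IceCandidate[] candidates);
}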
4. Summary
At this point the PeerConnection object has been created, and the audio and video data, wrapped in a MediaStream, have been added to it. With the PeerConnection in place its methods can now be called, but they still need some wrapping first; that will be covered in a later post.