我们从上一篇编译后构建获取的工程一点一点分析。首先,整个Android端的启动围绕着ConnectActivity展开。
从onCreate开始主要代码是获取了一些相应的设置内容与输入信息,如下图所示
其中connectListener触发了拨打视频的动作,其实质是把收集到的信息传递给下一个界面类(CallActivity)
// Tapping the call button reads the room id from the text field and starts
// the connection flow: no command-line run, no loopback, preferences (not
// intent extras) as the settings source, and an unlimited run time.
private final OnClickListener connectListener =
view -> connectToRoom(roomEditText.getText().toString(), false, false, false, 0);
// Connect to the signaling server and enter the room.
// Collects every call-related setting — from SharedPreferences, or from the
// launching Intent's extras when useValuesFromIntent is set — validates the
// room server URL, packs everything into an Intent and starts CallActivity.
//
// roomId              room identifier (replaced by a random number for loopback)
// commandLineRun      true when triggered from the command line; forwarded as EXTRA_CMDLINE
// loopback            loopback-call flag; also randomizes roomId
// useValuesFromIntent read settings from the Intent extras instead of preferences
// runTimeMs           auto-termination time in ms (0 = unlimited); forwarded as EXTRA_RUNTIME
@SuppressWarnings("StringSplitter")
private void connectToRoom(String roomId, boolean commandLineRun, boolean loopback,
boolean useValuesFromIntent, int runTimeMs) {
ConnectActivity.commandLineRun = commandLineRun;
// roomId is random for loopback.
if (loopback) {
roomId = Integer.toString((new Random()).nextInt(100000000));
}
String roomUrl = sharedPref.getString(
keyprefRoomServerUrl, getString(R.string.pref_room_server_url_default));
// Video call enabled flag.
boolean videoCallEnabled = sharedPrefGetBoolean(R.string.pref_videocall_key,
CallActivity.EXTRA_VIDEO_CALL, R.string.pref_videocall_default, useValuesFromIntent);
// Use screencapture option.
boolean useScreencapture = sharedPrefGetBoolean(R.string.pref_screencapture_key,
CallActivity.EXTRA_SCREENCAPTURE, R.string.pref_screencapture_default, useValuesFromIntent);
// Use Camera2 option.
boolean useCamera2 = sharedPrefGetBoolean(R.string.pref_camera2_key, CallActivity.EXTRA_CAMERA2,
R.string.pref_camera2_default, useValuesFromIntent);
// Get default codecs.
String videoCodec = sharedPrefGetString(R.string.pref_videocodec_key,
CallActivity.EXTRA_VIDEOCODEC, R.string.pref_videocodec_default, useValuesFromIntent);
String audioCodec = sharedPrefGetString(R.string.pref_audiocodec_key,
CallActivity.EXTRA_AUDIOCODEC, R.string.pref_audiocodec_default, useValuesFromIntent);
// Check HW codec flag.
boolean hwCodec = sharedPrefGetBoolean(R.string.pref_hwcodec_key,
CallActivity.EXTRA_HWCODEC_ENABLED, R.string.pref_hwcodec_default, useValuesFromIntent);
// Check Capture to texture.
boolean captureToTexture = sharedPrefGetBoolean(R.string.pref_capturetotexture_key,
CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, R.string.pref_capturetotexture_default,
useValuesFromIntent);
// Check FlexFEC.
boolean flexfecEnabled = sharedPrefGetBoolean(R.string.pref_flexfec_key,
CallActivity.EXTRA_FLEXFEC_ENABLED, R.string.pref_flexfec_default, useValuesFromIntent);
// Check Disable Audio Processing flag.
boolean noAudioProcessing = sharedPrefGetBoolean(R.string.pref_noaudioprocessing_key,
CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, R.string.pref_noaudioprocessing_default,
useValuesFromIntent);
boolean aecDump = sharedPrefGetBoolean(R.string.pref_aecdump_key,
CallActivity.EXTRA_AECDUMP_ENABLED, R.string.pref_aecdump_default, useValuesFromIntent);
boolean saveInputAudioToFile =
sharedPrefGetBoolean(R.string.pref_enable_save_input_audio_to_file_key,
CallActivity.EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED,
R.string.pref_enable_save_input_audio_to_file_default, useValuesFromIntent);
// Check OpenSL ES enabled flag.
boolean useOpenSLES = sharedPrefGetBoolean(R.string.pref_opensles_key,
CallActivity.EXTRA_OPENSLES_ENABLED, R.string.pref_opensles_default, useValuesFromIntent);
// Check Disable built-in AEC flag.
boolean disableBuiltInAEC = sharedPrefGetBoolean(R.string.pref_disable_built_in_aec_key,
CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, R.string.pref_disable_built_in_aec_default,
useValuesFromIntent);
// Check Disable built-in AGC flag.
boolean disableBuiltInAGC = sharedPrefGetBoolean(R.string.pref_disable_built_in_agc_key,
CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, R.string.pref_disable_built_in_agc_default,
useValuesFromIntent);
// Check Disable built-in NS flag.
boolean disableBuiltInNS = sharedPrefGetBoolean(R.string.pref_disable_built_in_ns_key,
CallActivity.EXTRA_DISABLE_BUILT_IN_NS, R.string.pref_disable_built_in_ns_default,
useValuesFromIntent);
// Check Disable gain control.
// NOTE(review): the third argument below is the *_key resource, while every
// sibling call passes a *_default resource — verify whether
// R.string.pref_disable_webrtc_agc_and_hpf_default was intended here.
boolean disableWebRtcAGCAndHPF = sharedPrefGetBoolean(
R.string.pref_disable_webrtc_agc_and_hpf_key, CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF,
R.string.pref_disable_webrtc_agc_and_hpf_key, useValuesFromIntent);
// Get video resolution from settings.
int videoWidth = 0;
int videoHeight = 0;
if (useValuesFromIntent) {
videoWidth = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_WIDTH, 0);
videoHeight = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_HEIGHT, 0);
}
if (videoWidth == 0 && videoHeight == 0) {
// Resolution preference is stored as "<width> x <height>"; parse both parts.
String resolution =
sharedPref.getString(keyprefResolution, getString(R.string.pref_resolution_default));
String[] dimensions = resolution.split("[ x]+");
if (dimensions.length == 2) {
try {
videoWidth = Integer.parseInt(dimensions[0]);
videoHeight = Integer.parseInt(dimensions[1]);
} catch (NumberFormatException e) {
// Fall back to "unspecified" on a malformed preference value.
videoWidth = 0;
videoHeight = 0;
Log.e(TAG, "Wrong video resolution setting: " + resolution);
}
}
}
// Get camera fps from settings.
int cameraFps = 0;
if (useValuesFromIntent) {
cameraFps = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_FPS, 0);
}
if (cameraFps == 0) {
String fps = sharedPref.getString(keyprefFps, getString(R.string.pref_fps_default));
String[] fpsValues = fps.split("[ x]+");
if (fpsValues.length == 2) {
try {
// Only the first component of the fps preference is used.
cameraFps = Integer.parseInt(fpsValues[0]);
} catch (NumberFormatException e) {
cameraFps = 0;
Log.e(TAG, "Wrong camera fps setting: " + fps);
}
}
}
// Check capture quality slider flag.
boolean captureQualitySlider = sharedPrefGetBoolean(R.string.pref_capturequalityslider_key,
CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED,
R.string.pref_capturequalityslider_default, useValuesFromIntent);
// Get video and audio start bitrate.
int videoStartBitrate = 0;
if (useValuesFromIntent) {
videoStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_VIDEO_BITRATE, 0);
}
if (videoStartBitrate == 0) {
// A bitrate "type" different from the default means a manual value was set.
String bitrateTypeDefault = getString(R.string.pref_maxvideobitrate_default);
String bitrateType = sharedPref.getString(keyprefVideoBitrateType, bitrateTypeDefault);
if (!bitrateType.equals(bitrateTypeDefault)) {
String bitrateValue = sharedPref.getString(
keyprefVideoBitrateValue, getString(R.string.pref_maxvideobitratevalue_default));
videoStartBitrate = Integer.parseInt(bitrateValue);
}
}
int audioStartBitrate = 0;
if (useValuesFromIntent) {
audioStartBitrate = getIntent().getIntExtra(CallActivity.EXTRA_AUDIO_BITRATE, 0);
}
if (audioStartBitrate == 0) {
String bitrateTypeDefault = getString(R.string.pref_startaudiobitrate_default);
String bitrateType = sharedPref.getString(keyprefAudioBitrateType, bitrateTypeDefault);
if (!bitrateType.equals(bitrateTypeDefault)) {
String bitrateValue = sharedPref.getString(
keyprefAudioBitrateValue, getString(R.string.pref_startaudiobitratevalue_default));
audioStartBitrate = Integer.parseInt(bitrateValue);
}
}
// Check statistics display option.
boolean displayHud = sharedPrefGetBoolean(R.string.pref_displayhud_key,
CallActivity.EXTRA_DISPLAY_HUD, R.string.pref_displayhud_default, useValuesFromIntent);
boolean tracing = sharedPrefGetBoolean(R.string.pref_tracing_key, CallActivity.EXTRA_TRACING,
R.string.pref_tracing_default, useValuesFromIntent);
// Check Enable RtcEventLog.
boolean rtcEventLogEnabled = sharedPrefGetBoolean(R.string.pref_enable_rtceventlog_key,
CallActivity.EXTRA_ENABLE_RTCEVENTLOG, R.string.pref_enable_rtceventlog_default,
useValuesFromIntent);
// Get data channel options.
boolean dataChannelEnabled = sharedPrefGetBoolean(R.string.pref_enable_datachannel_key,
CallActivity.EXTRA_DATA_CHANNEL_ENABLED, R.string.pref_enable_datachannel_default,
useValuesFromIntent);
boolean ordered = sharedPrefGetBoolean(R.string.pref_ordered_key, CallActivity.EXTRA_ORDERED,
R.string.pref_ordered_default, useValuesFromIntent);
boolean negotiated = sharedPrefGetBoolean(R.string.pref_negotiated_key,
CallActivity.EXTRA_NEGOTIATED, R.string.pref_negotiated_default, useValuesFromIntent);
int maxRetrMs = sharedPrefGetInteger(R.string.pref_max_retransmit_time_ms_key,
CallActivity.EXTRA_MAX_RETRANSMITS_MS, R.string.pref_max_retransmit_time_ms_default,
useValuesFromIntent);
int maxRetr =
sharedPrefGetInteger(R.string.pref_max_retransmits_key, CallActivity.EXTRA_MAX_RETRANSMITS,
R.string.pref_max_retransmits_default, useValuesFromIntent);
int id = sharedPrefGetInteger(R.string.pref_data_id_key, CallActivity.EXTRA_ID,
R.string.pref_data_id_default, useValuesFromIntent);
String protocol = sharedPrefGetString(R.string.pref_data_protocol_key,
CallActivity.EXTRA_PROTOCOL, R.string.pref_data_protocol_default, useValuesFromIntent);
// Start AppRTCMobile activity.
Log.d(TAG, "Connecting to room " + roomId + " at URL " + roomUrl);
if (validateUrl(roomUrl)) {
Uri uri = Uri.parse(roomUrl);
Intent intent = new Intent(this, CallActivity.class);
intent.setData(uri);
intent.putExtra(CallActivity.EXTRA_ROOMID, roomId);
intent.putExtra(CallActivity.EXTRA_LOOPBACK, loopback);
intent.putExtra(CallActivity.EXTRA_VIDEO_CALL, videoCallEnabled);
intent.putExtra(CallActivity.EXTRA_SCREENCAPTURE, useScreencapture);
intent.putExtra(CallActivity.EXTRA_CAMERA2, useCamera2);
intent.putExtra(CallActivity.EXTRA_VIDEO_WIDTH, videoWidth);
intent.putExtra(CallActivity.EXTRA_VIDEO_HEIGHT, videoHeight);
intent.putExtra(CallActivity.EXTRA_VIDEO_FPS, cameraFps);
intent.putExtra(CallActivity.EXTRA_VIDEO_CAPTUREQUALITYSLIDER_ENABLED, captureQualitySlider);
intent.putExtra(CallActivity.EXTRA_VIDEO_BITRATE, videoStartBitrate);
intent.putExtra(CallActivity.EXTRA_VIDEOCODEC, videoCodec);
intent.putExtra(CallActivity.EXTRA_HWCODEC_ENABLED, hwCodec);
intent.putExtra(CallActivity.EXTRA_CAPTURETOTEXTURE_ENABLED, captureToTexture);
intent.putExtra(CallActivity.EXTRA_FLEXFEC_ENABLED, flexfecEnabled);
intent.putExtra(CallActivity.EXTRA_NOAUDIOPROCESSING_ENABLED, noAudioProcessing);
intent.putExtra(CallActivity.EXTRA_AECDUMP_ENABLED, aecDump);
intent.putExtra(CallActivity.EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED, saveInputAudioToFile);
intent.putExtra(CallActivity.EXTRA_OPENSLES_ENABLED, useOpenSLES);
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AEC, disableBuiltInAEC);
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_AGC, disableBuiltInAGC);
intent.putExtra(CallActivity.EXTRA_DISABLE_BUILT_IN_NS, disableBuiltInNS);
intent.putExtra(CallActivity.EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, disableWebRtcAGCAndHPF);
intent.putExtra(CallActivity.EXTRA_AUDIO_BITRATE, audioStartBitrate);
intent.putExtra(CallActivity.EXTRA_AUDIOCODEC, audioCodec);
intent.putExtra(CallActivity.EXTRA_DISPLAY_HUD, displayHud);
intent.putExtra(CallActivity.EXTRA_TRACING, tracing);
intent.putExtra(CallActivity.EXTRA_ENABLE_RTCEVENTLOG, rtcEventLogEnabled);
intent.putExtra(CallActivity.EXTRA_CMDLINE, commandLineRun);
intent.putExtra(CallActivity.EXTRA_RUNTIME, runTimeMs);
intent.putExtra(CallActivity.EXTRA_DATA_CHANNEL_ENABLED, dataChannelEnabled);
// Data-channel extras are only attached when the channel is enabled.
if (dataChannelEnabled) {
intent.putExtra(CallActivity.EXTRA_ORDERED, ordered);
intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS_MS, maxRetrMs);
intent.putExtra(CallActivity.EXTRA_MAX_RETRANSMITS, maxRetr);
intent.putExtra(CallActivity.EXTRA_PROTOCOL, protocol);
intent.putExtra(CallActivity.EXTRA_NEGOTIATED, negotiated);
intent.putExtra(CallActivity.EXTRA_ID, id);
}
// Pass through the file-based capture/record extras from the launching intent.
if (useValuesFromIntent) {
if (getIntent().hasExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA)) {
String videoFileAsCamera =
getIntent().getStringExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA);
intent.putExtra(CallActivity.EXTRA_VIDEO_FILE_AS_CAMERA, videoFileAsCamera);
}
if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE)) {
String saveRemoteVideoToFile =
getIntent().getStringExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE, saveRemoteVideoToFile);
}
if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH)) {
int videoOutWidth =
getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, videoOutWidth);
}
if (getIntent().hasExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT)) {
int videoOutHeight =
getIntent().getIntExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
intent.putExtra(CallActivity.EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, videoOutHeight);
}
}
startActivityForResult(intent, CONNECTION_REQUEST);
}
}
下面进入正题CallActivity
在进入CallActivity后,会进入到onCreate中,其中我们按照程序启动流程来梳理一些重要的部分。
1.设置Android系统相关内容
// Set window styles for a fullscreen window. Must be done before adding
// content. Also keeps the screen on, shows the activity over the lock screen
// and turns the screen on for incoming calls.
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(LayoutParams.FLAG_FULLSCREEN | LayoutParams.FLAG_KEEP_SCREEN_ON
| LayoutParams.FLAG_SHOW_WHEN_LOCKED | LayoutParams.FLAG_TURN_SCREEN_ON);
getWindow().getDecorView().setSystemUiVisibility(getSystemUiVisibility());
setContentView(R.layout.activity_call);
2.创建了UI相关,其中包括了近端和远端的渲染窗口组件
// Create UI controls.
pipRenderer = findViewById(R.id.pip_video_view); // picture-in-picture view (org.webrtc.SurfaceViewRenderer)
fullscreenRenderer = findViewById(R.id.fullscreen_video_view); // fullscreen renderer (org.webrtc.SurfaceViewRenderer)
3.向消费者列表中加入消费者,具体如下
创建Video消费者
// List of VideoSink consumers that will receive the remote video frames.
private final List<VideoSink> remoteSinks = new ArrayList<>();
// Proxies that forward frames to whichever renderer is currently attached.
private final ProxyVideoSink remoteProxyRenderer = new ProxyVideoSink();
private final ProxyVideoSink localProxyVideoSink = new ProxyVideoSink();
remoteSinks.add(remoteProxyRenderer); // register the remote-rendering sink
4.创建OpenGL ES环境,创建视频渲染器
// Create the shared OpenGL ES (EGL) context used by all renderers.
final EglBase eglBase = EglBase.create();
// Create video renderers.
pipRenderer.init(eglBase.getEglBaseContext(), null);
pipRenderer.setScalingType(ScalingType.SCALE_ASPECT_FIT);
5.小细节(如何巧妙地使用资源),以下内容是将远程的视频保存到本地,恰如其分地将保存到本地的操作作为消费者,加入到消费者列表中
// Path of the file the remote video should be saved to (null = don't save).
String saveRemoteVideoToFile = intent.getStringExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE);
// When saveRemoteVideoToFile is set we save the video from the remote to a file.
if (saveRemoteVideoToFile != null) {
int videoOutWidth = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_WIDTH, 0);
int videoOutHeight = intent.getIntExtra(EXTRA_SAVE_REMOTE_VIDEO_TO_FILE_HEIGHT, 0);
try {
videoFileRenderer = new VideoFileRenderer(
saveRemoteVideoToFile, videoOutWidth, videoOutHeight, eglBase.getEglBaseContext());
// The file writer is just another VideoSink consuming remote frames.
remoteSinks.add(videoFileRenderer);
} catch (IOException e) {
// Fail fast: a requested recording that cannot start is a fatal setup error.
throw new RuntimeException(
"Failed to open video file for output: " + saveRemoteVideoToFile, e);
}
}
6.DataChannel参数的结构化创建
// Build the DataChannel parameter structure; stays null when the data
// channel is not enabled via the launching intent.
DataChannelParameters dataChannelParameters = null;
if (intent.getBooleanExtra(EXTRA_DATA_CHANNEL_ENABLED, false)) {
dataChannelParameters = new DataChannelParameters(intent.getBooleanExtra(EXTRA_ORDERED, true),
intent.getIntExtra(EXTRA_MAX_RETRANSMITS_MS, -1),
intent.getIntExtra(EXTRA_MAX_RETRANSMITS, -1), intent.getStringExtra(EXTRA_PROTOCOL),
intent.getBooleanExtra(EXTRA_NEGOTIATED, false), intent.getIntExtra(EXTRA_ID, -1));
}
7.PeerConnectionParameters的参数化结构构建
// PeerConnection parameter setup: every media/codec/audio-processing option
// is read back from the launching intent's extras.
peerConnectionParameters =
new PeerConnectionParameters(intent.getBooleanExtra(EXTRA_VIDEO_CALL, true), loopback,
tracing, videoWidth, videoHeight, intent.getIntExtra(EXTRA_VIDEO_FPS, 0),
intent.getIntExtra(EXTRA_VIDEO_BITRATE, 0), intent.getStringExtra(EXTRA_VIDEOCODEC),
intent.getBooleanExtra(EXTRA_HWCODEC_ENABLED, true),
intent.getBooleanExtra(EXTRA_FLEXFEC_ENABLED, false),
intent.getIntExtra(EXTRA_AUDIO_BITRATE, 0), intent.getStringExtra(EXTRA_AUDIOCODEC),
intent.getBooleanExtra(EXTRA_NOAUDIOPROCESSING_ENABLED, false),
intent.getBooleanExtra(EXTRA_AECDUMP_ENABLED, false),
intent.getBooleanExtra(EXTRA_SAVE_INPUT_AUDIO_TO_FILE_ENABLED, false),
intent.getBooleanExtra(EXTRA_OPENSLES_ENABLED, false),
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AEC, false),
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_AGC, false),
intent.getBooleanExtra(EXTRA_DISABLE_BUILT_IN_NS, false),
intent.getBooleanExtra(EXTRA_DISABLE_WEBRTC_AGC_AND_HPF, false),
intent.getBooleanExtra(EXTRA_ENABLE_RTCEVENTLOG, false), dataChannelParameters);
8.创建信令连接客户端,创建连接参数,
// Create connection client. Use DirectRTCClient if room name is an IP
// otherwise use the standard WebSocketRTCClient.
// (Loopback always uses the WebSocket client.)
if (loopback || !DirectRTCClient.IP_PATTERN.matcher(roomId).matches()) {
appRtcClient = new WebSocketRTCClient(this);
} else {
Log.i(TAG, "Using DirectRTCClient because room name looks like an IP.");
appRtcClient = new DirectRTCClient(this);
}
// Create connection parameters.
String urlParameters = intent.getStringExtra(EXTRA_URLPARAMETERS);
roomConnectionParameters =
new RoomConnectionParameters(roomUri.toString(), roomId, loopback, urlParameters);
9.创建peerConnectionClient对象,该动作比较重要,其构造函数引发了一系列后续反应
// Create the PeerConnectionClient; its constructor kicks off WebRTC factory
// initialization on the client's executor thread (see below).
peerConnectionClient = new PeerConnectionClient(
getApplicationContext(), eglBase, peerConnectionParameters, CallActivity.this);
/**
* Create a PeerConnectionClient with the specified parameters. PeerConnectionClient takes
* ownership of |eglBase|.
*/
public PeerConnectionClient(Context appContext, EglBase eglBase,
PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events) {
this.rootEglBase = eglBase; // keep the rendering EGL context
this.appContext = appContext; // application context
this.events = events; // callback sink for connection events
this.peerConnectionParameters = peerConnectionParameters; // bound configuration
this.dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null;
// Log the preferred video codec derived from the parameters.
Log.d(TAG, "Preferred video codec: " + getSdpVideoCodecName(peerConnectionParameters));
final String fieldTrials = getFieldTrials(peerConnectionParameters);
// Factory initialization happens asynchronously on the executor thread.
executor.execute(() -> {
Log.d(TAG, "Initialize WebRTC. Field trials: " + fieldTrials);
PeerConnectionFactory.initialize(
PeerConnectionFactory.InitializationOptions.builder(appContext)
.setFieldTrials(fieldTrials)
.setEnableInternalTracer(true)
.createInitializationOptions());
});
}
10.具体分析PeerConnectionFactory.initialize()的接口
// Initialize WebRTC (internally builds PeerConnectionFactory.InitializationOptions).
PeerConnectionFactory.initialize(
PeerConnectionFactory.InitializationOptions.builder(appContext)
.setFieldTrials(fieldTrials)
.setEnableInternalTracer(true)
.createInitializationOptions());
// Loads and initializes WebRTC globals. Must be called at least once before
// creating any WebRTC objects.
public static void initialize(InitializationOptions options) {
ContextUtils.initialize(options.applicationContext); // (1) stash the application context
NativeLibrary.initialize(options.nativeLibraryLoader, options.nativeLibraryName); // (2) load native lib
nativeInitializeAndroidGlobals(); // (3) JNI-side Android globals
nativeInitializeFieldTrials(options.fieldTrials);
if (options.enableInternalTracer && !internalTracerInitialized) {
initializeInternalTracer(); // (4) internal tracer, guarded against re-init
}
if (options.loggable != null) {
Logging.injectLoggable(options.loggable, options.loggableSeverity);
nativeInjectLoggable(new JNILogging(options.loggable), options.loggableSeverity.ordinal());
} else {
// No Loggable supplied: remove any previously injected one.
Logging.d(TAG,
"PeerConnectionFactory was initialized without an injected Loggable. "
+ "Any existing Loggable will be deleted.");
Logging.deleteInjectedLoggable();
nativeDeleteLoggable();
}
}
11.开始连接StartCall,向后端进行连接,并且返回信令结果
// Starts the call: connects to the room via the signaling client and brings
// up the audio manager. The connection result is delivered asynchronously to
// this Activity's onConnectedToRoomInternal.
private void startCall() {
if (appRtcClient == null) {
Log.e(TAG, "AppRTC client is not allocated for a call.");
return;
}
callStartedTimeMs = System.currentTimeMillis();
// Start room connection; result is reported back through the signaling events.
logAndToast(getString(R.string.connecting_to, roomConnectionParameters.roomUrl));
appRtcClient.connectToRoom(roomConnectionParameters);
// Create an audio manager that will take care of audio routing,
// audio modes, audio device enumeration etc.
audioManager = AppRTCAudioManager.create(getApplicationContext());
// Store existing audio settings and change audio mode to
// MODE_IN_COMMUNICATION for best possible VoIP performance.
Log.d(TAG, "Starting the audio manager...");
audioManager.start(new AudioManagerEvents() {
// This method will be called each time the number of available audio
// devices has changed.
@Override
public void onAudioDeviceChanged(
AudioDevice audioDevice, Set<AudioDevice> availableAudioDevices) {
onAudioManagerDevicesChanged(audioDevice, availableAudioDevices);
}
});
}
12.onConnectedToRoom信令参数回调
// -----Implementation of AppRTCClient.AppRTCSignalingEvents ---------------
// All callbacks are invoked from websocket signaling looper thread and
// are routed to UI thread.
// Invoked when the signaling server has acknowledged connectToRoom.
private void onConnectedToRoomInternal(final SignalingParameters params) {
final long delta = System.currentTimeMillis() - callStartedTimeMs;
// Store the room's signaling parameters.
signalingParameters = params;
logAndToast("Creating peer connection, delay=" + delta + "ms");
VideoCapturer videoCapturer = null;
if (peerConnectionParameters.videoCallEnabled) {
videoCapturer = createVideoCapturer(); // local video capture source
}
// This is where the PeerConnection is actually created.
peerConnectionClient.createPeerConnection(
localProxyVideoSink, remoteSinks, videoCapturer, signalingParameters);
// The initiator creates the offer; the callee answers.
if (signalingParameters.initiator) {
logAndToast("Creating OFFER...");
// Create offer. Offer SDP will be sent to answering client in
// PeerConnectionEvents.onLocalDescription event.
peerConnectionClient.createOffer();
} else {
if (params.offerSdp != null) {
// Apply the remote offer before answering.
peerConnectionClient.setRemoteDescription(params.offerSdp);
logAndToast("Creating ANSWER...");
// Create answer. Answer SDP will be sent to offering client in
// PeerConnectionEvents.onLocalDescription event.
peerConnectionClient.createAnswer();
}
if (params.iceCandidates != null) {
// Add remote ICE candidates from room.
for (IceCandidate iceCandidate : params.iceCandidates) {
peerConnectionClient.addRemoteIceCandidate(iceCandidate);
}
}
}
}