WebRtcConnection Source Code Analysis (Part 1)

Licode Service and Startup Process Analysis

WebRtcConnection Source Code Analysis (Part 1)

WebRtcConnection Source Code Analysis (Part 2)

WebRtcConnection Source Code Analysis (Part 3)

MediaStream Source Code Analysis (Part 1)

MediaStream Source Code Analysis (Part 2)

MediaStream Source Code Analysis (Part 3)

This post covers WebRtcConnection, one of licode's core modules, which manages the WebRTC connection used to pull (subscribe to) streams.


DEFINE_LOGGER(WebRtcConnection, "WebRtcConnection");

// WebRtcConnection constructor
// Parameters: worker, io_worker, connection_id, ice_config, rtp_mappings, ext_mappings, listener
WebRtcConnection::WebRtcConnection(std::shared_ptr<Worker> worker, std::shared_ptr<IOWorker> io_worker,
    const std::string& connection_id, const IceConfig& ice_config, const std::vector<RtpMap> rtp_mappings,
    const std::vector<erizo::ExtMap> ext_mappings, WebRtcConnectionEventListener* listener) :
    connection_id_{connection_id},
    audio_enabled_{false}, video_enabled_{false}, bundle_{false}, conn_event_listener_{listener},
    ice_config_{ice_config}, rtp_mappings_{rtp_mappings}, extension_processor_{ext_mappings},
    worker_{worker}, io_worker_{io_worker},
    remote_sdp_{std::make_shared<SdpInfo>(rtp_mappings)}, local_sdp_{std::make_shared<SdpInfo>(rtp_mappings)},
    audio_muted_{false}, video_muted_{false}, first_remote_sdp_processed_{false}
    {
  ELOG_INFO("%s message: constructor, stunserver: %s, stunPort: %d, minPort: %d, maxPort: %d",
      toLog(), ice_config.stun_server.c_str(), ice_config.stun_port, ice_config.min_port, ice_config.max_port);
  stats_ = std::make_shared<Stats>();
  distributor_ = std::unique_ptr<BandwidthDistributionAlgorithm>(new TargetVideoBWDistributor());
  global_state_ = CONN_INITIAL;

  trickle_enabled_ = ice_config_.should_trickle;
  slide_show_mode_ = false;

  sending_ = true;
}
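
The constructor only stores its dependencies and initializes flags and state; no transports or sockets are created here. Below is a hypothetical caller-side sketch of creating a connection. The worker/IO-worker wiring, the commented include path, and all concrete values are assumptions for illustration, not code taken from this post; only the constructor signature and the IceConfig fields logged above are grounded in the source.

#include <memory>
#include <string>
#include <vector>
// erizo headers; the exact include paths depend on the build setup
// #include "WebRtcConnection.h"

std::shared_ptr<erizo::WebRtcConnection> makeConnection(
    std::shared_ptr<erizo::Worker> worker,
    std::shared_ptr<erizo::IOWorker> io_worker,
    erizo::WebRtcConnectionEventListener* listener) {
  erizo::IceConfig ice_config;
  ice_config.stun_server = "stun.example.org";  // hypothetical STUN settings
  ice_config.stun_port = 3478;
  ice_config.min_port = 30000;
  ice_config.max_port = 31000;
  std::vector<erizo::RtpMap> rtp_mappings;      // codec mappings, normally built from config
  std::vector<erizo::ExtMap> ext_mappings;      // RTP header-extension mappings
  auto connection = std::make_shared<erizo::WebRtcConnection>(
      worker, io_worker, "connection_1", ice_config, rtp_mappings, ext_mappings, listener);
  connection->init();  // only reports the CONN_INITIAL state to the listener
  return connection;
}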

// Destructor
WebRtcConnection::~WebRtcConnection() {
  ELOG_DEBUG("%s message:Destructor called", toLog());
  ELOG_DEBUG("%s message: Destructor ended", toLog());
}

// Close the WebRtcConnection: clears media_streams_ and shuts down video_transport_, audio_transport_ and conn_event_listener_
void WebRtcConnection::syncClose() {
  ELOG_DEBUG("%s message: Close called", toLog());
  if (!sending_) {
    return;
  }
  sending_ = false;
  media_streams_.clear();
  if (video_transport_.get()) {
    video_transport_->close();
  }
  if (audio_transport_.get()) {
    audio_transport_->close();
  }
  global_state_ = CONN_FINISHED;
  if (conn_event_listener_ != nullptr) {
    conn_event_listener_ = nullptr;
  }

  ELOG_DEBUG("%s message: Close ended", toLog());
}

// Close the WebRtcConnection asynchronously by scheduling syncClose() on the worker
void WebRtcConnection::close() {
  ELOG_DEBUG("%s message: Async close called", toLog());
  std::shared_ptr<WebRtcConnection> shared_this = shared_from_this();
  asyncTask([shared_this] (std::shared_ptr<WebRtcConnection> connection) {
    shared_this->syncClose();
  });
}
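
close() does not tear anything down on the calling thread: it captures shared_from_this() so the connection stays alive until the queued task runs, and lets syncClose() execute on the worker. asyncTask() itself is not part of this excerpt; the following is only a plausible sketch of the pattern, assuming worker_->task() queues a callable onto the connection's Worker thread (an assumption, not code quoted from the post).

boost::future<void> WebRtcConnection::asyncTask(
    std::function<void(std::shared_ptr<WebRtcConnection>)> f) {
  auto task_promise = std::make_shared<boost::promise<void>>();
  std::weak_ptr<WebRtcConnection> weak_this = shared_from_this();
  worker_->task([weak_this, f, task_promise] {
    if (auto this_ptr = weak_this.lock()) {
      f(this_ptr);              // run the task only if the connection is still alive
    }
    task_promise->set_value();  // always complete the returned future
  });
  return task_promise->get_future();
}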

// Initialize the WebRtcConnection; it only notifies the upper layer that the current state is CONN_INITIAL
bool WebRtcConnection::init() {
  maybeNotifyWebRtcConnectionEvent(global_state_, "");
  return true;
}

// Asynchronously call createOfferSync() to generate the offer
boost::future<void> WebRtcConnection::createOffer(bool video_enabled, bool audio_enabled, bool bundle) {
  return asyncTask([video_enabled, audio_enabled, bundle] (std::shared_ptr<WebRtcConnection> connection) {
    connection->createOfferSync(video_enabled, audio_enabled, bundle);
  });
}

// Build the local SDP (createOfferSdp plus per-stream SSRCs), create video_transport_ and audio_transport_ via new DtlsTransport, and start() them
bool WebRtcConnection::createOfferSync(bool video_enabled, bool audio_enabled, bool bundle) {
  boost::mutex::scoped_lock lock(update_state_mutex_);
  bundle_ = bundle;
  video_enabled_ = video_enabled;
  audio_enabled_ = audio_enabled;
  local_sdp_->createOfferSdp(video_enabled_, audio_enabled_, bundle_);

  local_sdp_->dtlsRole = ACTPASS;
  if (local_sdp_->internal_dtls_role == ACTPASS) {
    local_sdp_->internal_dtls_role = PASSIVE;
  }

  ELOG_DEBUG("%s message: Creating sdp offer, isBundle: %d, setup: %d",
    toLog(), bundle_, local_sdp_->internal_dtls_role);

  forEachMediaStream([this] (const std::shared_ptr<MediaStream> &media_stream) {
    if (!media_stream->isReady() || media_stream->isPublisher()) {
      ELOG_DEBUG("%s message: getting local SDPInfo stream not running, stream_id: %s", toLog(), media_stream->getId());
      return;
    }
    if (video_enabled_) {
      std::vector<uint32_t> video_ssrc_list = std::vector<uint32_t>();
      if (media_stream->getVideoSinkSSRC() != kDefaultVideoSinkSSRC && media_stream->getVideoSinkSSRC() != 0) {
        video_ssrc_list.push_back(media_stream->getVideoSinkSSRC());
      }
      ELOG_DEBUG("%s message: getting local SDPInfo, stream_id: %s, audio_ssrc: %u",
                 toLog(), media_stream->getId().c_str(), media_stream->getAudioSinkSSRC());
      if (!video_ssrc_list.empty()) {
        local_sdp_->video_ssrc_map[media_stream->getLabel()] = video_ssrc_list;
      }
    }
    if (audio_enabled_) {
      if (media_stream->getAudioSinkSSRC() != kDefaultAudioSinkSSRC && media_stream->getAudioSinkSSRC() != 0) {
        local_sdp_->audio_ssrc_map[media_stream->getLabel()] = media_stream->getAudioSinkSSRC();
      }
    }
  });

  auto listener = std::dynamic_pointer_cast<TransportListener>(shared_from_this());

  if (bundle_) {
    if (video_transport_.get() == nullptr && (video_enabled_ || audio_enabled_)) {
      video_transport_.reset(new DtlsTransport(VIDEO_TYPE, "video", connection_id_, bundle_, true,
                                              listener, ice_config_ , "", "", true, worker_, io_worker_));
      video_transport_->copyLogContextFrom(*this);
      video_transport_->start();
    }
  } else {
    if (video_transport_.get() == nullptr && video_enabled_) {
      // For now we don't re/check transports, if they are already created we leave them there
      video_transport_.reset(new DtlsTransport(VIDEO_TYPE, "video", connection_id_, bundle_, true,
                                              listener, ice_config_ , "", "", true, worker_, io_worker_));
      video_transport_->copyLogContextFrom(*this);
      video_transport_->start();
    }
    if (audio_transport_.get() == nullptr && audio_enabled_) {
      audio_transport_.reset(new DtlsTransport(AUDIO_TYPE, "audio", connection_id_, bundle_, true,
                                              listener, ice_config_, "", "", true, worker_, io_worker_));
      audio_transport_->copyLogContextFrom(*this);
      audio_transport_->start();
    }
  }

  std::string msg = this->getLocalSdp();
  maybeNotifyWebRtcConnectionEvent(global_state_, msg);

  return true;
}
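
createOfferSync() does four things: fills in local_sdp_ (including the ACTPASS/PASSIVE DTLS role), copies each subscriber stream's sink SSRCs into the SDP, lazily creates the DtlsTransport(s) (one shared transport when bundling, separate audio and video transports otherwise), and finally pushes the current local SDP to the upper layer through maybeNotifyWebRtcConnectionEvent(). A hypothetical caller-side sketch follows; the `connection` shared_ptr is assumed to come from the construction sketch earlier.

// Ask the connection to build an offer for bundled audio + video. createOffer() only schedules
// createOfferSync() on the worker; the resulting SDP reaches the upper layer via the
// conn_event_listener_ callback, not via the returned future.
boost::future<void> pending = connection->createOffer(/*video_enabled=*/true,
                                                      /*audio_enabled=*/true,
                                                      /*bundle=*/true);
pending.wait();  // optional: block until the offer has been built on the worker thread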

// Asynchronously append a new MediaStream to media_streams_
boost::future<void> WebRtcConnection::addMediaStream(std::shared_ptr<MediaStream> media_stream) {
  return asyncTask([media_stream] (std::shared_ptr<WebRtcConnection> connection) {
    ELOG_DEBUG("%s message: Adding mediaStream, id: %s", connection->toLog(), media_stream->getId().c_str());
    connection->media_streams_.push_back(media_stream);
  });
}

// Asynchronously remove the MediaStream with the given stream_id from media_streams_ (and its SSRC entries from local_sdp_)
boost::future<void> WebRtcConnection::removeMediaStream(const std::string& stream_id) {
  return asyncTask([stream_id] (std::shared_ptr<WebRtcConnection> connection) {
    boost::mutex::scoped_lock lock(connection->update_state_mutex_);
    ELOG_DEBUG("%s message: removing mediaStream, id: %s", connection->toLog(), stream_id.c_str());
    connection->media_streams_.erase(std::remove_if(connection->media_streams_.begin(),
                                                    connection->media_streams_.end(),
      [stream_id, connection](const std::shared_ptr<MediaStream> &stream) {
        bool isStream = stream->getId() == stream_id;
        if (isStream) {
          auto video_it = connection->local_sdp_->video_ssrc_map.find(stream->getLabel());
          if (video_it != connection->local_sdp_->video_ssrc_map.end()) {
            connection->local_sdp_->video_ssrc_map.erase(video_it);
          }
          auto audio_it = connection->local_sdp_->audio_ssrc_map.find(stream->getLabel());
          if (audio_it != connection->local_sdp_->audio_ssrc_map.end()) {
            connection->local_sdp_->audio_ssrc_map.erase(audio_it);
          }
        }
        return isStream;
      }));
    });
}
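
removeMediaStream() combines removal with cleanup: the remove_if predicate both decides which stream goes and erases that stream's SSRC entries from local_sdp_. For reference, the canonical erase-remove idiom passes the container's end() as a second argument to erase(). A minimal standalone example of the pattern (names and values are illustrative only):

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

int main() {
  std::vector<std::string> stream_ids = {"stream_1", "stream_2", "stream_3"};
  const std::string to_remove = "stream_2";
  // remove_if compacts the kept elements to the front and returns the new logical end;
  // erase() then drops the tail. The predicate is also a convenient place for per-element
  // cleanup, which is how the SSRC maps are pruned above.
  stream_ids.erase(std::remove_if(stream_ids.begin(), stream_ids.end(),
                                  [&to_remove](const std::string& id) {
                                    return id == to_remove;
                                  }),
                   stream_ids.end());
  for (const auto& id : stream_ids) {
    std::cout << id << std::endl;  // prints stream_1 and stream_3
  }
  return 0;
}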

// Run func on every stream in media_streams_, synchronously on the calling thread
void WebRtcConnection::forEachMediaStream(std::function<void(const std::shared_ptr<MediaStream>&)> func) {
  std::for_each(media_streams_.begin(), media_streams_.end(), func);
}

// Run func asynchronously on each stream and return a future that completes once all per-stream tasks have finished
boost::future<void> WebRtcConnection::forEachMediaStreamAsync(
    std::function<void(const std::shared_ptr<MediaStream>&)> func) {
  auto futures = std::make_shared<std::vector<boost::future<void>>>();
  std::for_each(media_streams_.begin(), media_streams_.end(),
    [func, futures] (const std::shared_ptr<MediaStream> &stream) {
      futures->push_back(stream->asyncTask([func] (const std::shared_ptr<MediaStream> &stream) {
        func(stream);
      }));
  });

  auto future_when = boost::when_all(futures->begin(), futures->end());
  return future_when.then([](decltype(future_when)) {
    });
}
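
forEachMediaStreamAsync() schedules func on each stream (via the stream's own asyncTask), collects the per-stream futures, and uses boost::when_all plus an empty continuation to collapse them into a single future<void> the caller can wait on. Below is a minimal standalone sketch of that composition; boost::async and the three feature macros stand in for the streams' task machinery and Licode's build flags, which are assumptions for this example.

#define BOOST_THREAD_PROVIDES_FUTURE
#define BOOST_THREAD_PROVIDES_FUTURE_CONTINUATION
#define BOOST_THREAD_PROVIDES_FUTURE_WHEN_ALL_WHEN_ANY
#include <boost/thread/future.hpp>
#include <iostream>
#include <vector>

int main() {
  // Launch a few independent asynchronous tasks, one per "stream".
  std::vector<boost::future<void>> futures;
  for (int i = 0; i < 3; i++) {
    futures.push_back(boost::async([i] {
      std::cout << "task " << i << " done" << std::endl;
    }));
  }
  // when_all yields a future that becomes ready once every input future is ready; the empty
  // continuation turns it into a plain future<void>, which is what the caller waits on.
  auto future_when = boost::when_all(futures.begin(), futures.end());
  boost::future<void> all_done = future_when.then([](decltype(future_when)) {});
  all_done.wait();
  return 0;
}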

// Run func asynchronously on each stream, without returning a future (fire and forget)
void WebRtcConnection::forEachMediaStreamAsyncNoPromise(
    std::function<void(const std::shared_ptr<MediaStream>&)> func) {
  std::for_each(media_streams_.begin(), media_streams_.end(),
    [func] (const std::shared_ptr<MediaStream> &stream) {
      stream->asyncTask([func] (const std::shared_ptr<MediaStream> &stream) {
        func(stream);
      });
  });
}
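
To contrast the three traversal helpers, here is a hypothetical caller-side snippet (the lambda body is a placeholder): forEachMediaStream runs func inline on the calling thread, forEachMediaStreamAsync runs it on each stream's task queue and lets the caller wait, and forEachMediaStreamAsyncNoPromise uses the same scheduling but is fire-and-forget.

connection->forEachMediaStreamAsync([](const std::shared_ptr<erizo::MediaStream>& stream) {
  stream->getId();  // placeholder for per-stream work
}).wait();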

Each function above has been annotated with a corresponding comment.

Reposted from blog.csdn.net/tong5956/article/details/108240644