MediaStream Source Code Analysis (1)

This article is part of a series on the Licode source code:

Licode Service and Startup Process Analysis

MediaStream Source Code Analysis (1)

MediaStream Source Code Analysis (2)

MediaStream Source Code Analysis (3)

WebRtcConnection Source Code Analysis (1)

WebRtcConnection Source Code Analysis (2)

WebRtcConnection Source Code Analysis (3)

Analysis of the MediaStream class:

// MediaStream constructor
MediaStream::MediaStream(std::shared_ptr<Worker> worker,
  std::shared_ptr<WebRtcConnection> connection,
  const std::string& media_stream_id,
  const std::string& media_stream_label,
  bool is_publisher) :
    audio_enabled_{false}, video_enabled_{false},
    media_stream_event_listener_{nullptr},
    connection_{std::move(connection)},
    stream_id_{media_stream_id},
    mslabel_ {media_stream_label},
    bundle_{false},
    pipeline_{Pipeline::create()},
    worker_{std::move(worker)},
    audio_muted_{false}, video_muted_{false},
    pipeline_initialized_{false},
    is_publisher_{is_publisher},
    simulcast_{false},
    bitrate_from_max_quality_layer_{0},
    video_bitrate_{0},
    random_generator_{random_device_()} {
  if (is_publisher) {
    setVideoSinkSSRC(kDefaultVideoSinkSSRC);
    setAudioSinkSSRC(kDefaultAudioSinkSSRC);
  } else {
    setAudioSinkSSRC(1000000000 + getRandomValue(0, 999999999));
    setVideoSinkSSRC(1000000000 + getRandomValue(0, 999999999));
  }
  ELOG_INFO("%s message: constructor, id: %s",
      toLog(), media_stream_id.c_str());
  source_fb_sink_ = this;
  sink_fb_source_ = this;
  stats_ = std::make_shared<Stats>();
  log_stats_ = std::make_shared<Stats>();
  quality_manager_ = std::make_shared<QualityManager>();
  packet_buffer_ = std::make_shared<PacketBufferService>();

  rtcp_processor_ = std::make_shared<RtcpForwarder>(static_cast<MediaSink*>(this), static_cast<MediaSource*>(this));

  should_send_feedback_ = true;
  slide_show_mode_ = false;

  mark_ = clock::now();

  rate_control_ = 0;
  sending_ = true;
  ready_ = false;
}

// MediaStream destructor
MediaStream::~MediaStream() {
  ELOG_DEBUG("%s message:Destructor called", toLog());
  ELOG_DEBUG("%s message: Destructor ended", toLog());
}

// Get a random value in [min, max]
uint32_t MediaStream::getRandomValue(uint32_t min, uint32_t max) {
  std::uniform_int_distribution<> distr(min, max);
  return std::round(distr(random_generator_));
}
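The constructor above uses getRandomValue to give non-publisher (subscriber) streams sink SSRCs in the range 1000000000 to 1999999999, while publishers keep the fixed kDefaultVideoSinkSSRC / kDefaultAudioSinkSSRC values. Below is a minimal standalone sketch of that pattern; the helper name make_subscriber_ssrc and the explicit uint32_t distribution are mine (the original uses the default-typed std::uniform_int_distribution<> plus std::round), not part of Licode.

// Standalone sketch of the subscriber SSRC assignment seen in the constructor.
// The helper name make_subscriber_ssrc is illustrative, not Licode's.
#include <cstdint>
#include <iostream>
#include <random>

static std::random_device random_device_;
static std::mt19937 random_generator_(random_device_());

// Mirrors MediaStream::getRandomValue: uniform integer in [min, max].
uint32_t get_random_value(uint32_t min, uint32_t max) {
  std::uniform_int_distribution<uint32_t> distr(min, max);
  return distr(random_generator_);
}

// Subscribers get a sink SSRC in [1000000000, 1999999999]; publishers keep
// the fixed default sink SSRCs instead.
uint32_t make_subscriber_ssrc() {
  return 1000000000 + get_random_value(0, 999999999);
}

int main() {
  std::cout << "subscriber SSRC: " << make_subscriber_ssrc() << std::endl;
  return 0;
}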

// Get the maximum video bitrate
uint32_t MediaStream::getMaxVideoBW() {
  uint32_t bitrate = rtcp_processor_ ? rtcp_processor_->getMaxVideoBW() : 0;
  return bitrate;
}

// Set the maximum video bitrate
void MediaStream::setMaxVideoBW(uint32_t max_video_bw) {
  asyncTask([max_video_bw] (std::shared_ptr<MediaStream> stream) {
    if (stream->rtcp_processor_) {
      stream->rtcp_processor_->setMaxVideoBW(max_video_bw * 1000);
      if (stream->pipeline_) {
        stream->pipeline_->notifyUpdate();
      }
    }
  });
}
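setMaxVideoBW shows the asyncTask pattern that MediaStream uses throughout: instead of touching rtcp_processor_ on the caller's thread, it posts a lambda that later runs on the stream's Worker and receives a shared_ptr that keeps the stream alive. Also note the unit conversion: the argument appears to be in kbps and is stored in bps (the * 1000). The following is a simplified, self-contained sketch of this pattern with a toy task queue standing in for Licode's Worker; FakeWorker and FakeStream are illustrative names, not Licode classes.

// Simplified sketch of the asyncTask pattern: work is queued and executed
// later, and the lambda receives a shared_ptr that keeps the object alive
// until the task has run. This is not Licode's Worker/MediaStream code.
#include <cstdint>
#include <deque>
#include <functional>
#include <iostream>
#include <memory>

// Toy worker: queues tasks and runs them when poll() is called. Licode's
// Worker instead drains its queue on a dedicated service thread.
struct FakeWorker {
  std::deque<std::function<void()>> tasks;
  void task(std::function<void()> f) { tasks.push_back(std::move(f)); }
  void poll() {
    while (!tasks.empty()) {
      tasks.front()();
      tasks.pop_front();
    }
  }
};

class FakeStream : public std::enable_shared_from_this<FakeStream> {
 public:
  explicit FakeStream(std::shared_ptr<FakeWorker> worker) : worker_{std::move(worker)} {}

  // Same shape as MediaStream::asyncTask: capture shared_from_this() and
  // hand it back to the lambda when the worker finally runs it.
  void asyncTask(std::function<void(std::shared_ptr<FakeStream>)> f) {
    auto shared_this = shared_from_this();
    worker_->task([shared_this, f] { f(shared_this); });
  }

  void setMaxVideoBW(uint32_t max_video_bw_kbps) {
    asyncTask([max_video_bw_kbps](std::shared_ptr<FakeStream> stream) {
      stream->max_video_bw_bps_ = max_video_bw_kbps * 1000;  // kbps -> bps
      std::cout << "max video BW: " << stream->max_video_bw_bps_ << " bps" << std::endl;
    });
  }

 private:
  std::shared_ptr<FakeWorker> worker_;
  uint32_t max_video_bw_bps_ = 0;
};

int main() {
  auto worker = std::make_shared<FakeWorker>();
  auto stream = std::make_shared<FakeStream>(worker);
  stream->setMaxVideoBW(300);  // nothing happens yet, the task is only queued
  worker->poll();              // now the lambda runs "on the worker"
  return 0;
}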

// Synchronously close the MediaStream: stop sending and clear video_sink_ and audio_sink_
void MediaStream::syncClose() {
  ELOG_DEBUG("%s message:Close called", toLog());
  if (!sending_) {
    return;
  }
  sending_ = false;
  ready_ = false;
  video_sink_ = nullptr;
  audio_sink_ = nullptr;
  fb_sink_ = nullptr;
  pipeline_initialized_ = false;
  pipeline_->close();
  pipeline_.reset();
  connection_.reset();
  ELOG_DEBUG("%s message: Close ended", toLog());
}

// Asynchronously close the MediaStream
boost::future<void> MediaStream::close() {
  ELOG_DEBUG("%s message: Async close called", toLog());
  std::shared_ptr<MediaStream> shared_this = shared_from_this();
  return asyncTask([shared_this] (std::shared_ptr<MediaStream> stream) {
    shared_this->syncClose();
  });
}
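close() returns a boost::future<void> rather than tearing the stream down inline: the actual work happens in syncClose on the worker, and the future lets the caller wait until that teardown has really finished. A minimal sketch of the same idea, using std::future and std::async in place of boost futures and the Worker; SketchStream is an illustrative name, not a Licode class.

// Sketch of a future-returning asynchronous close: the teardown runs off the
// calling thread and the returned future becomes ready once it has completed.
// std::future/std::async stand in for boost::future and Licode's Worker.
#include <future>
#include <iostream>
#include <memory>

class SketchStream : public std::enable_shared_from_this<SketchStream> {
 public:
  // The synchronous teardown, analogous to MediaStream::syncClose().
  void syncClose() {
    if (!sending_) {
      return;  // idempotent: a second close is a no-op
    }
    sending_ = false;
    std::cout << "stream closed" << std::endl;
  }

  // Asynchronous close, analogous to MediaStream::close(): keep *this alive
  // via shared_from_this() and signal completion through the future.
  std::future<void> close() {
    auto shared_this = shared_from_this();
    return std::async(std::launch::async, [shared_this] { shared_this->syncClose(); });
  }

 private:
  bool sending_ = true;
};

int main() {
  auto stream = std::make_shared<SketchStream>();
  stream->close().wait();  // block until the teardown has actually run
  return 0;
}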

// Initialize the MediaStream and set ready_
bool MediaStream::init(bool doNotWaitForRemoteSdp) {
  if (doNotWaitForRemoteSdp) {
    ready_ = true;
  }
  return true;
}

// Check whether the given SSRC is a source SSRC
bool MediaStream::isSourceSSRC(uint32_t ssrc) {
  return isVideoSourceSSRC(ssrc) || isAudioSourceSSRC(ssrc);
}

// Check whether the given SSRC is a sink SSRC
bool MediaStream::isSinkSSRC(uint32_t ssrc) {
  return isVideoSinkSSRC(ssrc) || isAudioSinkSSRC(ssrc);
}

// Set the remote SDP: add the source SSRCs to rtcp_processor_, initialize the pipeline and stats, and notify the upper layer that the stream is ready
bool MediaStream::setRemoteSdp(std::shared_ptr<SdpInfo> sdp) {
  ELOG_DEBUG("%s message: setting remote SDP to Stream, sending: %d, initialized: %d",
    toLog(), sending_, pipeline_initialized_);
  if (!sending_) {
    return true;
  }

  std::shared_ptr<SdpInfo> remote_sdp = std::make_shared<SdpInfo>(*sdp.get());
  auto video_ssrc_list_it = remote_sdp->video_ssrc_map.find(getLabel());
  auto audio_ssrc_it = remote_sdp->audio_ssrc_map.find(getLabel());

  if (isPublisher() && !ready_) {
    bool stream_found = false;

    if (video_ssrc_list_it != remote_sdp->video_ssrc_map.end() ||
        audio_ssrc_it != remote_sdp->audio_ssrc_map.end()) {
      stream_found = true;
    }

    if (!stream_found) {
      return true;
    }
  }

  remote_sdp_ = remote_sdp;

  if (remote_sdp_->videoBandwidth != 0) {
    ELOG_DEBUG("%s message: Setting remote BW, maxVideoBW: %u", toLog(), remote_sdp_->videoBandwidth);
    this->rtcp_processor_->setMaxVideoBW(remote_sdp_->videoBandwidth*1000);
  }

  ready_ = true;

  if (pipeline_initialized_ && pipeline_) {
    pipeline_->notifyUpdate();
    return true;
  }

  bundle_ = remote_sdp_->isBundle;
  if (video_ssrc_list_it != remote_sdp_->video_ssrc_map.end()) {
    setVideoSourceSSRCList(video_ssrc_list_it->second);
  }

  if (audio_ssrc_it != remote_sdp_->audio_ssrc_map.end()) {
    setAudioSourceSSRC(audio_ssrc_it->second);
  }

  if (getVideoSourceSSRCList().empty() ||
      (getVideoSourceSSRCList().size() == 1 && getVideoSourceSSRCList()[0] == 0)) {
    std::vector<uint32_t> default_ssrc_list;
    default_ssrc_list.push_back(kDefaultVideoSinkSSRC);
    setVideoSourceSSRCList(default_ssrc_list);
  }

  if (getAudioSourceSSRC() == 0) {
    setAudioSourceSSRC(kDefaultAudioSinkSSRC);
  }

  audio_enabled_ = remote_sdp_->hasAudio;
  video_enabled_ = remote_sdp_->hasVideo;

  rtcp_processor_->addSourceSsrc(getAudioSourceSSRC());
  std::for_each(video_source_ssrc_list_.begin(), video_source_ssrc_list_.end(), [this] (uint32_t new_ssrc){
      rtcp_processor_->addSourceSsrc(new_ssrc);
  });

  initializePipeline();

  initializeStats();

  notifyMediaStreamEvent("ready", "");

  return true;
}
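The core of setRemoteSdp is SSRC resolution: the stream's label is looked up in the SDP's video_ssrc_map (label to a list of video SSRCs) and audio_ssrc_map (label to a single audio SSRC), and when the SDP announces nothing usable (an empty video list, a single 0 entry, or audio SSRC 0) the default sink SSRCs are used instead. Below is a standalone sketch of that fallback logic; the map types mirror how the code above uses them, while the constants and function names are illustrative stand-ins rather than Licode's.

// Standalone sketch of the SSRC fallback logic in setRemoteSdp. The map
// types match the way video_ssrc_map / audio_ssrc_map are used above; the
// constants and names are illustrative stand-ins, not Licode's.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <vector>

// Stand-ins for kDefaultVideoSinkSSRC / kDefaultAudioSinkSSRC.
constexpr uint32_t kFallbackVideoSsrc = 55543;
constexpr uint32_t kFallbackAudioSsrc = 44444;

std::vector<uint32_t> resolveVideoSourceSsrcs(
    const std::map<std::string, std::vector<uint32_t>>& video_ssrc_map,
    const std::string& label) {
  std::vector<uint32_t> ssrcs;
  auto it = video_ssrc_map.find(label);
  if (it != video_ssrc_map.end()) {
    ssrcs = it->second;
  }
  // Same check as setRemoteSdp: an empty list or a single 0 entry means the
  // SDP did not announce usable video SSRCs, so fall back to the default.
  if (ssrcs.empty() || (ssrcs.size() == 1 && ssrcs[0] == 0)) {
    ssrcs = {kFallbackVideoSsrc};
  }
  return ssrcs;
}

uint32_t resolveAudioSourceSsrc(
    const std::map<std::string, uint32_t>& audio_ssrc_map,
    const std::string& label) {
  auto it = audio_ssrc_map.find(label);
  uint32_t ssrc = (it != audio_ssrc_map.end()) ? it->second : 0;
  return ssrc != 0 ? ssrc : kFallbackAudioSsrc;  // 0 means "not set"
}

int main() {
  std::map<std::string, std::vector<uint32_t>> video_map{{"label", {0}}};
  std::map<std::string, uint32_t> audio_map;  // no audio SSRC announced
  for (uint32_t ssrc : resolveVideoSourceSsrcs(video_map, "label")) {
    std::cout << "video source SSRC: " << ssrc << std::endl;
  }
  std::cout << "audio source SSRC: " << resolveAudioSourceSsrc(audio_map, "label") << std::endl;
  return 0;
}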

 


Reprinted from: blog.csdn.net/tong5956/article/details/108261583