MediaStreamソースコード分析(2)

Licodeサービスと起動プロセスの分析

MediaStreamソースコード分析(1)

MediaStreamソースコード分析(2)

MediaStreamソースコード分析(3)

WebRtcConnectionソースコード分析(1)

WebRtcConnectionソースコード分析(2)

WebRtcConnectionソースコード分析(3)

MediaStreamの分析を続けます。

// 初始化log_stats,统计状态信息
void MediaStream::initializeStats() {
  log_stats_->getNode().insertStat("streamId", StringStat{getId()});
  log_stats_->getNode().insertStat("audioBitrate", CumulativeStat{0});
  log_stats_->getNode().insertStat("audioFL", CumulativeStat{0});
  log_stats_->getNode().insertStat("audioPL", CumulativeStat{0});
  log_stats_->getNode().insertStat("audioJitter", CumulativeStat{0});
  log_stats_->getNode().insertStat("audioMuted", CumulativeStat{0});
  log_stats_->getNode().insertStat("audioNack", CumulativeStat{0});
  log_stats_->getNode().insertStat("audioRemb", CumulativeStat{0});

  log_stats_->getNode().insertStat("videoBitrate", CumulativeStat{0});
  log_stats_->getNode().insertStat("videoFL", CumulativeStat{0});
  log_stats_->getNode().insertStat("videoPL", CumulativeStat{0});
  log_stats_->getNode().insertStat("videoJitter", CumulativeStat{0});
  log_stats_->getNode().insertStat("videoMuted", CumulativeStat{0});
  log_stats_->getNode().insertStat("slideshow", CumulativeStat{0});
  log_stats_->getNode().insertStat("videoNack", CumulativeStat{0});
  log_stats_->getNode().insertStat("videoPli", CumulativeStat{0});
  log_stats_->getNode().insertStat("videoFir", CumulativeStat{0});
  log_stats_->getNode().insertStat("videoRemb", CumulativeStat{0});
  log_stats_->getNode().insertStat("videoErizoRemb", CumulativeStat{0});
  log_stats_->getNode().insertStat("videoKeyFrames", CumulativeStat{0});

  log_stats_->getNode().insertStat("SL0TL0", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL0TL1", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL0TL2", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL0TL3", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL1TL0", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL1TL1", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL1TL2", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL1TL3", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL2TL0", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL2TL1", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL2TL2", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL2TL3", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL3TL0", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL3TL1", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL3TL2", CumulativeStat{0});
  log_stats_->getNode().insertStat("SL3TL3", CumulativeStat{0});

  log_stats_->getNode().insertStat("maxActiveSL", CumulativeStat{0});
  log_stats_->getNode().insertStat("maxActiveTL", CumulativeStat{0});
  log_stats_->getNode().insertStat("selectedSL", CumulativeStat{0});
  log_stats_->getNode().insertStat("selectedTL", CumulativeStat{0});
  log_stats_->getNode().insertStat("isPublisher", CumulativeStat{is_publisher_});

  log_stats_->getNode().insertStat("totalBitrate", CumulativeStat{0});
  log_stats_->getNode().insertStat("rtxBitrate", CumulativeStat{0});
  log_stats_->getNode().insertStat("paddingBitrate", CumulativeStat{0});
  log_stats_->getNode().insertStat("bwe", CumulativeStat{0});

  log_stats_->getNode().insertStat("maxVideoBW", CumulativeStat{0});
  log_stats_->getNode().insertStat("qualityCappedByConstraints", CumulativeStat{0});

  std::weak_ptr<MediaStream> weak_this = shared_from_this();
  worker_->scheduleEvery([weak_this] () {
    if (auto stream = weak_this.lock()) {
      if (stream->sending_) {
        stream->printStats();
        return true;
      }
    }
    return false;
  }, kStreamStatsPeriod);
}

// Copy one quality-layer counter from stats_ into log_stats_ under the key
// "SL<spatial>TL<temporal>", if that layer exists in the stats tree.
void MediaStream::transferLayerStats(std::string spatial, std::string temporal) {
  const std::string target = "SL" + spatial + "TL" + temporal;
  if (!stats_->getNode().hasChild("qualityLayers")) {
    return;
  }
  if (!stats_->getNode()["qualityLayers"].hasChild(spatial)) {
    return;
  }
  if (!stats_->getNode()["qualityLayers"][spatial].hasChild(temporal)) {
    return;
  }
  auto layer_value = stats_->getNode()["qualityLayers"][spatial][temporal].value();
  log_stats_->getNode().insertStat(target, CumulativeStat{layer_value});
}

// Copy one media counter from stats_ (at [source_parent][source_node]) into
// log_stats_ under target_node; a no-op when the source path does not exist.
void MediaStream::transferMediaStats(std::string target_node, std::string source_parent, std::string source_node) {
  if (!stats_->getNode().hasChild(source_parent)) {
    return;
  }
  if (!stats_->getNode()[source_parent].hasChild(source_node)) {
    return;
  }
  auto source_value = stats_->getNode()[source_parent][source_node].value();
  log_stats_->getNode().insertStat(target_node, CumulativeStat{source_value});
}

// Refresh every entry in log_stats_ from the live stats_ tree and emit one
// log line with the whole snapshot. Runs periodically (scheduled from
// initializeStats) while the stream is sending.
void MediaStream::printStats() {
  std::string video_ssrc;
  std::string audio_ssrc;

  log_stats_->getNode().insertStat("audioEnabled", CumulativeStat{audio_enabled_});
  log_stats_->getNode().insertStat("videoEnabled", CumulativeStat{video_enabled_});

  log_stats_->getNode().insertStat("maxVideoBW", CumulativeStat{getMaxVideoBW()});

  transferMediaStats("qualityCappedByConstraints", "qualityLayers", "qualityCappedByConstraints");

  if (audio_enabled_) {
    // Publishers report on the source SSRC (incoming), subscribers on the
    // sink SSRC (outgoing).
    audio_ssrc = std::to_string(is_publisher_ ? getAudioSourceSSRC() : getAudioSinkSSRC());
    transferMediaStats("audioBitrate", audio_ssrc, "bitrateCalculated");
    transferMediaStats("audioPL",      audio_ssrc, "packetsLost");
    transferMediaStats("audioFL",      audio_ssrc, "fractionLost");
    transferMediaStats("audioJitter",  audio_ssrc, "jitter");
    transferMediaStats("audioMuted",   audio_ssrc, "erizoAudioMute");
    transferMediaStats("audioNack",    audio_ssrc, "NACK");
    transferMediaStats("audioRemb",    audio_ssrc, "bandwidth");
  }
  if (video_enabled_) {
    video_ssrc = std::to_string(is_publisher_ ? getVideoSourceSSRC() : getVideoSinkSSRC());
    transferMediaStats("videoBitrate", video_ssrc, "bitrateCalculated");
    transferMediaStats("videoPL",      video_ssrc, "packetsLost");
    transferMediaStats("videoFL",      video_ssrc, "fractionLost");
    transferMediaStats("videoJitter",  video_ssrc, "jitter");
    // Fix: erizoVideoMute lives under the VIDEO SSRC node. The original read
    // it from audio_ssrc, which is the wrong node and is an empty string
    // whenever audio is disabled.
    transferMediaStats("videoMuted",   video_ssrc, "erizoVideoMute");
    transferMediaStats("slideshow",    video_ssrc, "erizoSlideShow");
    transferMediaStats("videoNack",    video_ssrc, "NACK");
    transferMediaStats("videoPli",     video_ssrc, "PLI");
    transferMediaStats("videoFir",     video_ssrc, "FIR");
    transferMediaStats("videoRemb",    video_ssrc, "bandwidth");
    transferMediaStats("videoErizoRemb", video_ssrc, "erizoBandwidth");
    transferMediaStats("videoKeyFrames", video_ssrc, "keyFrames");
  }

  // Refresh all 4x4 spatial/temporal layer counters.
  for (uint32_t spatial = 0; spatial <= 3; spatial++) {
    for (uint32_t temporal = 0; temporal <= 3; temporal++) {
      transferLayerStats(std::to_string(spatial), std::to_string(temporal));
    }
  }

  transferMediaStats("maxActiveSL", "qualityLayers", "maxActiveSpatialLayer");
  transferMediaStats("maxActiveTL", "qualityLayers", "maxActiveTemporalLayer");
  transferMediaStats("selectedSL", "qualityLayers", "selectedSpatialLayer");
  transferMediaStats("selectedTL", "qualityLayers", "selectedTemporalLayer");
  transferMediaStats("totalBitrate", "total", "bitrateCalculated");
  transferMediaStats("paddingBitrate", "total", "paddingBitrate");
  transferMediaStats("rtxBitrate", "total", "rtxBitrate");
  transferMediaStats("bwe", "total", "senderBitrateEstimation");

  ELOG_INFOT(statsLogger, "%s", log_stats_->getStats());
}

// Initialize the packet-processing pipeline (idempotent).
// Registers services (shared_from_this(), handler_manager_, rtcp_processor_,
// stats_, quality_manager_, packet_buffer_) and then installs the handler
// chain. NOTE(review): each addFront prepends, so the registration order here
// determines the processing order — PacketReader/PacketWriter bracket the
// chain; do not reorder without checking the pipeline semantics.
void MediaStream::initializePipeline() {
  // Already built — nothing to do.
  if (pipeline_initialized_) {
    return;
  }
  handler_manager_ = std::make_shared<HandlerManager>(shared_from_this());
  pipeline_->addService(shared_from_this());
  pipeline_->addService(handler_manager_);
  pipeline_->addService(rtcp_processor_);
  pipeline_->addService(stats_);
  pipeline_->addService(quality_manager_);
  pipeline_->addService(packet_buffer_);

  // Reader endpoint (raw pointer: the stream owns the pipeline, so `this`
  // outlives it — presumably; verify teardown order).
  pipeline_->addFront(std::make_shared<PacketReader>(this));

  pipeline_->addFront(std::make_shared<RtcpProcessorHandler>());
  pipeline_->addFront(std::make_shared<FecReceiverHandler>());
  pipeline_->addFront(std::make_shared<LayerBitrateCalculationHandler>());
  pipeline_->addFront(std::make_shared<QualityFilterHandler>());
  pipeline_->addFront(std::make_shared<IncomingStatsHandler>());
  pipeline_->addFront(std::make_shared<FakeKeyframeGeneratorHandler>());
  pipeline_->addFront(std::make_shared<RtpTrackMuteHandler>());
  pipeline_->addFront(std::make_shared<RtpSlideShowHandler>());
  pipeline_->addFront(std::make_shared<RtpPaddingGeneratorHandler>());
  pipeline_->addFront(std::make_shared<PliPacerHandler>());
  pipeline_->addFront(std::make_shared<BandwidthEstimationHandler>());
  pipeline_->addFront(std::make_shared<RtpPaddingRemovalHandler>());
  pipeline_->addFront(std::make_shared<RtcpFeedbackGenerationHandler>());
  pipeline_->addFront(std::make_shared<RtpRetransmissionHandler>());
  pipeline_->addFront(std::make_shared<SRPacketHandler>());
  pipeline_->addFront(std::make_shared<SenderBandwidthEstimationHandler>());
  pipeline_->addFront(std::make_shared<LayerDetectorHandler>());
  pipeline_->addFront(std::make_shared<OutgoingStatsHandler>());
  pipeline_->addFront(std::make_shared<PacketCodecParser>());

  // Writer endpoint closes the chain.
  pipeline_->addFront(std::make_shared<PacketWriter>(this));
  pipeline_->finalize();
  pipeline_initialized_ = true;
}

// Deliver an audio packet to this stream: forward a copy asynchronously when
// audio is enabled, and always report the packet length as consumed.
int MediaStream::deliverAudioData_(std::shared_ptr<DataPacket> audio_packet) {
  const int consumed = audio_packet->length;
  if (!audio_enabled_) {
    return consumed;
  }
  auto packet_copy = std::make_shared<DataPacket>(*audio_packet);
  sendPacketAsync(packet_copy);
  return consumed;
}

// Deliver a video packet to this stream: forward a copy asynchronously when
// video is enabled, and always report the packet length as consumed.
int MediaStream::deliverVideoData_(std::shared_ptr<DataPacket> video_packet) {
  const int consumed = video_packet->length;
  if (!video_enabled_) {
    return consumed;
  }
  auto packet_copy = std::make_shared<DataPacket>(*video_packet);
  sendPacketAsync(packet_copy);
  return consumed;
}

// 发送反馈包
int MediaStream::deliverFeedback_(std::shared_ptr<DataPacket> fb_packet) {
  RtcpHeader *chead = reinterpret_cast<RtcpHeader*>(fb_packet->data);
  uint32_t recvSSRC = chead->getSourceSSRC();
  if (chead->isREMB()) {
    for (uint8_t index = 0; index < chead->getREMBNumSSRC(); index++) {
      uint32_t ssrc = chead->getREMBFeedSSRC(index);
      if (isVideoSourceSSRC(ssrc)) {
        recvSSRC = ssrc;
        break;
      }
    }
  }
  if (isVideoSourceSSRC(recvSSRC)) {
    fb_packet->type = VIDEO_PACKET;
    sendPacketAsync(fb_packet);
  } else if (isAudioSourceSSRC(recvSSRC)) {
    fb_packet->type = AUDIO_PACKET;
    sendPacketAsync(fb_packet);
  } else {
    ELOG_DEBUG("%s deliverFeedback unknownSSRC: %u, localVideoSSRC: %u, localAudioSSRC: %u",
                toLog(), recvSSRC, this->getVideoSourceSSRC(), this->getAudioSourceSSRC());
  }
  return fb_packet->length;
}

// Deliver a media event: post it to the worker thread, where it is forwarded
// to the pipeline if the pipeline has been initialized. Always returns 1.
// Capturing shared_from_this() keeps the stream alive until the task runs.
int MediaStream::deliverEvent_(MediaEventPtr event) {
  std::shared_ptr<MediaStream> stream = shared_from_this();
  worker_->task([stream, event] {
    if (stream->pipeline_initialized_ && stream->pipeline_) {
      stream->pipeline_->notifyEvent(event);
    }
  });
  return 1;
}

 

おすすめ

転載: blog.csdn.net/tong5956/article/details/108262548