Análisis de código fuente de MediaStream (3)

Servicio Licode y análisis del proceso de inicio.

Análisis de código fuente de MediaStream (1)

Análisis del código fuente de MediaStream (2)

Análisis de código fuente de MediaStream (3)

Análisis de código fuente de WebRtcConnection (1)

Análisis del código fuente de WebRtcConnection (2)

Análisis de código fuente de WebRtcConnection (3)

MediaStream es un poco difícil de entender:

// Entry point for media arriving from a Transport: clones the packet, tags
// its media type, and forwards it to the pipeline on the worker thread.
void MediaStream::onTransportData(std::shared_ptr<DataPacket> incoming_packet, Transport *transport) {
  // No sinks attached — nothing could consume the packet, drop early.
  if ((audio_sink_ == nullptr && video_sink_ == nullptr && fb_sink_ == nullptr)) {
    return;
  }

  // Work on a private copy so later in-place rewrites (payload type,
  // extension ids) do not mutate a packet other consumers may hold.
  std::shared_ptr<DataPacket> packet = std::make_shared<DataPacket>(*incoming_packet);

  // First guess at the type from the transport that delivered it.
  if (transport->mediaType == AUDIO_TYPE) {
    packet->type = AUDIO_PACKET;
  } else if (transport->mediaType == VIDEO_TYPE) {
    packet->type = VIDEO_PACKET;
  }
  // Capture a shared_ptr (not weak) so the stream stays alive until the
  // queued task has run.
  auto stream_ptr = shared_from_this();

  worker_->task([stream_ptr, packet]{
    if (!stream_ptr->pipeline_initialized_) {
      ELOG_DEBUG("%s message: Pipeline not initialized yet.", stream_ptr->toLog());
      return;
    }

    char* buf = packet->data;
    RtpHeader *head = reinterpret_cast<RtpHeader*> (buf);
    RtcpHeader *chead = reinterpret_cast<RtcpHeader*> (buf);
    // For plain RTP, refine the type using the packet's SSRC rather than
    // trusting the transport's media type alone.
    if (!chead->isRtcp()) {
      uint32_t recvSSRC = head->getSSRC();
      if (stream_ptr->isVideoSourceSSRC(recvSSRC)) {
        packet->type = VIDEO_PACKET;
      } else if (stream_ptr->isAudioSourceSSRC(recvSSRC)) {
        packet->type = AUDIO_PACKET;
      }
    }

    if (stream_ptr->pipeline_) {
      stream_ptr->pipeline_->read(std::move(packet));
    }
  });
}

// Routes an incoming packet to the proper sink: feedback goes to fb_sink_;
// media (RTP, or RTCP Sender Report/SDES) goes to audio_sink_/video_sink_
// after payload-type and extension-id remapping.
void MediaStream::read(std::shared_ptr<DataPacket> packet) {
  char* buf = packet->data;
  int len = packet->length;
  // PROCESS RTCP
  RtpHeader *head = reinterpret_cast<RtpHeader*> (buf);
  RtcpHeader *chead = reinterpret_cast<RtcpHeader*> (buf);
  uint32_t recvSSRC = 0;
  if (!chead->isRtcp()) {
    recvSSRC = head->getSSRC();
  } else if (chead->packettype == RTCP_Sender_PT || chead->packettype == RTCP_SDES_PT) {  // Sender Report
    recvSSRC = chead->getSSRC();
  }
  // DELIVER FEEDBACK (RR, FEEDBACK PACKETS)
  if (chead->isFeedback()) {
    // should_send_feedback_ gates forwarding; see setFeedbackReports.
    if (fb_sink_ != nullptr && should_send_feedback_) {
      fb_sink_->deliverFeedback(std::move(packet));
    }
  } else {
    // RTP or RTCP Sender Report
    if (bundle_) {
      // Check incoming SSRC
      // Deliver data
      if (isVideoSourceSSRC(recvSSRC) && video_sink_) {
        parseIncomingPayloadType(buf, len, VIDEO_PACKET);
        parseIncomingExtensionId(buf, len, VIDEO_PACKET);
        video_sink_->deliverVideoData(std::move(packet));
      } else if (isAudioSourceSSRC(recvSSRC) && audio_sink_) {
        parseIncomingPayloadType(buf, len, AUDIO_PACKET);
        parseIncomingExtensionId(buf, len, AUDIO_PACKET);
        audio_sink_->deliverAudioData(std::move(packet));
      } else {
        ELOG_DEBUG("%s read video unknownSSRC: %u, localVideoSSRC: %u, localAudioSSRC: %u",
                    toLog(), recvSSRC, this->getVideoSourceSSRC(), this->getAudioSourceSSRC());
      }
    } else {
      // Non-bundle: trust the packet type set upstream (onTransportData).
      if (packet->type == AUDIO_PACKET && audio_sink_) {
        parseIncomingPayloadType(buf, len, AUDIO_PACKET);
        parseIncomingExtensionId(buf, len, AUDIO_PACKET);
        // Firefox does not send SSRC in SDP
        if (getAudioSourceSSRC() == 0) {
          // Learn the audio SSRC from the first packet seen.
          ELOG_DEBUG("%s discoveredAudioSourceSSRC:%u", toLog(), recvSSRC);
          this->setAudioSourceSSRC(recvSSRC);
        }
        audio_sink_->deliverAudioData(std::move(packet));
      } else if (packet->type == VIDEO_PACKET && video_sink_) {
        parseIncomingPayloadType(buf, len, VIDEO_PACKET);
        parseIncomingExtensionId(buf, len, VIDEO_PACKET);
        // Firefox does not send SSRC in SDP
        if (getVideoSourceSSRC() == 0) {
          // Learn the video SSRC from the first packet seen.
          ELOG_DEBUG("%s discoveredVideoSourceSSRC:%u", toLog(), recvSSRC);
          this->setVideoSourceSSRC(recvSSRC);
        }
        // change ssrc for RTP packets, don't touch here if RTCP
        video_sink_->deliverVideoData(std::move(packet));
      }
    }  // if not bundle
  }  // if not Feedback
}

// Installs the listener that will receive media-stream events.
void MediaStream::setMediaStreamEventListener(MediaStreamEventListener* listener) {
  boost::mutex::scoped_lock guard(event_listener_mutex_);
  media_stream_event_listener_ = listener;
}

// media_stream_event_listener_回调通知事件
void MediaStream::notifyMediaStreamEvent(const std::string& type, const std::string& message) {
  boost::mutex::scoped_lock lock(event_listener_mutex_);
  if (this->media_stream_event_listener_ != nullptr) {
    media_stream_event_listener_->notifyMediaStreamEvent(type, message);
  }
}

// Delivers an event through event_sink_.
// Fix: guard against a null sink, for consistency with the other
// sink/connection accessors in this file (sendPLIToFeedback checks
// fb_sink_, write checks connection_) — previously a null event_sink_
// would be dereferenced.
void MediaStream::notifyToEventSink(MediaEventPtr event) {
  if (event_sink_) {
    event_sink_->deliverEvent(std::move(event));
  }
}

// 发送PLI包,重新发送关键帧的请求
int MediaStream::sendPLI() {
  RtcpHeader thePLI;
  thePLI.setPacketType(RTCP_PS_Feedback_PT);
  thePLI.setBlockCount(1);
  thePLI.setSSRC(this->getVideoSinkSSRC());
  thePLI.setSourceSSRC(this->getVideoSourceSSRC());
  thePLI.setLength(2);
  char *buf = reinterpret_cast<char*>(&thePLI);
  int len = (thePLI.getLength() + 1) * 4;
  sendPacketAsync(std::make_shared<DataPacket>(0, buf, len, VIDEO_PACKET));
  return len;
}

// 通过fb_sink_将PLI请求发送出去
void MediaStream::sendPLIToFeedback() {
  if (fb_sink_) {
    fb_sink_->deliverFeedback(RtpUtils::createPLI(this->getVideoSinkSSRC(),
      this->getVideoSourceSSRC()));
  }
}

// Asynchronously sends a packet on the worker thread, after rewriting the
// outgoing payload type and RTP extension ids for this stream.
void MediaStream::sendPacketAsync(std::shared_ptr<DataPacket> packet) {
  if (!sending_) {
    return;
  }
  auto stream_ptr = shared_from_this();
  // comp == -1 is a shutdown sentinel: stop sending and push a bare
  // sentinel packet through so the pipeline can observe it.
  if (packet->comp == -1) {
    sending_ = false;
    auto p = std::make_shared<DataPacket>();
    p->comp = -1;
    worker_->task([stream_ptr, p]{
      stream_ptr->sendPacket(p);
    });
    return;
  }

  // The rewrites happen on the caller's thread, before the task is queued.
  changeDeliverPayloadType(packet.get(), packet->type);
  changeDeliverExtensionId(packet.get(), packet->type);
  worker_->task([stream_ptr, packet]{
    stream_ptr->sendPacket(packet);
  });
}

// Toggles slide-show mode, records the change in the stats tree, and
// notifies the pipeline handlers.
void MediaStream::setSlideShowMode(bool state) {
  ELOG_DEBUG("%s slideShowMode: %u", toLog(), state);
  if (slide_show_mode_ == state) {
    return;  // no change — skip the stats write and handler notification
  }
  asyncTask([state] (std::shared_ptr<MediaStream> media_stream) {
    media_stream->stats_->getNode()[media_stream->getVideoSinkSSRC()].insertStat(
      "erizoSlideShow",
       CumulativeStat{state});
  });
  // The flag flips synchronously; the stats write above runs later on the
  // worker thread.
  slide_show_mode_ = state;
  notifyUpdateToHandlers();
}

// Mutes/unmutes audio and video, records both states in the stats tree,
// and notifies the pipeline so handlers can react.
void MediaStream::muteStream(bool mute_video, bool mute_audio) {
  asyncTask([mute_audio, mute_video] (std::shared_ptr<MediaStream> media_stream) {
    ELOG_DEBUG("%s message: muteStream, mute_video: %u, mute_audio: %u", media_stream->toLog(), mute_video, mute_audio);
    media_stream->audio_muted_ = mute_audio;
    media_stream->video_muted_ = mute_video;
    media_stream->stats_->getNode()[media_stream->getAudioSinkSSRC()].insertStat("erizoAudioMute",
                                                                             CumulativeStat{mute_audio});
    // Fix: the video-mute stat was filed under the *audio* sink SSRC node;
    // it belongs under the video sink SSRC.
    media_stream->stats_->getNode()[media_stream->getVideoSinkSSRC()].insertStat("erizoVideoMute",
                                                                             CumulativeStat{mute_video});
    // asyncTask only invokes the callback with a live stream, so only the
    // pipeline pointer needs checking here.
    if (media_stream->pipeline_) {
      media_stream->pipeline_->notifyUpdate();
    }
  });
}

// Applies maximum width/height/frame-rate constraints to the video, via the
// quality manager, on the worker thread.
void MediaStream::setVideoConstraints(int max_video_width, int max_video_height, int max_video_frame_rate) {
  asyncTask([max_video_width, max_video_height, max_video_frame_rate] (std::shared_ptr<MediaStream> stream) {
    stream->quality_manager_->setVideoConstraints(max_video_width, max_video_height, max_video_frame_rate);
  });
}

// 设置audio和video的clientHostType统计信息
void MediaStream::setTransportInfo(std::string audio_info, std::string video_info) {
  if (video_enabled_) {
    uint32_t video_sink_ssrc = getVideoSinkSSRC();
    uint32_t video_source_ssrc = getVideoSourceSSRC();

    if (video_sink_ssrc != kDefaultVideoSinkSSRC) {
      stats_->getNode()[video_sink_ssrc].insertStat("clientHostType", StringStat{video_info});
    }
    if (video_source_ssrc != 0) {
      stats_->getNode()[video_source_ssrc].insertStat("clientHostType", StringStat{video_info});
    }
  }

  if (audio_enabled_) {
    uint32_t audio_sink_ssrc = getAudioSinkSSRC();
    uint32_t audio_source_ssrc = getAudioSourceSSRC();

    if (audio_sink_ssrc != kDefaultAudioSinkSSRC) {
      stats_->getNode()[audio_sink_ssrc].insertStat("clientHostType", StringStat{audio_info});
    }
    if (audio_source_ssrc != 0) {
      stats_->getNode()[audio_source_ssrc].insertStat("clientHostType", StringStat{audio_info});
    }
  }
}

// Configures whether feedback packets are forwarded and sets the target
// bitrate used by sendPacket's rate control.
void MediaStream::setFeedbackReports(bool will_send_fb, uint32_t target_bitrate) {
  // In slide-show mode rate control is disabled (0 means "no cap").
  if (slide_show_mode_) {
    target_bitrate = 0;
  }

  this->should_send_feedback_ = will_send_fb;
  // target_bitrate == 1 is a sentinel meaning "disable video entirely";
  // sendPacket drops video packets when rate_control_ == 1.
  if (target_bitrate == 1) {
    this->video_enabled_ = false;
  }
  this->rate_control_ = target_bitrate;
}

// Copies the metadata entries into the log stats (keys prefixed with
// "metadata-") and installs them as the logging context.
void MediaStream::setMetadata(std::map<std::string, std::string> metadata) {
  for (const auto &entry : metadata) {
    log_stats_->getNode().insertStat("metadata-" + entry.first, StringStat{entry.second});
  }
  setLogContext(metadata);
}

// Returns the current WebRTC state of the owning connection.
// NOTE(review): connection_ is not null-checked here, unlike write() —
// confirm it is always set before callers reach this.
WebRTCEvent MediaStream::getCurrentState() {
  return connection_->getCurrentState();
}

// Collects the stats as a JSON string on the worker thread and hands the
// result to the supplied callback.
void MediaStream::getJSONStats(std::function<void(std::string)> callback) {
  asyncTask([callback] (std::shared_ptr<MediaStream> media_stream) {
    callback(media_stream->stats_->getStats());
  });
}

// Rewrites the RTP header-extension ids of an outgoing packet from the
// internal numbering to the externally negotiated one (extMap is indexed
// by external id; value is the internal id).
void MediaStream::changeDeliverExtensionId(DataPacket *dp, packetType type) {
  RtpHeader* h = reinterpret_cast<RtpHeader*>(dp->data);
  RtcpHeader *chead = reinterpret_cast<RtcpHeader*>(dp->data);
  if (!chead->isRtcp()) {
    // Extension Id to external
    if (h->getExtension()) {
      std::array<RTPExtensions, 15> extMap;
      RtpExtensionProcessor& ext_processor = getRtpExtensionProcessor();
      switch (type) {
        case VIDEO_PACKET:
          extMap = ext_processor.getVideoExtensionMap();
          break;
        case AUDIO_PACKET:
          extMap = ext_processor.getAudioExtensionMap();
          break;
        default:
          ELOG_WARN("%s Won't process RTP extensions for unknown type packets", toLog());
          return;
          break;
      }
      uint16_t totalExtLength = h->getExtLength();  // in 32-bit words (see *4 below)
      if (h->getExtId() == 0xBEDE) {  // One-Byte Header
        char* extBuffer = (char*)&h->extensions;  // NOLINT
        uint8_t extByte = 0;
        uint16_t currentPlace = 1;
        uint8_t extId = 0;
        uint8_t extLength = 0;
        while (currentPlace < (totalExtLength*4)) {
          extByte = (uint8_t)(*extBuffer);
          extId = extByte >> 4;        // high nibble: extension id
          extLength = extByte & 0x0F;  // low nibble: data length - 1
          // extId == 0 should never happen, see https://tools.ietf.org/html/rfc5285#section-4.2
          if (extId != 0) {
            for (int i = 1; i < 15; i++) {
              if (extMap.at(i) == extId) {
                // Replace the id nibble with i, preserving the length nibble.
                extBuffer[0] = (extBuffer[0] | 0xF0) & (i << 4 | 0x0F);
                // NOTE(review): no break here — if another slot maps to the
                // freshly written id it could be rewritten again; confirm the
                // extension maps are injective.
              }
            }
          }
          // Skip the 1-byte id/len header plus (extLength + 1) data bytes.
          extBuffer = extBuffer + extLength + 2;
          currentPlace = currentPlace + extLength + 2;
        }
      } else {
        ELOG_WARN("%s Two-Byte Header not handled!", toLog());
      }
    }
  }
}

// Rewrites the RTP payload type from the internal value to the external
// value negotiated in the remote SDP. RTCP packets are left untouched.
void MediaStream::changeDeliverPayloadType(DataPacket *dp, packetType type) {
  RtpHeader* h = reinterpret_cast<RtpHeader*>(dp->data);
  RtcpHeader* chead = reinterpret_cast<RtcpHeader*>(dp->data);
  if (chead->isRtcp()) {
    return;
  }
  int internalPT = h->getPayloadType();
  int externalPT = internalPT;
  if (type == AUDIO_PACKET) {
    externalPT = remote_sdp_->getAudioExternalPT(internalPT);
  } else if (type == VIDEO_PACKET) {
    // Fix: look up with internalPT. The video branch passed externalPT,
    // which is equal at this point so behavior is unchanged, but it was
    // inconsistent with the audio branch and fragile to reordering.
    externalPT = remote_sdp_->getVideoExternalPT(internalPT);
  }
  if (internalPT != externalPT) {
    h->setPayloadType(externalPT);
  }
}

// Rewrites the RTP header-extension ids of an incoming packet to this
// stream's internal numbering (extMap is indexed by the external id).
void MediaStream::parseIncomingExtensionId(char *buf, int len, packetType type) {
  RtcpHeader* chead = reinterpret_cast<RtcpHeader*>(buf);
  RtpHeader* h = reinterpret_cast<RtpHeader*>(buf);
  if (!chead->isRtcp()) {
    // Extension Id to internal
    if (h->getExtension()) {
      std::array<RTPExtensions, 15> extMap;
      RtpExtensionProcessor& ext_processor = getRtpExtensionProcessor();
      switch (type) {
        case VIDEO_PACKET:
          extMap = ext_processor.getVideoExtensionMap();
          break;
        case AUDIO_PACKET:
          extMap = ext_processor.getAudioExtensionMap();
          break;
        default:
          ELOG_WARN("%s Won't process RTP extensions for unknown type packets", toLog());
          return;
          break;
      }
      uint16_t totalExtLength = h->getExtLength();  // in 32-bit words (see *4 below)
      if (h->getExtId() == 0xBEDE) {  // One-Byte Header
        char* extBuffer = (char*)&h->extensions;  // NOLINT
        uint8_t extByte = 0;
        uint16_t currentPlace = 1;
        uint8_t extId = 0;
        uint8_t extLength = 0;
        while (currentPlace < (totalExtLength*4)) {
          extByte = (uint8_t)(*extBuffer);
          extId = extByte >> 4;        // high nibble: extension id
          extLength = extByte & 0x0F;  // low nibble: data length - 1
          // Only remap ids that have a non-zero internal mapping.
          if (extId != 0 && extMap[extId] != 0) {
            // Replace the id nibble with the internal id, preserving the length.
            extBuffer[0] = (extBuffer[0] | 0xF0) & (extMap[extId] << 4 | 0x0F);
          }
          // Skip the 1-byte id/len header plus (extLength + 1) data bytes.
          extBuffer = extBuffer + extLength + 2;
          currentPlace = currentPlace + extLength + 2;
        }
      } else {
        ELOG_WARN("%s Two-Byte Header not handled!", toLog());
      }
    }
  }
}

// 解析出数据包中的载荷类型,并更换这个载荷类型
// parses incoming payload type, replaces occurence in buf
void MediaStream::parseIncomingPayloadType(char *buf, int len, packetType type) {
  RtcpHeader* chead = reinterpret_cast<RtcpHeader*>(buf);
  RtpHeader* h = reinterpret_cast<RtpHeader*>(buf);
  if (!chead->isRtcp()) {
    int externalPT = h->getPayloadType();
    int internalPT = externalPT;
    if (type == AUDIO_PACKET) {
      internalPT = remote_sdp_->getAudioInternalPT(externalPT);
    } else if (type == VIDEO_PACKET) {
      internalPT = remote_sdp_->getVideoInternalPT(externalPT);
    }
    if (externalPT != internalPT) {
      h->setPayloadType(internalPT);
    } else {
//        ELOG_WARN("onTransportData did not find mapping for %i", externalPT);
    }
  }
}

// Hands a packet to the owning connection for transmission.
void MediaStream::write(std::shared_ptr<DataPacket> packet) {
  if (!connection_) {
    return;
  }
  connection_->write(packet);
}

// Enables the pipeline handler with the given name (runs on the worker).
void MediaStream::enableHandler(const std::string &name) {
  asyncTask([name] (std::shared_ptr<MediaStream> stream) {
    if (stream && stream->pipeline_) {
      stream->pipeline_->enable(name);
    }
  });
}

// Disables the pipeline handler with the given name (runs on the worker).
void MediaStream::disableHandler(const std::string &name) {
  asyncTask([name] (std::shared_ptr<MediaStream> stream) {
    if (stream && stream->pipeline_) {
      stream->pipeline_->disable(name);
    }
  });
}

// Asks the pipeline to refresh its handlers' view of the stream state.
void MediaStream::notifyUpdateToHandlers() {
  asyncTask([] (std::shared_ptr<MediaStream> stream) {
    if (stream && stream->pipeline_) {
      stream->pipeline_->notifyUpdate();
    }
  });
}

// Schedules f on the worker thread, bound to a weak reference of this
// stream; returns a future that completes when the task has run.
boost::future<void> MediaStream::asyncTask(std::function<void(std::shared_ptr<MediaStream>)> f) {
  auto task_promise = std::make_shared<boost::promise<void>>();
  // weak_ptr so a queued task does not keep a destroyed stream alive.
  std::weak_ptr<MediaStream> weak_this = shared_from_this();
  worker_->task([weak_this, f, task_promise] {
    if (auto this_ptr = weak_this.lock()) {
      f(this_ptr);
    }
    // Fulfilled even if the stream is already gone, so waiters on the
    // returned future never block forever.
    task_promise->set_value();
  });
  return task_promise->get_future();
}

// Final send step: applies (best-effort) video rate control, then writes
// the packet into the outgoing pipeline.
void MediaStream::sendPacket(std::shared_ptr<DataPacket> p) {
  if (!sending_) {
    return;
  }
  uint32_t partial_bitrate = 0;
  // NOTE(review): these counters are locals re-initialized to 0 on every
  // call, so partial_bitrate below is always 0 and the cap never trips.
  // They look like they were meant to be members — confirm intent upstream.
  uint64_t sentVideoBytes = 0;
  uint64_t lastSecondVideoBytes = 0;

  if (rate_control_ && !slide_show_mode_) {
    if (p->type == VIDEO_PACKET) {
      // rate_control_ == 1 is a sentinel meaning "drop all video"
      // (see setFeedbackReports).
      if (rate_control_ == 1) {
        return;
      }
      now_ = clock::now();
      // Roll the measurement window once per control period.
      if ((now_ - mark_) >= kBitrateControlPeriod) {
        mark_ = now_;
        lastSecondVideoBytes = sentVideoBytes;
      }
      // Bytes in the window * 8 (bits) * 10 — presumably the control period
      // is 100 ms; confirm against kBitrateControlPeriod.
      partial_bitrate = ((sentVideoBytes - lastSecondVideoBytes) * 8) * 10;
      if (partial_bitrate > this->rate_control_) {
        return;  // over the cap — drop this video packet
      }
      sentVideoBytes += p->length;
    }
  }
  if (!pipeline_initialized_) {
    ELOG_DEBUG("%s message: Pipeline not initialized yet.", toLog());
    return;
  }

  if (pipeline_) {
    pipeline_->write(std::move(p));
  }
}

// Forces the quality manager to a fixed spatial/temporal layer pair.
void MediaStream::setQualityLayer(int spatial_layer, int temporal_layer) {
  asyncTask([spatial_layer, temporal_layer] (std::shared_ptr<MediaStream> stream) {
    stream->quality_manager_->forceLayers(spatial_layer, temporal_layer);
  });
}
// Enables/disables slide-show behavior below the given spatial layer.
void MediaStream::enableSlideShowBelowSpatialLayer(bool enabled, int spatial_layer) {
  asyncTask([enabled, spatial_layer] (std::shared_ptr<MediaStream> stream) {
    stream->quality_manager_->enableSlideShowBelowSpatialLayer(enabled, spatial_layer);
  });
}

 

Quizás también te guste

Origin blog.csdn.net/tong5956/article/details/108263107
Recomendado
Clasificación