EasyRTSPServer, a simple-to-integrate, multi-platform RTSP-Server component: how to make the vps (H265) & sps & pps in the SDP optional

Background

RTSP (Real Time Streaming Protocol) is an application-layer protocol, jointly proposed by RealNetworks and Netscape, for delivering streaming media efficiently over IP networks. RTSP offers controls over a stream such as pause and fast-forward, but it does not carry the media data itself; it essentially acts as the remote control of a streaming server. The server side may choose either TCP or UDP to deliver the stream content. Its syntax and operation resemble HTTP/1.1, but it places no particular emphasis on time synchronization, so it is fairly tolerant of network latency. It also allows multiple stream requests to be controlled at the same time (multicast), which not only reduces the server's network usage but also enables multi-party video conferencing.
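For reference, the SDP that this article modifies travels in the body of the RTSP DESCRIBE reply. A minimal, purely illustrative exchange is sketched below; the address, CSeq values, payload type, profile-level-id and the <base64 SPS>/<base64 PPS> placeholders are made-up examples, not output captured from EasyRTSPServer:

DESCRIBE rtsp://192.168.1.100:554/ch1 RTSP/1.0
CSeq: 2
Accept: application/sdp

RTSP/1.0 200 OK
CSeq: 2
Content-Type: application/sdp
Content-Length: ...

v=0
o=- 0 0 IN IP4 192.168.1.100
s=ch1
t=0 0
m=video 0 RTP/AVP 96
a=rtpmap:96 H264/90000
a=fmtp:96 packetization-mode=1;profile-level-id=42001F;sprop-parameter-sets=<base64 SPS>,<base64 PPS>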

About EasyRTSPServer

EasyRTSPServer is a stable, efficient, reliable, multi-platform RTSP-Server component with a very simple and mature API. Callers do not have to deal with client connection handling, audio/video multiplexing, the details of the RTSP workflow, or RTP packetization and sending inside the RTSPServer. It supports a variety of audio and video formats, so there is no need to implement the whole OPTIONS/DESCRIBE/SETUP/PLAY/RTP/RTCP flow yourself or worry about memory management, which makes it a good fit for security surveillance, education, Internet live streaming and similar scenarios.

(Figure: EasyRTSPServer architecture, EasyRTSPServer架构.png)

Making the vps (H265) & sps & pps in the SDP optional in EasyRTSPServer

The problem:

When EasyRTSPServer acts as a relay server, it sometimes has to answer a client's request before the upstream stream has arrived. In the official live555 code, SDP generation simply fails if the vps (H265) & sps & pps are not filled in, so this flow has to be modified to keep the interaction with the client working.

Analysis:

Locate the place where the SDP information is generated, namely the auxSDPLine() methods of the H.264/H.265 RTP sinks in live555, and adjust the logic there; a sketch of how that line reaches the DESCRIBE reply follows below.
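In live555, the "a=fmtp:" line produced by auxSDPLine() reaches the session's SDP through OnDemandServerMediaSubsession::getAuxSDPLine(), whose default implementation simply forwards to the RTP sink. The minimal sketch below only illustrates that hook; the class name DemoSubsession and the stubbed-out factory methods are hypothetical placeholders, not part of EasyRTSPServer:

#include "liveMedia.hh"

// Hypothetical subsession, shown only to illustrate where auxSDPLine() is consumed.
class DemoSubsession : public OnDemandServerMediaSubsession {
public:
  DemoSubsession(UsageEnvironment& env)
    : OnDemandServerMediaSubsession(env, True /*reuseFirstSource*/) {}

protected:
  // Default live555 behavior: the SDP "a=fmtp:" line comes straight from the sink.
  // If H264VideoRTPSink::auxSDPLine()/H265VideoRTPSink::auxSDPLine() returns NULL
  // (parameter sets not seen yet), there is no usable fmtp line for the DESCRIBE
  // reply; that is exactly the case the modification below works around.
  virtual char const* getAuxSDPLine(RTPSink* rtpSink, FramedSource* /*inputSource*/) {
    return rtpSink == NULL ? NULL : rtpSink->auxSDPLine();
  }

  // Stubs kept for brevity; a real subsession creates its own source and
  // H.264/H.265 RTP sink here.
  virtual FramedSource* createNewStreamSource(unsigned /*clientSessionId*/,
                                              unsigned& estBitrate) {
    estBitrate = 500; // kbps, placeholder value
    return NULL;
  }
  virtual RTPSink* createNewRTPSink(Groupsock* /*rtpGroupsock*/,
                                    unsigned char /*rtpPayloadTypeIfDynamic*/,
                                    FramedSource* /*inputSource*/) {
    return NULL;
  }
};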

The fix:

The concrete changes are shown in the code below:

char const* H264VideoRTPSink::auxSDPLine() {
  // Generate a new "a=fmtp:" line each time, using our SPS and PPS (if we have them),
  // otherwise parameters from our framer source (in case they've changed since the last time that
  // we were called):
  H264or5VideoStreamFramer* framerSource = NULL;
  u_int8_t* vpsDummy = NULL; unsigned vpsDummySize = 0;
  u_int8_t* sps = fSPS; unsigned spsSize = fSPSSize;
  u_int8_t* pps = fPPS; unsigned ppsSize = fPPSSize;
  if (sps == NULL || pps == NULL) {
    // We need to get SPS and PPS from our framer source:
    if (fOurFragmenter == NULL) return NULL; // we don't yet have a fragmenter (and therefore not a source)
    framerSource = (H264or5VideoStreamFramer*)(fOurFragmenter->inputSource());
    if (framerSource == NULL) return NULL; // we don't yet have a source

    framerSource->getVPSandSPSandPPS(vpsDummy, vpsDummySize, sps, spsSize, pps, ppsSize);

    if (sps == NULL || pps == NULL) {
#if 1
      // add by gavin: SPS/PPS are not available yet (e.g. the upstream stream has
      // not arrived), so instead of failing, emit an "a=fmtp:" line with an empty
      // "sprop-parameter-sets" so that the DESCRIBE reply can still be generated.
      char const* fmtpFmt =
        "a=fmtp:%d packetization-mode=1"
        //";profile-level-id=%06X"
        ";sprop-parameter-sets=\r\n";
      unsigned fmtpFmtSize = (unsigned)strlen(fmtpFmt)
        + 3 /* max char len of the payload type */
        + 6 /* slack left over from the omitted profile-level-id */
        + 0;
      char* fmtp = new char[fmtpFmtSize];
      memset(fmtp, 0x00, fmtpFmtSize);
      sprintf(fmtp, fmtpFmt, rtpPayloadType());

      delete[] fFmtpSDPLine; fFmtpSDPLine = fmtp;
      return fFmtpSDPLine;
#else
      return NULL; // our source isn't ready
#endif
    }

  }

  // Set up the "a=fmtp:" SDP line for this stream:
  u_int8_t* spsWEB = new u_int8_t[spsSize]; // "WEB" means "Without Emulation Bytes"
  unsigned spsWEBSize = removeH264or5EmulationBytes(spsWEB, spsSize, sps, spsSize);
  if (spsWEBSize < 4) { // Bad SPS size => assume our source isn't ready
    delete[] spsWEB;
    return NULL;
  }
  u_int32_t profileLevelId = (spsWEB[1]<<16) | (spsWEB[2]<<8) | spsWEB[3];
  delete[] spsWEB;

  char* sps_base64 = base64Encode((char*)sps, spsSize);
  char* pps_base64 = base64Encode((char*)pps, ppsSize);

  char const* fmtpFmt =
    "a=fmtp:%d packetization-mode=1"
    ";profile-level-id=%06X"
    ";sprop-parameter-sets=%s,%s\r\n";
  unsigned fmtpFmtSize = (unsigned)strlen(fmtpFmt)
    + 3 /* max char len */
    + 6 /* 3 bytes in hex */
    + (unsigned)strlen(sps_base64) + (unsigned)strlen(pps_base64);
  char* fmtp = new char[fmtpFmtSize];
  sprintf(fmtp, fmtpFmt,
          rtpPayloadType(),
          profileLevelId,
          sps_base64, pps_base64);

  delete[] sps_base64;
  delete[] pps_base64;

  delete[] fFmtpSDPLine; fFmtpSDPLine = fmtp;
  return fFmtpSDPLine;
}
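With this change in place, when SPS/PPS have not yet been seen the H.264 DESCRIBE reply carries an fmtp line of the following form (96 is just an example payload type); the client then has to obtain the parameter sets from the in-band RTP stream once the upstream source starts delivering data:

a=fmtp:96 packetization-mode=1;sprop-parameter-sets=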


char const* H265VideoRTPSink::auxSDPLine() {
  // Generate a new "a=fmtp:" line each time, using our VPS, SPS and PPS (if we have them),
  // otherwise parameters from our framer source (in case they've changed since the last time that
  // we were called):
  H264or5VideoStreamFramer* framerSource = NULL;
  u_int8_t* vps = fVPS; unsigned vpsSize = fVPSSize;
  u_int8_t* sps = fSPS; unsigned spsSize = fSPSSize;
  u_int8_t* pps = fPPS; unsigned ppsSize = fPPSSize;
  if (vps == NULL || sps == NULL || pps == NULL) {
    // We need to get VPS, SPS and PPS from our framer source:
    if (fOurFragmenter == NULL) return NULL; // we don't yet have a fragmenter (and therefore not a source)
    framerSource = (H264or5VideoStreamFramer*)(fOurFragmenter->inputSource());
    if (framerSource == NULL) return NULL; // we don't yet have a source

    framerSource->getVPSandSPSandPPS(vps, vpsSize, sps, spsSize, pps, ppsSize);
    if (vps == NULL || sps == NULL || pps == NULL) {
#if 1
      // add by gavin: VPS/SPS/PPS are not available yet, so emit an "a=fmtp:" line
      // with empty "sprop-vps"/"sprop-sps"/"sprop-pps" instead of failing,
      // mirroring the H.264 fallback above. (Note: "packetization-mode" is kept
      // here only to match that fallback; it is an H.264 fmtp parameter.)
      char const* fmtpFmt =
        "a=fmtp:%d packetization-mode=1"
        //";profile-level-id=%06X"
        ";sprop-vps=; sprop-sps=; sprop-pps=\r\n";
      unsigned fmtpFmtSize = (unsigned)strlen(fmtpFmt)
        + 3 /* max char len of the payload type */
        + 6 /* slack left over from the omitted profile-level-id */
        + 0;
      char* fmtp = new char[fmtpFmtSize];
      memset(fmtp, 0x00, fmtpFmtSize);
      sprintf(fmtp, fmtpFmt, rtpPayloadType());

      delete[] fFmtpSDPLine; fFmtpSDPLine = fmtp;
      return fFmtpSDPLine;
#else
      return NULL; // our source isn't ready
#endif
    }
  }

  // Set up the "a=fmtp:" SDP line for this stream.
  u_int8_t* vpsWEB = new u_int8_t[vpsSize]; // "WEB" means "Without Emulation Bytes"
  unsigned vpsWEBSize = removeH264or5EmulationBytes(vpsWEB, vpsSize, vps, vpsSize);
  if (vpsWEBSize < 6/*'profile_tier_level' offset*/ + 12/*num 'profile_tier_level' bytes*/) {
    // Bad VPS size => assume our source isn't ready
    delete[] vpsWEB;
    return NULL;
  }
  u_int8_t const* profileTierLevelHeaderBytes = &vpsWEB[6];
  unsigned profileSpace  = profileTierLevelHeaderBytes[0]>>6; // general_profile_space
  unsigned profileId = profileTierLevelHeaderBytes[0]&0x1F; // general_profile_idc
  unsigned tierFlag = (profileTierLevelHeaderBytes[0]>>5)&0x1; // general_tier_flag
  unsigned levelId = profileTierLevelHeaderBytes[11]; // general_level_idc
  u_int8_t const* interop_constraints = &profileTierLevelHeaderBytes[5];
  char interopConstraintsStr[100];
  sprintf(interopConstraintsStr, "%02X%02X%02X%02X%02X%02X",
          interop_constraints[0], interop_constraints[1], interop_constraints[2],
          interop_constraints[3], interop_constraints[4], interop_constraints[5]);
  delete[] vpsWEB;

  char* sprop_vps = base64Encode((char*)vps, vpsSize);
  char* sprop_sps = base64Encode((char*)sps, spsSize);
  char* sprop_pps = base64Encode((char*)pps, ppsSize);

  char const* fmtpFmt =
    "a=fmtp:%d profile-space=%u"
    ";profile-id=%u"
    ";tier-flag=%u"
    ";level-id=%u"
    ";interop-constraints=%s"
    ";sprop-vps=%s"
    ";sprop-sps=%s"
    ";sprop-pps=%s\r\n";
  unsigned fmtpFmtSize = (unsigned)strlen(fmtpFmt)
    + 3 /* max num chars: rtpPayloadType */ + 20 /* max num chars: profile_space */
    + 20 /* max num chars: profile_id */
    + 20 /* max num chars: tier_flag */
    + 20 /* max num chars: level_id */
    + (unsigned)strlen(interopConstraintsStr)
    + (unsigned)strlen(sprop_vps)
    + (unsigned)strlen(sprop_sps)
    + (unsigned)strlen(sprop_pps);
  char* fmtp = new char[fmtpFmtSize];
  sprintf(fmtp, fmtpFmt,
          rtpPayloadType(), profileSpace,
          profileId,
          tierFlag,
          levelId,
          interopConstraintsStr,
          sprop_vps,
          sprop_sps,
          sprop_pps);

  delete[] sprop_vps;
  delete[] sprop_sps;
  delete[] sprop_pps;

  delete[] fFmtpSDPLine; fFmtpSDPLine = fmtp;
  return fFmtpSDPLine;
}
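Likewise, when the VPS/SPS/PPS are still missing the H.265 reply degrades to a line of the form below (again with 96 as an illustrative payload type):

a=fmtp:96 packetization-mode=1;sprop-vps=; sprop-sps=; sprop-pps=

The full live555 line with profile-space, tier-flag, level-id and interop-constraints is only emitted once the VPS has arrived, because those values are parsed out of the VPS itself.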


Reposted from www.cnblogs.com/TSINGSEE/p/11685687.html