Timestamp Smoothing in Audio/Video Transmission

In audio/video streaming, frame timestamps usually come from the device's system clock, typically Linux time in milliseconds. Network transmission delays and occasional clock jumps can leave these timestamps inconsistent, so they need some processing: smoothing out sudden jumps and correcting audio/video desynchronization. The algorithm below handles this timestamp calculation and is particularly useful on mobile devices:
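
The function below uses several constants and types that the post does not show. Purely as assumptions to keep the listing readable (the real definitions live in the Car-eye sources and may differ), they might look roughly like this:

// Assumed sketches only -- the actual Car-eye definitions may differ.
#define TIMESTAMP_CANCULATE_BUFFER_SIZE           25  // ring-buffer length (assumed value)
#define TIMESTAMP_CANCULATE_VIDEO_THRESHOLD_SIZE  5   // video frames needed before smoothing (assumed value)
#define TIMESTAMP_CANCULATE_AUDIO_THRESHOLD_SIZE  5   // audio frames needed before smoothing (assumed value)

#define CAREYE_VFRAME_FLAG  0x01                      // video frame marker (assumed value)
#define CAREYE_AFRAME_FLAG  0x02                      // audio frame marker (assumed value)

typedef void* CarEye_RTMP_Handle;                     // opaque handle, cast to CERTMPInfo* internally

typedef struct
{
	unsigned int Timestamp;          // raw device timestamp, in milliseconds
	unsigned int TimestampInternal;  // non-zero when the timestamp was generated internally (assumed meaning)
	// the real structure presumably also carries the frame payload, its size, a key-frame flag, etc.
} MediaFrameInfo;

typedef struct
{
	int          fVideoIndex;        // typically initialised to -1 so the first frame is skipped
	int          fAudioIndex;
	unsigned int fVideoTimestamp[TIMESTAMP_CANCULATE_BUFFER_SIZE];
	unsigned int fAudioTimestamp[TIMESTAMP_CANCULATE_BUFFER_SIZE];
	int          fVideoPTSIncrement; // smoothed per-frame timestamp increment
	int          fAudioPTSIncrement;
	int          fVideoFrameAbnornalCounts;
	int          fAudioFrameAbnornalCounts;
} CERTMPInfo;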

static int RTMP_CanculateTimestamp(CarEye_RTMP_Handle handle, MediaFrameInfo& FrameInfo, unsigned int MediaType)
{
	if(handle == NULL)
	{
		return -1;
	}
	CERTMPInfo* thiz = (CERTMPInfo*)handle;

	if(MediaType == CAREYE_VFRAME_FLAG)
	{
		if(thiz->fVideoIndex < 0)
		{
			thiz->fVideoIndex++;//skip first frame
		}
		else
		{
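			// if this timestamp jumps by more than twice the smoothed increment (in either
			// direction) relative to the previous frame, treat it as abnormal: a single
			// outlier is dropped, repeated outliers reset the smoothing state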
			if(FrameInfo.TimestampInternal == 0 && thiz->fVideoPTSIncrement > 0 && thiz->fVideoIndex >= TIMESTAMP_CANCULATE_VIDEO_THRESHOLD_SIZE)
			{
				unsigned int preIndex = (thiz->fVideoIndex + TIMESTAMP_CANCULATE_BUFFER_SIZE - 1)%TIMESTAMP_CANCULATE_BUFFER_SIZE;
				long pts = (long)(FrameInfo.Timestamp - thiz->fVideoTimestamp[preIndex]);
				if(pts > (thiz->fVideoPTSIncrement << 1) || pts < -(thiz->fVideoPTSIncrement << 1))
				{
					/*printf("Have an abnormal video frame! drop the Timestamp!!  fVideoIndex=%d, preIndex=%d, Timestamp=%ul, oldTimestamp=%ul, fVideoPTSIncrement=%d, pts=%ld\n", 
						thiz->fVideoIndex,
						preIndex,
						FrameInfo.Timestamp,
						thiz->fVideoTimestamp[preIndex],
						thiz->fVideoPTSIncrement,
						pts);*/

					if((++thiz->fVideoFrameAbnornalCounts) > 1)
					{
						thiz->fVideoIndex = 0;
						thiz->fVideoFrameAbnornalCounts = 0;
						thiz->fVideoPTSIncrement = 0;
						// reset so the increment is re-estimated from scratch

					}
					else
					{
						return -2;
					}
				}
			}
			
			thiz->fVideoTimestamp[thiz->fVideoIndex++ % TIMESTAMP_CANCULATE_BUFFER_SIZE] = FrameInfo.Timestamp;
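			// wrap the index back periodically so it never overflows while still marking the buffer as full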
			if(thiz->fVideoIndex == (TIMESTAMP_CANCULATE_BUFFER_SIZE << 8))
			{
				thiz->fVideoIndex = TIMESTAMP_CANCULATE_BUFFER_SIZE;
			}

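			// once enough frames have accumulated, estimate the average per-frame PTS increment
			// (with rounding) across the buffered timestamps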
			if(TIMESTAMP_CANCULATE_VIDEO_THRESHOLD_SIZE <= thiz->fVideoIndex)
			{
				int frameCount = (thiz->fVideoIndex >= TIMESTAMP_CANCULATE_BUFFER_SIZE)?TIMESTAMP_CANCULATE_BUFFER_SIZE:thiz->fVideoIndex;
				if(frameCount >= TIMESTAMP_CANCULATE_BUFFER_SIZE)
				{
					thiz->fVideoPTSIncrement = (FrameInfo.Timestamp - thiz->fVideoTimestamp[thiz->fVideoIndex%TIMESTAMP_CANCULATE_BUFFER_SIZE] + (TIMESTAMP_CANCULATE_BUFFER_SIZE>>1))/(TIMESTAMP_CANCULATE_BUFFER_SIZE-1);
				}
				else
				{
					thiz->fVideoPTSIncrement = (FrameInfo.Timestamp - thiz->fVideoTimestamp[0] + (frameCount>>1)) / (frameCount-1);
				}
			}
		}
	}
	else if(MediaType == CAREYE_AFRAME_FLAG)
	{
		if(thiz->fAudioIndex < 0)
		{
			thiz->fAudioIndex++;//skip first frame
		}
		else
		{
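			// audio branch: same smoothing logic as for video, applied to the audio timestamps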
			if(FrameInfo.TimestampInternal == 0 && thiz->fAudioPTSIncrement > 0 && thiz->fAudioIndex >= TIMESTAMP_CANCULATE_AUDIO_THRESHOLD_SIZE)
			{
				unsigned int preIndex = (thiz->fAudioIndex + TIMESTAMP_CANCULATE_BUFFER_SIZE - 1)%TIMESTAMP_CANCULATE_BUFFER_SIZE;
				long pts = (long)(FrameInfo.Timestamp - thiz->fAudioTimestamp[preIndex]);
				if(pts > (thiz->fAudioPTSIncrement << 1) || pts < -(thiz->fAudioPTSIncrement << 1))
				{
					/*printf("Have an abnormal audio frame! drop the Timestamp!!  fAudioIndex=%d, preIndex=%d, Timestamp=%ul, oldTimestamp=%ul, fAudioPTSIncrement=%d, pts=%ld\n", 
						thiz->fAudioIndex,
						preIndex,
						FrameInfo.Timestamp,
						thiz->fAudioTimestamp[preIndex],
						thiz->fAudioPTSIncrement,
						pts);*/

					if((++thiz->fAudioFrameAbnornalCounts) > 1)
					{
						thiz->fAudioIndex = 0;
						thiz->fAudioFrameAbnornalCounts = 0;
						thiz->fAudioPTSIncrement = 0;

					}
					else
					{
						return -2;
					}
				}
			}
			
			thiz->fAudioTimestamp[thiz->fAudioIndex++ % TIMESTAMP_CANCULATE_BUFFER_SIZE] = FrameInfo.Timestamp;
			if(thiz->fAudioIndex == (TIMESTAMP_CANCULATE_BUFFER_SIZE << 8))
			{
				thiz->fAudioIndex = TIMESTAMP_CANCULATE_BUFFER_SIZE;
			}

			if(TIMESTAMP_CANCULATE_AUDIO_THRESHOLD_SIZE <= thiz->fAudioIndex)
			{
				int frameCount = (thiz->fAudioIndex >= TIMESTAMP_CANCULATE_BUFFER_SIZE)?TIMESTAMP_CANCULATE_BUFFER_SIZE:thiz->fAudioIndex;
				if(frameCount >= TIMESTAMP_CANCULATE_BUFFER_SIZE)
				{
					thiz->fAudioPTSIncrement = (FrameInfo.Timestamp - thiz->fAudioTimestamp[thiz->fAudioIndex % TIMESTAMP_CANCULATE_BUFFER_SIZE] + (TIMESTAMP_CANCULATE_BUFFER_SIZE>>1))/(TIMESTAMP_CANCULATE_BUFFER_SIZE-1);
				}
				else
				{
					thiz->fAudioPTSIncrement = (FrameInfo.Timestamp - thiz->fAudioTimestamp[0] + (frameCount>>1)) / (frameCount-1);
				}
			}
		}
	}

	return 0;
}
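
Tying the two pieces together, a caller might look roughly like the sketch below. ReadNextVideoFrame, frameData, frameSize, isKeyFrame and publisher are placeholders introduced here for illustration only; only RTMP_CanculateTimestamp, SendH264Packet and the CERTMPInfo fields come from the code in this post.

// Hypothetical call pattern (the placeholder names noted above are not part of the original API)
MediaFrameInfo frame;
unsigned char* frameData  = NULL;
unsigned int   frameSize  = 0;
bool           isKeyFrame = false;

// assumed helper that fills in the next captured video frame
if (ReadNextVideoFrame(&frame, &frameData, &frameSize, &isKeyFrame) == 0)
{
	int ret = RTMP_CanculateTimestamp(handle, frame, CAREYE_VFRAME_FLAG);
	if (ret == -2)
	{
		// the timestamp was judged abnormal once; ignore it this time
	}
	else if (ret == 0)
	{
		CERTMPInfo* info = (CERTMPInfo*)handle;
		// feed the smoothed increment into the publisher
		publisher->SendH264Packet(frameData, frameSize, isKeyFrame,
		                          frame.Timestamp, info->fVideoPTSIncrement);
	}
}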
#define MAX_AUDIO_VEDIO_DIFF	1000
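// maximum tolerated gap (ms) between the audio and video PTS before they are resynchronized below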

int CRTMPPublisher::SendH264Packet(unsigned char *data, UINT32 size,bool bIsKeyFrame, UINT32 nTimeStamp, UINT32 ptzIncrement)
{
	UINT32 Diff;

	if(data == NULL || size < 11)
	{
		return -1;
	}

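	// strip a leading Annex B start code (00 00 00 01 or 00 00 01) from the NALU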
	if (data[0]==0x00 && data[1]==0x00 && data[2]==0x00 && data[3]==0x01)
	{
		data +=4;
		size -= 4;
	}

	if (data[0]==0x00 && data[1]==0x00 && data[2]==0x01)
	{
		data +=3;
		size -= 3;
	}

	if (m_bSentMetadata == false)
	{
		if (!RTMP_IsConnected(m_pRtmp)/* && (Reconnect() == false)*/)
		{
			return -2;
		}

		if(SendMetadata()==true)
		{
			m_bSentMetadata = true;
		}
		else
		{
			m_bSentMetadata = false;
			return -3;
		}
	}

	int nalu_type = data[0] & 0x1F;
	int nalu_offset = 0;
	int parser_offset = 0;
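	// skip SEI (type 6), SPS (7) and PPS (8) NAL units to reach the first slice NALU;
	// SPS/PPS presumably go out via SendMetadata()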
	while (nalu_type == 0x06 || nalu_type == 0x07 || nalu_type == 0x08)
	{
		nalu_offset = FindNaluHead(data + parser_offset, size - parser_offset);
		if (nalu_offset == -1)
		{
			return -4;
		}
		parser_offset += nalu_offset;
		nalu_type = data[parser_offset] & 0x1F;
	}

	data += parser_offset;
	size -= parser_offset;

	if (size > m_nH264BuffSize - 10 - 30)
	{
		_TRACE_LOG("SendH264Packet error! packet too large! size = %u.\n", size);
		return -5;
	}

	unsigned char *body = (unsigned char*)(m_h264Buff+30);

	int i = 0;
	if(bIsKeyFrame)
	{
		body[i++] = 0x17;// 1:Iframe  7:AVC

		if (m_bWaitingKeyFrame)
		{
			m_bWaitingKeyFrame = false;
		}
	}
	else
	{
		body[i++] = 0x27;// 2:Pframe  7:AVC
	}
	body[i++] = 0x01;// AVC NALU
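	// composition time offset (3 bytes), left at zero here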
	body[i++] = 0x00;
	body[i++] = 0x00;
	body[i++] = 0x00;

	// NALU size
	body[i++] = size>>24;
	body[i++] = size>>16;
	body[i++] = size>>8;
	body[i++] = size&0xff;

	// NALU data
	memcpy(&body[i],data,size);

	if (!RTMP_IsConnected(m_pRtmp)/* && (Reconnect() == false)*/)
	{
		return -6;
	}

	if (m_bWaitingKeyFrame)
	{
		return -7;
	}

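	// when no smoothed increment is available, fall back to the raw frame timestamp for
	// both the audio and video PTS; otherwise advance the video PTS by the increment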
	if(ptzIncrement == 0)
	{
		//int nTick = 0;

		//if (m_uLastVideoPTS == 0)
		//{
		//	m_uLastVideoPTS = nTimeStamp;
		//	m_uVideoPTS = 0;
		//}
		//else
		//{
		//	nTick = nTimeStamp - m_uLastVideoPTS;

		//	if (nTick < 0 || nTick>500)
		//	{
		//		nTick = 40;
		//		//printf("H264 TimeStamp Exception : Tick = 0x%x\n", nTimeStamp-m_uLastVideoPTS);
		//	}
		//}

		//m_uVideoPTS += nTick;
	
		//m_uLastVideoPTS = nTimeStamp;
		//m_uVideoPTS += 100;
		m_uAudioPTS = m_uVideoPTS = nTimeStamp;
		//printf("%s m_uAudioPTS=%u, m_uVideoPTS=%u, ptzIncrement=%d\n", __FUNCTION__, m_uAudioPTS, m_uVideoPTS, ptzIncrement);

	}
	else
	{
		m_uVideoPTS += ptzIncrement;
	}	
	// additionally keep the audio and video timestamps from drifting too far apart
	Diff = m_uAudioPTS > m_uVideoPTS ? m_uAudioPTS - m_uVideoPTS : m_uVideoPTS - m_uAudioPTS;
	if (Diff > MAX_AUDIO_VEDIO_DIFF)
	{
		m_uAudioPTS = m_uVideoPTS = nTimeStamp;
		
	}
	//printf("%s m_uAudioPTS=%u, m_uVideoPTS=%u, ptzIncrement=%d\n", __FUNCTION__, m_uAudioPTS, m_uVideoPTS, ptzIncrement);

	int iRet = SendPacket(RTMP_PACKET_TYPE_VIDEO, body, i+size, m_uVideoPTS);
	
	return iRet;
}
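
FindNaluHead is called above but not shown in the post. Judging from how it is used, it scans for the next Annex B start code and returns the offset of the first byte after it, or -1 when no further start code exists. A minimal sketch under that assumption (not the project's actual implementation):

// Sketch of FindNaluHead (assumed behaviour; the real Car-eye version may differ)
static int FindNaluHead(const unsigned char *data, unsigned int size)
{
	for (unsigned int i = 0; i + 2 < size; i++)
	{
		if (data[i] == 0x00 && data[i + 1] == 0x00)
		{
			if (data[i + 2] == 0x01)
			{
				return (int)(i + 3);   // 00 00 01 start code
			}
			if (i + 3 < size && data[i + 2] == 0x00 && data[i + 3] == 0x01)
			{
				return (int)(i + 4);   // 00 00 00 01 start code
			}
		}
	}
	return -1;                         // no further start code found
}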

The first step smooths the timestamps and detects sudden jumps; the second step handles audio/video timestamp desynchronization. The full source code can be downloaded from the project's open-source repositories: https://github.com/Car-eye-team

Reposted from blog.csdn.net/Car_eye/article/details/82937293