How do you implement RTSP-to-RTMP relay (push) on the Unity platform?

Technical background

RTSP and RTMP playback, RTMP push, and even a lightweight RTSP service on the Unity platform have all been covered before, so they are not described in detail here. The question a developer raised today: with RTSP playback already implemented under Unity, can the stream be relayed (re-pushed) to RTMP at any time?

RTSP-to-RTMP relay has long been available in our native environment; here we essentially move that native flow into Unity. The overall process is: pull the RTSP stream with the player SDK, receive the encoded audio and video frames through data callbacks, and feed them to the publisher SDK, which pushes them out as RTMP.

Technical implementation

This article takes the Windows platform as an example: on top of the RTSP playback module, an RTSP-to-RTMP push module is added. Without further ado, here is the code:

Start playback, stop playback

/*
 * SmartPlayerWinMono.cs
 * 
 * Author: daniusdk.com
 * Created on 2017/04/19.
 */
public void StartPlayer(int sel)
{
  Debug.Log("StartPlayer++, sel: " + sel);

  if (videoctrl[sel].is_playing_)
  {
    Debug.Log("StartPlayer, already started.. sel: " + sel);
    return;
  }

  lock (videoctrl[sel].frame_lock_)
  {
    videoctrl[sel].cur_video_frame_ = null;
  }

  if (!videoctrl[sel].is_recording_ && !videoctrl[sel].is_pulling_)
  {
    if (!OpenPlayerHandle(sel))
    {
      Debug.LogError("call OpenPlayerHandle failed, sel:" + sel);
      return;
    }
  }

  if (is_enable_hardware_decoder_)
  {
    NTSmartPlayerSDK.NT_SP_SetH264HardwareDecoder(videoctrl[sel].player_handle_, is_support_h264_hardware_decoder_ ? 1 : 0, 0);
    NTSmartPlayerSDK.NT_SP_SetH265HardwareDecoder(videoctrl[sel].player_handle_, is_support_h265_hardware_decoder_ ? 1 : 0, 0);
  }
  else
  {
    NTSmartPlayerSDK.NT_SP_SetH264HardwareDecoder(videoctrl[sel].player_handle_, 0, 0);
    NTSmartPlayerSDK.NT_SP_SetH265HardwareDecoder(videoctrl[sel].player_handle_, 0, 0);
  }

  //video frame callback (YUV/RGB)
  videoctrl[sel].sdk_video_frame_call_back_ = new VideoControl.SetVideoFrameCallBack(SDKVideoFrameCallBack);
  videoctrl[sel].video_frame_call_back_ = new SP_SDKVideoFrameCallBack(NT_SP_SetVideoFrameCallBack);
  NTSmartPlayerSDK.NT_SP_SetVideoFrameCallBack(videoctrl[sel].player_handle_, (Int32)NT.NTSmartPlayerDefine.NT_SP_E_VIDEO_FRAME_FORMAT.NT_SP_E_VIDEO_FRAME_FROMAT_I420, window_handle_, videoctrl[sel].video_frame_call_back_);

  UInt32 flag = NTSmartPlayerSDK.NT_SP_StartPlay(videoctrl[sel].player_handle_);

  if (flag == DANIULIVE_RETURN_OK)
  {
    videoctrl[sel].is_need_get_frame_ = true;
    Debug.Log("NT_SP_StartPlay succeed, sel:" + sel);
  }
  else
  {
    videoctrl[sel].is_need_get_frame_ = false;
    Debug.LogError("NT_SP_StartPlay failed, sel:" + sel);

    // release the handle if it was opened only for this playback attempt,
    // and do not mark the channel as playing
    if (!videoctrl[sel].is_recording_ && !videoctrl[sel].is_pulling_)
    {
      NTSmartPlayerSDK.NT_SP_Close(videoctrl[sel].player_handle_);
      videoctrl[sel].player_handle_ = IntPtr.Zero;
    }

    return;
  }

  videoctrl[sel].is_playing_ = true;
}

private void StopPlayer(int sel)
{
  Debug.Log("StopPlayer++, sel: " + sel);

  videoctrl[sel].is_need_get_frame_ = false;
  videoctrl[sel].is_need_init_texture_ = false;

  if (videoctrl[sel].player_handle_ == IntPtr.Zero)
  {
    return;
  }

  UInt32 flag = NTSmartPlayerSDK.NT_SP_StopPlay(videoctrl[sel].player_handle_);
  if (flag == DANIULIVE_RETURN_OK)
  {
    Debug.Log("call NT_SP_StopPlay succeed, sel: " + sel);
  }
  else
  {
    Debug.LogError("call NT_SP_StopPlay failed, sel: " + sel);
  }

  if (!videoctrl[sel].is_recording_ && !videoctrl[sel].is_pulling_)
  {
    NTSmartPlayerSDK.NT_SP_Close(videoctrl[sel].player_handle_);
    videoctrl[sel].player_handle_ = IntPtr.Zero;
  }

  videoctrl[sel].is_playing_ = false;
}
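
For reference, the per-channel state videoctrl[sel] used above is assumed to look roughly like the sketch below. The field names follow their usage in the code; the actual VideoControl class in your project may differ, and the callback delegate types come from the SDK wrapper.

public class VideoControl
{
  // delegate for handing decoded frames to the render side;
  // the real signature is defined in your wrapper (placeholder here)
  public delegate void SetVideoFrameCallBack(object frame);

  public IntPtr player_handle_ = IntPtr.Zero;   // player SDK instance handle
  public IntPtr push_handle_ = IntPtr.Zero;     // publisher SDK instance handle

  public bool is_playing_;     // rendering in the scene
  public bool is_pulling_;     // pulling encoded data for relay
  public bool is_recording_;   // recording to file
  public bool is_pushing_;     // pushing RTMP

  public bool is_need_get_frame_;
  public bool is_need_init_texture_;

  public readonly object frame_lock_ = new object();        // guards cur_video_frame_
  public readonly object push_handle_mutex_ = new object(); // guards push_handle_

  public object cur_video_frame_;  // latest decoded frame (actual type depends on the wrapper)

  // keep delegate references alive so the GC does not collect them
  // while native code still holds the function pointers
  public SetVideoFrameCallBack sdk_video_frame_call_back_;
  public SP_SDKVideoFrameCallBack video_frame_call_back_;
  public SP_SDKPullStreamVideoDataCallBack pull_stream_video_data_call_back_;
  public SP_SDKPullStreamAudioDataCallBack pull_stream_audio_data_call_back_;
}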

If you need to relay the stream out as RTMP, the pulling side comes first: you need to call the pull-stream interface:

Start pulling the stream, stop pulling

public void StartPull(int sel)
{
  if (videoctrl[sel].is_pulling_)
  {
    Debug.Log("StartPull, already started.. sel: " + sel);
    return;
  }

  if (!videoctrl[sel].is_playing_ && 
      !videoctrl[sel].is_recording_ )
  {
    if (!OpenPlayerHandle(sel))
    {
      Debug.LogError("call OpenPlayerHandle failed, sel:" + sel);
      return;
    }
  }

  videoctrl[sel].pull_stream_video_data_call_back_ = new SP_SDKPullStreamVideoDataCallBack(OnVideoDataHandle);
  videoctrl[sel].pull_stream_audio_data_call_back_ = new SP_SDKPullStreamAudioDataCallBack(OnAudioDataHandle);

  NTSmartPlayerSDK.NT_SP_SetPullStreamVideoDataCallBack(videoctrl[sel].player_handle_, IntPtr.Zero, videoctrl[sel].pull_stream_video_data_call_back_);
  NTSmartPlayerSDK.NT_SP_SetPullStreamAudioDataCallBack(videoctrl[sel].player_handle_, IntPtr.Zero, videoctrl[sel].pull_stream_audio_data_call_back_);

  int is_transcode_aac = 1;   // transcode PCMA/PCMU/Speex audio to AAC before forwarding
  NTSmartPlayerSDK.NT_SP_SetPullStreamAudioTranscodeAAC(videoctrl[sel].player_handle_, is_transcode_aac);

  UInt32 ret = NTSmartPlayerSDK.NT_SP_StartPullStream(videoctrl[sel].player_handle_);

  if (NTBaseCodeDefine.NT_ERC_OK != ret)
  {
    if (!videoctrl[sel].is_playing_ && !videoctrl[sel].is_recording_)
    {
      NTSmartPlayerSDK.NT_SP_Close(videoctrl[sel].player_handle_);
      videoctrl[sel].player_handle_ = IntPtr.Zero;
    }

    return;
  }

  videoctrl[sel].is_pulling_ = true;
}

public void StopPull(int sel)
{
  if (!videoctrl[sel].is_pulling_)
    return;

  NTSmartPlayerSDK.NT_SP_StopPullStream(videoctrl[sel].player_handle_);

  if (!videoctrl[sel].is_playing_ && !videoctrl[sel].is_recording_)
  {
    NTSmartPlayerSDK.NT_SP_Close(videoctrl[sel].player_handle_);
    videoctrl[sel].player_handle_ = IntPtr.Zero;
  }

  videoctrl[sel].is_pulling_ = false;
}

When configuring the pull stream, note that if the audio is in another format such as PCMA, PCMU, or Speex, it can be transcoded to AAC before the data is called back, for better compatibility downstream. Also, when starting to pull or play, check whether an instance has already been opened for the same RTSP URL, so that one stream maps to one instance; opening two instances for the same URL wastes resources.

Likewise, when stopping playback or pulling, check the other states first: as long as playback, pulling, or recording is still active, the player instance must not be closed.
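
The rule above amounts to a small reference check. A minimal sketch of an assumed helper (not part of the SDK; NT_SP_Close is the same close call used in the code above):

private bool CanClosePlayerHandle(int sel)
{
  // the handle is shared by playback, pulling and recording
  return !videoctrl[sel].is_playing_ &&
         !videoctrl[sel].is_pulling_ &&
         !videoctrl[sel].is_recording_;
}

private void TryClosePlayerHandle(int sel)
{
  if (videoctrl[sel].player_handle_ == IntPtr.Zero)
    return;

  if (!CanClosePlayerHandle(sel))
    return; // playback, pulling or recording still uses this instance

  NTSmartPlayerSDK.NT_SP_Close(videoctrl[sel].player_handle_);
  videoctrl[sel].player_handle_ = IntPtr.Zero;
}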

Start the RTMP relay (push), stop the relay:

public bool StartPush(int sel, String url)
{
  if (videoctrl[sel].is_pushing_)
    return false;

  if (String.IsNullOrEmpty(url))
    return false;

  if (!OpenPushHandle(sel))
    return false;

  if (GetPushHandle(sel) == IntPtr.Zero)
    return false;

  IntPtr push_handle = GetPushHandle(sel);

  if (NTBaseCodeDefine.NT_ERC_OK != NTSmartPublisherSDK.NT_PB_SetURL(push_handle, url, IntPtr.Zero))
  {
    NTSmartPublisherSDK.NT_PB_Close(push_handle);
    SetPushHandle(sel, IntPtr.Zero);

    return false;
  }

  if (NTBaseCodeDefine.NT_ERC_OK != NTSmartPublisherSDK.NT_PB_StartPublisher(push_handle, IntPtr.Zero))
  {
    NTSmartPublisherSDK.NT_PB_Close(push_handle);
    SetPushHandle(sel, IntPtr.Zero);

    return false;
  }

  videoctrl[sel].is_pushing_ = true;

  return true;
}

public void StopPush(int sel)
{
  if (!videoctrl[sel].is_pushing_)
    return;

  videoctrl[sel].is_pushing_ = false;

  lock (videoctrl[sel].push_handle_mutex_)
  {
    if (videoctrl[sel].push_handle_ == IntPtr.Zero)
      return;

    NTSmartPublisherSDK.NT_PB_StopPublisher(videoctrl[sel].push_handle_);

    NTSmartPublisherSDK.NT_PB_Close(videoctrl[sel].push_handle_);
    videoctrl[sel].push_handle_ = IntPtr.Zero;
  }
}
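
StartPush above relies on a few small helpers around push_handle_. A minimal sketch of the assumed accessors is shown below; OpenPushHandle, which creates and configures the publisher instance, is project-specific and not shown here.

private IntPtr GetPushHandle(int sel)
{
  lock (videoctrl[sel].push_handle_mutex_)
  {
    return videoctrl[sel].push_handle_;
  }
}

private void SetPushHandle(int sel, IntPtr handle)
{
  lock (videoctrl[sel].push_handle_mutex_)
  {
    videoctrl[sel].push_handle_ = handle;
  }
}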

Audio and video data callbacks

private void OnVideoDataHandle(IntPtr handle, IntPtr user_data,
                               UInt32 video_codec_id, IntPtr data, UInt32 size,
                               IntPtr info, IntPtr reserve)
{
  int cur_sel = -1;

  for ( int i = 0; i < videoctrl.Length; i++)
  {
    if(handle == videoctrl[i].player_handle_)
    {
      cur_sel = i;
      break;
    }
  }

  if (cur_sel < 0)
    return;

  if (!videoctrl[cur_sel].is_pushing_)
    return;

  if (data == IntPtr.Zero)
    return;

  if (size < 1)
    return;

  if (info == IntPtr.Zero)
    return;

  NT_SP_PullStreamVideoDataInfo video_info = (NT_SP_PullStreamVideoDataInfo)Marshal.PtrToStructure(info, typeof(NT_SP_PullStreamVideoDataInfo));

  lock (videoctrl[cur_sel].push_handle_mutex_)
  {
    if (!videoctrl[cur_sel].is_pushing_)
      return;

    if (videoctrl[cur_sel].push_handle_ == IntPtr.Zero)
      return;

    // new interface (V2)
    NTSmartPublisherSDK.NT_PB_PostVideoEncodedDataV2(videoctrl[cur_sel].push_handle_, video_codec_id,
                                                     data, size, video_info.is_key_frame_, video_info.timestamp_, video_info.presentation_timestamp_);
  }
}

private void OnAudioDataHandle(IntPtr handle, IntPtr user_data,
                               UInt32 audio_codec_id, IntPtr data, UInt32 size,
                               IntPtr info, IntPtr reserve)
{
  int cur_sel = -1;

  for (int i = 0; i < videoctrl.Length; i++)
  {
    if (handle == videoctrl[i].player_handle_)
    {
      cur_sel = i;
      break;
    }
  }

  if (cur_sel < 0)
    return;

  if (!videoctrl[cur_sel].is_pushing_)
    return;

  if (data == IntPtr.Zero)
    return;

  if (size < 1)
    return;

  if (info == IntPtr.Zero)
    return;

  NT_SP_PullStreamAuidoDataInfo audio_info = (NT_SP_PullStreamAuidoDataInfo)Marshal.PtrToStructure(info, typeof(NT_SP_PullStreamAuidoDataInfo));

  lock (videoctrl[cur_sel].push_handle_mutex_)
  {
    if (!videoctrl[cur_sel].is_pushing_)
      return;

    if (videoctrl[cur_sel].push_handle_ == IntPtr.Zero)
      return;

    NTSmartPublisherSDK.NT_PB_PostAudioEncodedData(videoctrl[cur_sel].push_handle_, audio_codec_id, data, size,
                                                   audio_info.is_key_frame_, audio_info.timestamp_,
                                                   audio_info.parameter_info_, audio_info.parameter_info_size_);
  }
}
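
Putting it together, a minimal usage sketch of the relay flow might look like this (channel index 0 and the RTMP URL are placeholders; starting local playback is optional and independent of the relay):

public void StartRelay()
{
  StartPull(0);                                      // pull RTSP, receive encoded frames via callbacks
  StartPush(0, "rtmp://your.server/live/stream01");  // placeholder URL, forward as RTMP
}

public void StopRelay()
{
  StopPush(0);
  StopPull(0);
}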

Summary

In fact, RTSP-to-RTMP relay in the Unity environment is easier to wire up than RTMP/RTSP playback or pushing from the scene, because it involves almost no UI interaction. Interested developers can give it a try.

Source: blog.csdn.net/renhui1112/article/details/131733110