Using GStreamer to decode an RTSP video stream and save frames as cv::Mat

1.

Hardware-decode an RTSP video stream and wrap each decoded frame in an OpenCV cv::Mat for downstream use.
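Before walking through the class below, it may help to see the pipeline it assembles expressed as a single parse-launch description. This is a minimal sketch, assuming a Jetson-style platform that ships omxh264dec and nvvidconv; the URL and frame size are placeholders, and fakesink stands in for the appsink stage so the decode path can be smoke-tested without a callback:

#include <gst/gst.h>

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv);
    GError *err = nullptr;
    // Same element chain as the RTSPDecode class, built in one call.
    GstElement *pipeline = gst_parse_launch(
        "rtspsrc location=rtsp://127.0.0.1:8554/test latency=2000 "
        "! rtph264depay ! h264parse ! omxh264dec ! nvvidconv "
        "! video/x-raw,format=BGRx,width=1920,height=1080 "
        "! videoconvert ! fakesink",
        &err);
    if (pipeline == nullptr)
    {
        g_printerr("Failed to build pipeline: %s\n", err->message);
        g_clear_error(&err);
        return -1;
    }
    gst_element_set_state(pipeline, GST_STATE_PLAYING);
    GMainLoop *loop = g_main_loop_new(NULL, FALSE);
    g_main_loop_run(loop); // run until interrupted (Ctrl+C)
    return 0;
}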

2.

#pragma once
#include <memory>
#include <string>
#include <opencv2/core/core.hpp>
#include <gst/gst.h>

#include "websocket_server.h"
namespace Transmit
{
    class RTSPDecode : public std::enable_shared_from_this<RTSPDecode>
    {
    public:
        using Ptr = std::shared_ptr<RTSPDecode>;
        ~RTSPDecode();

        /**
         * @brief Initialize the decoder and build the GStreamer pipeline
         */
        int init(int width, int height, std::string url);

        int width()
        {
            return width_;
        }

        int height()
        {
            return height_;
        }

        GstElement *rtph264depay()
        {
            return rtph264depay_;
        }

    private:
        GstElement *pipeline_;
        GstElement *rtspsrc_;      // reads the RTSP stream
        GstElement *rtph264depay_; // extracts H.264 data from the RTP packets delivered by rtspsrc
        GstElement *h264parse_;    // splits the byte stream into H.264 frames
        GstElement *omxh264dec_;   // hardware-decodes the H.264 frames
        GstElement *nvvidconv_;    // converts the decoded (drm_prime/NVMM) frames to BGRx
        GstElement *capsfilter_;   // pins down the output caps (format and size)
        GstElement *videoconvert_; // converts the raw video format
        GstElement *appsink_;      // hands the BGRx data to application code

        int width_ = 0;
        int height_ = 0;

        std::string url_;
    };
}
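
The implementation file follows (rtsp_decode.cpp is assumed here, matching the header include below):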
#include <transmit/rtsp_decode.h>
#include <iostream>
#include <stdio.h>
#include <unistd.h>
#include <gst/gst.h>
#include <gst/gstelement.h>
#include <gst/app/gstappsink.h>
#include <opencv2/core/core.hpp>
#include <opencv2/opencv.hpp>

using namespace std;
namespace Transmit
{
    GstFlowReturn CaptureGstBGRBuffer(GstAppSink *sink, gpointer user_data)
    {
        RTSPDecode *data = (RTSPDecode *)user_data;
        GstSample *sample = gst_app_sink_pull_sample(sink);
        if (sample == NULL)
        {
            return GST_FLOW_ERROR;
        }

        GstBuffer *buffer = gst_sample_get_buffer(sample);
        GstMapInfo map_info;
        if (!gst_buffer_map(buffer, &map_info, GST_MAP_READ))
        {
            // The map failed, so there is nothing to unmap; just release the sample.
            gst_sample_unref(sample);
            return GST_FLOW_ERROR;
        }

        // Wrap the mapped BGRx pixels without copying; `bgra` is only valid
        // while the buffer stays mapped. cvtColor allocates fresh storage for
        // `bgr`, so `bgr` can safely outlive the unmap below.
        cv::Mat bgra = cv::Mat(data->height(), data->width(), CV_8UC4, (char *)map_info.data, cv::Mat::AUTO_STEP);
        cv::Mat bgr;
        cv::cvtColor(bgra, bgr, cv::COLOR_BGRA2BGR);

        // Save every frame as <n>.jpg in the working directory; in a real
        // application you would hand `bgr` (or a clone) to a consumer instead.
        static int sampleno = 0;
        char szName[56] = {0};
        snprintf(szName, sizeof(szName), "%d.jpg", sampleno++);
        cv::imwrite(szName, bgr);

        gst_buffer_unmap(buffer, &map_info);
        gst_sample_unref(sample);
        return GST_FLOW_OK;
    }

    void RtspSrcPadAdded(GstElement *src, GstPad *new_pad, gpointer user_data)
    {
        RTSPDecode *data = (RTSPDecode *)user_data;
        GstPad *sink_pad = gst_element_get_static_pad(data->rtph264depay(), "sink");
        GstCaps *p_caps;
        gchar *description;
        GstPadLinkReturn ret;
        GstCaps *new_pad_caps = NULL;
        GstStructure *new_pad_struct = NULL;
        const gchar *new_pad_type = NULL;
        g_print("Received new pad '%s' from '%s':\n", GST_PAD_NAME(new_pad), GST_ELEMENT_NAME(src));

        // If the sink pad is already linked, ignore this signal.
        if (gst_pad_is_linked(sink_pad))
        {
            g_print("We are already linked. Ignoring.\n");
            goto exit;
        }

        p_caps = gst_pad_get_pad_template_caps(new_pad);
        description = gst_caps_to_string(p_caps);
        g_print("new pad caps: %s\n", description);
        g_free(description);
        if (NULL != p_caps)
            gst_caps_unref(p_caps);

        new_pad_caps = gst_pad_get_current_caps(new_pad);
        new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
        new_pad_type = gst_structure_get_name(new_pad_struct);

        if (!g_str_has_prefix(new_pad_type, "application/x-rtp"))
        {
            g_print("It has type '%s' which is not application/x-rtp. Ignoring.\n", new_pad_type);
            goto exit;
        }

        ret = gst_pad_link(new_pad, sink_pad); // link
        if (GST_PAD_LINK_FAILED(ret))
        {
            g_print("Type is '%s' but link failed.\n", new_pad_type);
        }
        else
        {
            g_print("Link succeeded (type '%s').\n", new_pad_type);
        }

    exit:
        // Release new_pad_caps here (p_caps was already unreffed above) so the
        // early-out paths do not leak it.
        if (new_pad_caps != NULL)
            gst_caps_unref(new_pad_caps);
        if (sink_pad != NULL)
            gst_object_unref(sink_pad);
    }

    RTSPDecode::~RTSPDecode()
    {
        if (pipeline_)
        {
            gst_element_set_state(pipeline_, GST_STATE_NULL);
            gst_object_unref(pipeline_);
            pipeline_ = nullptr;
        }
    }

    int RTSPDecode::init(int width, int height, std::string url)
    {
        width_ = width;
        height_ = height;
        url_ = url;

        pipeline_ = gst_pipeline_new("pipeline");
        rtspsrc_ = gst_element_factory_make("rtspsrc", "Rtspsrc");
        rtph264depay_ = gst_element_factory_make("rtph264depay", "Rtph264depay");
        h264parse_ = gst_element_factory_make("h264parse", "H264parse");
        omxh264dec_ = gst_element_factory_make("omxh264dec", "Omxh264dec");
        nvvidconv_ = gst_element_factory_make("nvvidconv", "Nvvidconv");
        capsfilter_ = gst_element_factory_make("capsfilter", "Capsfilter");
        videoconvert_ = gst_element_factory_make("videoconvert", "Videoconvert");
        appsink_ = gst_element_factory_make("appsink", "Appsink");

        if (!pipeline_ || !rtspsrc_ || !rtph264depay_ || !h264parse_ || !omxh264dec_ || !nvvidconv_ || !capsfilter_ || !videoconvert_ || !appsink_)
        {
            std::cerr << "Not all elements could be created" << std::endl;
            return -1;
        }

        // Configure rtspsrc and constrain the converter output caps
        g_object_set(G_OBJECT(rtspsrc_), "location", url_.c_str(), "latency", 2000, NULL);
        GstCaps *caps = gst_caps_new_simple("video/x-raw",
                                            "format", G_TYPE_STRING, "BGRx",
                                            "width", G_TYPE_INT, width_,
                                            "height", G_TYPE_INT, height_,
                                            nullptr);
        g_object_set(G_OBJECT(capsfilter_), "caps", caps, NULL);
        gst_caps_unref(caps); // g_object_set takes its own reference

        // Set up appsink
        g_object_set(G_OBJECT(appsink_), "emit-signals", TRUE, NULL);
        g_object_set(G_OBJECT(appsink_), "sync", FALSE, NULL);
        g_object_set(G_OBJECT(appsink_), "drop", TRUE, NULL);
        g_signal_connect(appsink_, "new-sample", G_CALLBACK(CaptureGstBGRBuffer), reinterpret_cast<void *>(this));

        // Set up rtspsrc: its source pads are created dynamically, so it is
        // linked to rtph264depay in the pad-added callback rather than here
        g_signal_connect(rtspsrc_, "pad-added", G_CALLBACK(RtspSrcPadAdded), reinterpret_cast<void *>(this));

        // Add elements to pipeline
        gst_bin_add_many(GST_BIN(pipeline_), rtspsrc_, rtph264depay_, h264parse_, omxh264dec_, nvvidconv_, capsfilter_, videoconvert_, appsink_, nullptr);

        // Link everything except rtspsrc (linked later in the pad-added callback)
        if (gst_element_link_many(rtph264depay_, h264parse_, omxh264dec_, nvvidconv_, capsfilter_, videoconvert_, appsink_, nullptr) != TRUE)
        {
            std::cerr << "rtph264depay ! h264parse ! omxh264dec ! nvvidconv ! capsfilter ! videoconvert ! appsink could not be linked" << std::endl;
            return -1;
        }

        // Start playing
        auto ret = gst_element_set_state(pipeline_, GST_STATE_PLAYING);
        if (ret == GST_STATE_CHANGE_FAILURE)
        {
            std::cerr << "Unable to set the pipeline to the playing state" << std::endl;
            return -1;
        }
        return 0;
    }
}
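
For completeness, here is a minimal usage sketch (a hypothetical main.cpp; the URL and the 1920x1080 size are placeholders). gst_init() must run before the pipeline is built, and a GMainLoop keeps the process alive while the appsink callback fires on GStreamer's streaming threads:

#include <transmit/rtsp_decode.h>
#include <gst/gst.h>
#include <memory>

int main(int argc, char *argv[])
{
    gst_init(&argc, &argv); // must precede any other GStreamer call

    auto decoder = std::make_shared<Transmit::RTSPDecode>();
    if (decoder->init(1920, 1080, "rtsp://127.0.0.1:8554/test") != 0)
        return -1;

    // The new-sample callback runs on GStreamer's streaming threads; the main
    // thread only needs to stay alive, which a GMainLoop does cleanly.
    GMainLoop *loop = g_main_loop_new(NULL, FALSE);
    g_main_loop_run(loop);
    g_main_loop_unref(loop);
    return 0;
}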


Reposted from blog.csdn.net/weixin_38416696/article/details/129435586