How to decode an RTP stream and show the video using SDL?

Miguel Silvestre msilvestre at gmail.com
Mon May 15 16:20:43 UTC 2023


Hello,

I have this functional Gstreamer + SDL program:
https://gist.github.com/msilvestre/a0232d2552d0173b3495626163d8444f

But now I want to use RTP and H.264 to receive the video.
Here are the working pipelines. The sender:

`gst-launch-1.0 -v videotestsrc ! openh264enc ! rtph264pay ! udpsink host=$(hostname) port=5000`

Then I changed the pipeline in the C++ code to match the following receiver:

`gst-launch-1.0 udpsrc port=5000 ! application/x-rtp ! rtph264depay ! h264parse ! openh264dec ! videoconvert ! autovideosink`

But instead of autovideosink I'm using an appsink. With this approach the
appsink never returns any sample when I pull from it. If I drop the appsink
caps, my SDL window shows weird black-and-white artifacts, probably because of
the video format, so I tried playing with the videoconvert src pad
capabilities, but with no success.
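For reference, the kind of constraint I have been experimenting with looks
roughly like the sketch below (illustrative only: the capsfilter element and
the names in it are my own; the full program further down sets the caps
directly on the videoconvert src pad and on the appsink instead):

```cpp
// Illustrative sketch: request BGRA at a fixed size between videoconvert and
// appsink via a capsfilter (640x1138 matches SCREEN_WIDTH/SCREEN_HEIGHT below).
GstElement *capsFilter = gst_element_factory_make("capsfilter", "video_caps");
GstCaps *rawCaps = gst_caps_new_simple("video/x-raw",
                                       "format", G_TYPE_STRING, "BGRA",
                                       "width", G_TYPE_INT, 640,
                                       "height", G_TYPE_INT, 1138,
                                       NULL);
g_object_set(capsFilter, "caps", rawCaps, NULL);
gst_caps_unref(rawCaps);
// ... then add capsFilter to the bin and link videoconvert -> capsFilter -> appsink.
```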

I'm clearly missing something. What could it be?

Here is the C++ code:

```cpp
#include <gst/gst.h>
#include <thread>
#include <iostream>
#include <mutex>
#include <gst/app/gstappsink.h>
#include <SDL.h>


// SDL window dimensions
#define SCREEN_WIDTH 640
//#define SCREEN_HEIGHT 480
#define SCREEN_HEIGHT 1138

bool quit = false;

#define VERIFY_GST_NULL(ELEMENT) \
    if (!(ELEMENT)) { std::cout << ">>>>>>>" << #ELEMENT " is null!" << std::endl; return 0; }

int main(int argc, char *argv[]) {
    // Initialize GStreamer
    gst_init(0, NULL);
    /*
    GstElement *pipeline = gst_parse_launch(
        "udpsrc port=5000 ! application/x-rtp ! rtph264depay ! h264parse ! "
        "openh264dec ! videoconvert ! appsink name=sink", NULL);
    GstElement *sinkElement = gst_bin_get_by_name(GST_BIN(pipeline), "sink");
    sink = GST_APP_SINK(sinkElement);
    */

    // Create the pipeline
    GstElement *pipeline = gst_pipeline_new("video-player");
    GstElement *udpSrc = gst_element_factory_make("udpsrc", "udp_src");
    GstElement *rtpDepay = gst_element_factory_make("rtph264depay", "rtp_h264_depay");
    GstElement *h264Parser = gst_element_factory_make("h264parse", "h264_parse");
    GstElement *h264Decoder = gst_element_factory_make("openh264dec", "h264_decoder");
    GstElement *videoConvert = gst_element_factory_make("videoconvert", "video_decoder");
    GstAppSink *sink = GST_APP_SINK(gst_element_factory_make("appsink", "sink"));

    VERIFY_GST_NULL(pipeline)
    VERIFY_GST_NULL(udpSrc)
    VERIFY_GST_NULL(rtpDepay)
    VERIFY_GST_NULL(h264Parser)
    VERIFY_GST_NULL(h264Decoder)
    VERIFY_GST_NULL(videoConvert)
    VERIFY_GST_NULL(sink)

    g_object_set(udpSrc, "port", 5000, NULL);

    auto caps = gst_caps_new_simple("application/x-rtp",
                                    "media", G_TYPE_STRING, "video",
                                    "clock-rate", G_TYPE_INT, 90000,
                                    "encoding-name", G_TYPE_STRING, "H264",
                                    NULL);
    g_object_set(G_OBJECT(udpSrc), "caps", caps, NULL);
    gst_caps_unref(caps);



    auto videoConvertPad = gst_element_get_static_pad(videoConvert, "src");
    auto videoConvertSrcCaps = gst_caps_new_simple("video/x-raw",
                                                   "format", G_TYPE_STRING, "BGRA",
                                                   "width", G_TYPE_INT, SCREEN_WIDTH,
                                                   "height", G_TYPE_INT, SCREEN_HEIGHT,
                                                   "framerate", GST_TYPE_FRACTION, 30, 1,
                                                   NULL);
    gst_pad_set_caps(videoConvertPad, videoConvertSrcCaps);

    // Set the sink element to push data to the app

    gst_app_sink_set_emit_signals(sink, true);
    gst_app_sink_set_drop(sink, true);
    gst_app_sink_set_max_buffers(sink, 1);
    gst_app_sink_set_caps(sink, gst_caps_new_simple("video/x-raw",
                                                    "format", G_TYPE_STRING, "BGRA",
                                                    "width", G_TYPE_INT, SCREEN_WIDTH,
                                                    "height", G_TYPE_INT, SCREEN_HEIGHT,
                                                    NULL));


    gst_bin_add_many(GST_BIN (pipeline),
                     udpSrc,
                     rtpDepay,
                     h264Parser,
                     h264Decoder,
                     videoConvert,
                     sink,
                     NULL);
    gst_element_link_many(udpSrc, rtpDepay, h264Parser, h264Decoder,
                          videoConvert, sink, NULL);


    // Start the pipeline
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    // Create the SDL window
    SDL_Init(SDL_INIT_VIDEO);
    SDL_Window *window = SDL_CreateWindow("GStreamer + SDL",
                                          SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED,
                                          SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_SHOWN);

    // Create the SDL renderer
    auto renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);

    // Create the SDL surface
    auto surface = SDL_CreateRGBSurface(0, SCREEN_WIDTH, SCREEN_HEIGHT, 32, 0, 0, 0, 0);
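    // NOTE: with all-zero masks SDL picks a default 32-bit pixel layout for this
    // surface; I'm assuming (perhaps wrongly) that it matches the BGRA data I
    // request from the appsink.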

    SDL_Event e;
    std::cout << ">>> beggining of loop \n";
    while (!quit) {
        // Get the buffer from the appsink
        GstSample *sample = gst_app_sink_pull_sample(sink);
        if (!sample)
        {
            std::cout << ">>>>> No Sample from appsink!\n";
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
            continue;
        }
        std::cout << ">>>>> I Have THA Sample from appsink!\n";

        GstBuffer *buffer = gst_sample_get_buffer(sample);

        // Get the buffer data
        GstMapInfo info;
        gst_buffer_map(buffer, &info, GST_MAP_READ);
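
        // NOTE: the memcpy below assumes the mapped buffer is exactly
        // surface->pitch * surface->h bytes, i.e. BGRA frames at
        // SCREEN_WIDTH x SCREEN_HEIGHT with the same row stride as the surface.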

        SDL_LockSurface(surface);
        memcpy(surface->pixels, info.data, surface->pitch * surface->h);
        SDL_UnlockSurface(surface);

        // Unmap the buffer
        gst_buffer_unmap(buffer, &info);
        // Release the sample
        gst_sample_unref(sample);

        auto texture = SDL_CreateTextureFromSurface(renderer, surface);
        SDL_RenderCopy(renderer, texture, NULL, NULL);
        SDL_DestroyTexture(texture);
        SDL_RenderPresent(renderer);


        while (SDL_PollEvent(&e)) {
            if (e.type == SDL_QUIT) {
                quit = 1;
                break;
            }
        }

        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }


    // Stop the pipeline
    gst_element_set_state(pipeline, GST_STATE_NULL);

    // Release the GStreamer elements
    //gst_object_unref(sink);
    //gst_object_unref(videoConvert);
    //gst_object_unref(videoSrc);
    //gst_object_unref(pipeline);


    return 0;
}
```

Thank you.
--
Miguel Silvestre

