<div dir="ltr">Hello,<br><br>I have this functional Gstreamer + SDL program: <a href="https://gist.github.com/msilvestre/a0232d2552d0173b3495626163d8444f">https://gist.github.com/msilvestre/a0232d2552d0173b3495626163d8444f</a><br><br>But now I want to use rtp and h264 to receive the video.<br>Here are the working pipelines:<br>`gst-launch-1.0 -v videotestsrc ! openh264enc !  rtph264pay ! udpsink host=$(hostname) port=5000`<div><br>Then I changed the pipeline on C++ to accommodate the following receiver:<br><br></div><div>`gst-launch-1.0 udpsrc port=5000 ! application/x-rtp ! rtph264depay ! h264parse ! openh264dec ! videoconvert ! autovideosink`<br><br>But instead of autovideosink I'm using appsink also. <br>With this approach app sink is not getting any buffer from the pull buffer. But if I remove the appsink capabilities my SDL application shows black and white weird things. Maybe because of the video format. So I try to play around with videoconvert src pad capabilities but with no success.<br><br>I'm missing something for sure. What can it be? <br><br>Here is the c++ code:</div><div><br></div><div>```cplusplus<br>#include <gst/gst.h><br>#include <thread><br>#include <iostream><br>#include <mutex><br>#include <gst/app/gstappsink.h><br>#include <SDL.h><br><br><br>// SDL window dimensions<br>#define SCREEN_WIDTH 640<br>//#define SCREEN_HEIGHT 480<br>#define SCREEN_HEIGHT 1138<br><br>bool quit = false;<br><br>#define VERIFY_GST_NULL(ELEMENT) if (!ELEMENT) {std::cout << ">>>>>>>" << #ELEMENT " is null!" << std::endl; return 0;}<br><br>int main(int argc, char *argv[]) {<br>    // Initialize GStreamer<br>    gst_init(0, NULL);<br>/*<br>    GstElement *pipeline=gst_parse_launch("udpsrc port=5000 ! application/x-rtp ! rtph264depay ! h264parse ! openh264dec ! videoconvert ! 
appsink name=sink",NULL);<br>    GstElement *sinkElement = gst_bin_get_by_name(GST_BIN(pipeline), "sink");<br>    sink = GST_APP_SINK(sinkElement);<br>    */<br><br>    // Create the pipeline<br>    GstElement *pipeline = gst_pipeline_new("video-player");<br>    GstElement *udpSrc = gst_element_factory_make("udpsrc", "udp_src");<br>    GstElement *rtpDepay = gst_element_factory_make("rtph264depay", "rtp_h264_depay");<br>    GstElement *h264Parser = gst_element_factory_make("h264parse", "h264_parse");<br>    GstElement *h264Decoder = gst_element_factory_make("openh264dec", "h264_decoder");<br>    GstElement *videoConvert = gst_element_factory_make("videoconvert", "video_decoder");<br>    GstAppSink *sink = GST_APP_SINK(gst_element_factory_make("appsink", "sink"));<br><br>    VERIFY_GST_NULL(pipeline)<br>    VERIFY_GST_NULL(udpSrc)<br>    VERIFY_GST_NULL(rtpDepay)<br>    VERIFY_GST_NULL(h264Parser)<br>    VERIFY_GST_NULL(h264Decoder)<br>    VERIFY_GST_NULL(videoConvert)<br>    VERIFY_GST_NULL(sink)<br><br>    g_object_set(udpSrc, "port", 5000, NULL);<br><br>    auto caps = gst_caps_new_simple("application/x-rtp",<br>                                    "media", G_TYPE_STRING, "video",<br>                                    "clock-rate", G_TYPE_INT, 90000,<br>                                    "encoding-name", G_TYPE_STRING, "H264",<br>                                    NULL);<br>    g_object_set(G_OBJECT(udpSrc), "caps", caps, NULL);<br><br><br><br>    auto videoConvertPad = gst_element_get_static_pad(videoConvert, "src");<br>    auto videoConvertSrcCaps = gst_caps_new_simple("video/x-raw",<br>                                                   "format", G_TYPE_STRING, "BGRA",<br>                                                   "width", G_TYPE_INT, SCREEN_WIDTH,<br>                                                   "height", G_TYPE_INT, SCREEN_HEIGHT,<br>                                                   "framerate",GST_TYPE_FRACTION,30,1,<br>                                                   NULL);<br>    gst_pad_set_caps(videoConvertPad, videoConvertSrcCaps);<br><br>    // Set the sink element to push data to the app<br><br>    gst_app_sink_set_emit_signals(sink, true);<br>    gst_app_sink_set_drop(sink, true);<br>    gst_app_sink_set_max_buffers(sink, 1);<br>    gst_app_sink_set_caps(sink, gst_caps_new_simple("video/x-raw",<br>                                                    "format", G_TYPE_STRING, "BGRA",<br>                                                    "width", G_TYPE_INT, SCREEN_WIDTH,<br>                                                    "height", G_TYPE_INT, SCREEN_HEIGHT,<br>                                                    NULL));<br><br><br>    gst_bin_add_many(GST_BIN (pipeline),<br>                     udpSrc,<br>                     rtpDepay,<br>                     h264Parser,<br>                     h264Decoder,<br>                     videoConvert,<br>                     sink,<br>                     NULL);<br>    gst_element_link_many(udpSrc, rtpDepay, h264Parser, h264Decoder, videoConvert, sink, NULL);<br>    <br><br>    // Start the pipeline<br>    gst_element_set_state(pipeline, GST_STATE_PLAYING);<br><br>    // Create the SDL window<br>    SDL_Init(SDL_INIT_VIDEO);<br>    SDL_Window *window = SDL_CreateWindow("GStreamer + SDL",  SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, SCREEN_WIDTH, SCREEN_HEIGHT, SDL_WINDOW_SHOWN);<br><br>    // Create the SDL renderer<br>    auto renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);<br><br>    
    // Create the SDL surface that will receive the raw frames
    auto surface = SDL_CreateRGBSurface(0, SCREEN_WIDTH, SCREEN_HEIGHT, 32, 0, 0, 0, 0);

    SDL_Event e;
    std::cout << ">>> beginning of loop\n";
    while (!quit) {
        // Pull the next sample from the appsink
        GstSample *sample = gst_app_sink_pull_sample(sink);
        if (!sample)
        {
            std::cout << ">>>>> No sample from appsink!\n";
            std::this_thread::sleep_for(std::chrono::milliseconds(100));
            continue;
        }
        std::cout << ">>>>> Got a sample from appsink!\n";

        GstBuffer *buffer = gst_sample_get_buffer(sample);

        // Map the buffer and copy the pixel data into the SDL surface.
        // NOTE: this assumes the frame is exactly SCREEN_WIDTH x SCREEN_HEIGHT
        // BGRA with the same pitch as the surface.
        GstMapInfo info;
        gst_buffer_map(buffer, &info, GST_MAP_READ);

        SDL_LockSurface(surface);
        memcpy(surface->pixels, info.data, surface->pitch * surface->h);
        SDL_UnlockSurface(surface);

        // Unmap the buffer and release the sample
        gst_buffer_unmap(buffer, &info);
        gst_sample_unref(sample);

        // Upload the surface to a texture and present it
        auto texture = SDL_CreateTextureFromSurface(renderer, surface);
        SDL_RenderCopy(renderer, texture, NULL, NULL);
        SDL_DestroyTexture(texture);
        SDL_RenderPresent(renderer);

        while (SDL_PollEvent(&e)) {
            if (e.type == SDL_QUIT) {
                quit = true;
                break;
            }
        }

        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }

    // Stop the pipeline and clean up
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);

    SDL_FreeSurface(surface);
    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(window);
    SDL_Quit();

    return 0;
}
```

Thank you.

--
Miguel Silvestre