appsrc video RTP timestamp issue

John Dunn John.Dunn at qsc.com
Fri Nov 13 18:02:03 UTC 2020


Hello-

I have an appsrc that generates raw RGBx frames 30 times a second that I would like to transmit via RTP. Currently the appsrc is a prototype with dummy frame buffer data but will eventually be tied to an accurate hardware source of frames. My appsrc works perfectly fine with this pipeline

  const char* pipeline_str =
    "appsrc name=appsrc !"
    "video/x-raw, format=RGBx, width=640, height=480 !"
    "videoconvert !"
    "autovideosink";

But when I use this pipeline to send it out via RTP

  pipeline_str =
    "appsrc name=appsrc !"
    "video/x-raw, format=RGBx, width=640, height=480 !"
    "videoconvert !"
    "mfh264enc !"
    "rtph264pay pt=96 !"
    "udpsink host=127.0.0.1 port=1234 sync=true async=false";

The following message is spewed out via GST_DEBUG  

  (gstest:36956): GStreamer-CRITICAL **: 08:43:43.164: gst_segment_to_running_time: assertion 'segment->format == format' failed

and VLC isn't able to reliably receive the stream. I have tested the same pipeline using videotestsrc in place of my appsrc and VLC works perfectly fine. My incoming frame data doesn't have any inherent time base, so I'm just setting dts to the current time delta from the start of play. I've also tried setting do-timestamp=1 on my appsrc, but that doesn't work either and also makes the loop exit with no error after a second or two.

Here's the source of my appsrc. Right now I'm trying to simulate an accurate 30fps source using some spin waits, but my timing isn't perfect — I don't know if that's a source of some of the issues. I'd also like to have the absolute minimum latency when sending the RTP packets out.

// Fixed test-pattern geometry and frame rate for the prototype appsrc.
constexpr uint32_t VIDEO_WIDTH = 640;
constexpr uint32_t VIDEO_HEIGHT = 480;
constexpr uint32_t VIDEO_FPS = 30;

// Packed RGBx pixel values as stored in the frame buffer (red in the low
// byte, matching the (r | g<<8 | b<<16) packing used by update_frame()).
// Hex digits normalized to uppercase (the original mixed 0x00FFFFff).
constexpr uint32_t   RED = 0x000000FF;
constexpr uint32_t GREEN = 0x0000FF00;
constexpr uint32_t  BLUE = 0x00FF0000;
constexpr uint32_t WHITE = 0x00FFFFFF;
constexpr uint32_t BLACK = 0x00000000;

class app_src_t {
  uint32_t* videoFrameBuffer;
  uint8_t r, g, b{ 0 };
  uint32_t count{ 0 };

  std::thread thread;
  GstAppSrc *appsrc;
  GstClock* clk;
  GstClockTime start_time;
  std::atomic<bool> run;

  void update_frame() {
    uint32_t color = (r & 0x000000ff) | ((g << 8) & 0x0000ff00) | ((b << 16) & 0x00ff0000);
    for (uint32_t iy = 0; iy < VIDEO_HEIGHT; iy++) {
      for (uint32_t ix = 0; ix < VIDEO_WIDTH; ix++) {
        uint32_t index = ( ix + count )  % VIDEO_WIDTH;
        if (index < 100) videoFrameBuffer[iy*VIDEO_WIDTH + ix] = RED;
        else if (index < 200)videoFrameBuffer[iy*VIDEO_WIDTH + ix] = GREEN;
        else if (index < 300) videoFrameBuffer[iy*VIDEO_WIDTH + ix] = BLUE;
        else if (index < 400) videoFrameBuffer[iy*VIDEO_WIDTH + ix] = WHITE;
        else videoFrameBuffer[iy*VIDEO_WIDTH + ix] = color;
      }
    }
    r += 5;
    if (count % 2 == 0) g += 3;
    if (count % 3 == 0) b += 6;
  }
  GstClockTime last_now;

  void on_frame(GstClockTime now) {
    // update frame data
    guint dataLen = VIDEO_WIDTH * VIDEO_HEIGHT * sizeof(uint32_t);
    // create buffer and copy data into it
    GstBuffer* buffer = gst_buffer_new_wrapped(g_memdup(videoFrameBuffer, dataLen), dataLen);
    buffer->duration = 1 * 1000 * 1000 * 1000 / VIDEO_FPS;
    buffer->offset = count;
    buffer->dts = now - start_time;
    GstFlowReturn ret = gst_app_src_push_buffer(appsrc, buffer);
    // update frame counter
    last_now = now;
    count++;
  }

public:
  app_src_t(GstPipeline* pipeline, const char* name) : run(true) {

    videoFrameBuffer = new uint32_t[VIDEO_WIDTH * VIDEO_HEIGHT];
    clk = gst_pipeline_get_pipeline_clock(pipeline);
    start_time = gst_clock_get_time(clk);

    appsrc = GST_APP_SRC(gst_bin_get_by_name(GST_BIN(pipeline), name));

    if (appsrc) {
      GstAppSrcCallbacks appsrc_callbacks;
      appsrc_callbacks.need_data = app_src_t::appsrc_need_data_callback;
      appsrc_callbacks.enough_data = app_src_t::appsrc_enough_data_callback;
      appsrc_callbacks.seek_data = 0; // appsrc_seek_data_callback;
      gst_app_src_set_callbacks(appsrc, &appsrc_callbacks, 0, 0);
      //g_object_set(appsrc, "do-timestamp", 1);
      g_object_set(appsrc, "is-live", 1);
    }

    GstCaps* caps = gst_caps_new_simple("video/x-raw",
      "format", G_TYPE_STRING, "RGBx",
      "framerate", GST_TYPE_FRACTION, 30, 1,
      "pixel-aspect-ratio", GST_TYPE_FRACTION, 1, 1,
      "width", G_TYPE_INT, VIDEO_WIDTH,
      "height", G_TYPE_INT, VIDEO_HEIGHT,
      NULL);

    gst_app_src_set_caps(appsrc, caps);

    // start feeding 
    thread = std::thread([this](){
      GstClockTime prev_clock = gst_clock_get_time(clk);
      while (run) {
        GstClockTime next_clock = gst_clock_get_time(clk);
        // send out next frame
        on_frame(next_clock);
        update_frame();

        // make sure each frame takes *at least* 1/30th of a second
        GstClockTime frame_clock = gst_clock_get_time(clk);
        double sleep_ns = 1.0 * 1000 * 1000 * 1000 / 30 - (frame_clock - next_clock);
        GstClockTime spin_start = gst_clock_get_time(clk);
        // spin lock until next frame since windows sleep is miserable...
        while (gst_clock_get_time(clk) - spin_start < sleep_ns) std::this_thread::sleep_for(std::chrono::nanoseconds(0));

        prev_clock = next_clock;
      }
    });
  }
  ~app_src_t() {
    delete[]videoFrameBuffer;
  }

  void stop() {
    run = false; 
    thread.join();
  }
  static void appsrc_need_data_callback(GstAppSrc *src, guint length, gpointer user_data) {
    std::cout << "appsrc_need_data_callback" << std::endl;
  }
  static void appsrc_enough_data_callback(GstAppSrc *src, gpointer user_data) {
    std::cout << "appsrc_enough_data_callback" << std::endl;
  }
};





More information about the gstreamer-devel mailing list