Pipeline control is lost when used with gtksink & rtpsrc.

stproj smpalasis at gmail.com
Tue Dec 8 15:12:17 PST 2015


Greetings,

I am trying to build a front end for my application using the gtk plugin (gtksink). What I would like to do is set the state of the pipeline from the buttons I press, but instead the pipeline stays stuck in the PLAYING state. Moreover, as you can see in my code, I set the state to PLAYING at the bottom of main(), and if I comment that line out the whole program becomes unresponsive.

I did the same thing successfully with filesrc (roughly sketched below), but it fails with the RTP source. What seems to be wrong? Am I missing something, or is it a bug?
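For reference, the filesrc variant was essentially the same program with a different source side of the pipeline, along these lines (an illustrative sketch with placeholder names, not the exact code):

  /* filesrc variant (illustrative sketch; "sample.h264" is a placeholder):
     the windows and button callbacks are identical, only the source differs */
  source    = gst_element_factory_make ("filesrc",      "file-source");
  parser    = gst_element_factory_make ("h264parse",    "parser");
  decoder   = gst_element_factory_make ("avdec_h264",   "decoder");
  converter = gst_element_factory_make ("videoconvert", "converter");
  sink      = gst_element_factory_make ("gtksink",      "gtksink");
  g_object_set (G_OBJECT (source), "location", "sample.h264", NULL);
  gst_bin_add_many (GST_BIN (pipeline), source, parser, decoder,
      converter, sink, NULL);
  gst_element_link_many (source, parser, decoder, converter, sink, NULL);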

I have attached my code; the commented lines at the top show how I compile the program.


Best regards,
S.

/*
    Compile gtkrtpclient.c:
    gcc -Wall gtkrtpclient.c -o gtkrtpclient \
        $(pkg-config --cflags --libs gtk+-3.0 gstreamer-1.0 gstreamer-video-1.0)
*/

#include <gtk/gtk.h>
#include <gst/gst.h>
#include <glib.h>

static void
button_state_null_cb (GtkWidget * widget, GstElement * pipeline)
{
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("GST_STATE_NULL\n");
}

static void
button_state_ready_cb (GtkWidget * widget, GstElement * pipeline)
{
  gst_element_set_state (pipeline, GST_STATE_READY);
  g_print ("GST_STATE_READY\n");
}

static void
button_state_paused_cb (GtkWidget * widget, GstElement * pipeline)
{
  gst_element_set_state (pipeline, GST_STATE_PAUSED);
  g_print ("GST_STATE_PAUSED\n");
}

static void
button_state_playing_cb (GtkWidget * widget, GstElement * pipeline)
{
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_print ("GST_STATE_PLAYING\n");
}

/*
static void
button_start_recording_cb (GtkWidget * widget, GstElement * pipeline)
{
  //implement
  g_print ("GST_START_RECORDING\n");
}
*/
/*
static void
button_stop_recording_cb (GtkWidget * widget, GstElement * pipeline)
{
  //implement
  g_print ("GST_STOP_RECORDING\n");
}
*/



static void
end_stream_cb (GstBus * bus, GstMessage * message, GstElement * pipeline)
{
  g_print ("End of stream\n");

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  gtk_main_quit ();
}

static void
destroy_cb (GtkWidget * widget, GdkEvent * event, GstElement * pipeline)
{
  g_print ("Close\n");

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  gtk_main_quit ();
}

int
main (int argc, char *argv[])
{
  GtkWidget *window, *window_control;
  GtkWidget *button_state_null, *button_state_ready;
  GtkWidget *button_state_paused, *button_state_playing;
  //GtkWidget *button_start_recording, *button_stop_recording;
  GtkWidget *grid, *area;


  GstElement *pipeline, *source, *jitterBuffer, *rtpdepay, *parser,
      *decoder, *converter, *sink;
  GstCaps *caps;
  GstBus *bus;

  gst_init (&argc, &argv);
  gtk_init (&argc, &argv);

  pipeline = gst_pipeline_new ("pipeline");

  //window that contains an area where the video is drawn
  window = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  gtk_window_set_default_size (GTK_WINDOW (window), 640, 480);
  gtk_window_move (GTK_WINDOW (window), 300, 10);
  gtk_window_set_title (GTK_WINDOW (window), "gtkgstwidget");

  //window to control the states
  window_control = gtk_window_new (GTK_WINDOW_TOPLEVEL);
  gtk_window_set_resizable (GTK_WINDOW (window_control), FALSE);
  gtk_window_move (GTK_WINDOW (window_control), 10, 10);
  grid = gtk_grid_new ();
  gtk_container_add (GTK_CONTAINER (window_control), grid);

  //control state null
  button_state_null = gtk_button_new_with_label ("GST_STATE_NULL");
  g_signal_connect (G_OBJECT (button_state_null), "clicked",
      G_CALLBACK (button_state_null_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_null, 0, 1, 1, 1);
  gtk_widget_show (button_state_null);

  //control state ready
  button_state_ready = gtk_button_new_with_label ("GST_STATE_READY");
  g_signal_connect (G_OBJECT (button_state_ready), "clicked",
      G_CALLBACK (button_state_ready_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_ready, 0, 2, 1, 1);
  gtk_widget_show (button_state_ready);

  //control state paused
  button_state_paused = gtk_button_new_with_label ("GST_STATE_PAUSED");
  g_signal_connect (G_OBJECT (button_state_paused), "clicked",
      G_CALLBACK (button_state_paused_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_paused, 0, 3, 1, 1);
  gtk_widget_show (button_state_paused);

  //control state playing
  button_state_playing = gtk_button_new_with_label ("GST_STATE_PLAYING");
  g_signal_connect (G_OBJECT (button_state_playing), "clicked",
      G_CALLBACK (button_state_playing_cb), pipeline);
  gtk_grid_attach (GTK_GRID (grid), button_state_playing, 0, 4, 1, 1);
  gtk_widget_show (button_state_playing);


  //control start recording
  /*button_start_recording = gtk_button_new_with_label ("vSTB_START_RECORDING");
  g_signal_connect (G_OBJECT (button_start_recording), "clicked",
      G_CALLBACK (button_start_recording_cb), pipeline);  // start recording callback
  gtk_grid_attach (GTK_GRID (grid), button_start_recording, 0, 5, 1, 1);
  gtk_widget_show (button_start_recording);*/

  //control stop recording
  /*button_stop_recording = gtk_button_new_with_label ("vSTB_STOP_RECORDING");
  g_signal_connect (G_OBJECT (button_stop_recording), "clicked",
      G_CALLBACK (button_stop_recording_cb), pipeline);  // stop recording callback
  gtk_grid_attach (GTK_GRID (grid), button_stop_recording, 0, 6, 1, 1);
  gtk_widget_show (button_stop_recording);*/

  gtk_widget_show (grid);
  gtk_widget_show (window_control);

  g_signal_connect (G_OBJECT (window), "delete-event",
      G_CALLBACK (destroy_cb), pipeline);

  //configure the pipeline elements
  /* note: the pipeline itself was already created at the top of main();
     creating a second one here would leave the button and delete-event
     callbacks controlling an orphaned, empty pipeline, so we keep using
     the existing object */
  source       = gst_element_factory_make ("udpsrc",          "Udp-source");
  jitterBuffer = gst_element_factory_make ("rtpjitterbuffer", "buffer");
  rtpdepay     = gst_element_factory_make ("rtph264depay",    "rtpdepay");
  parser       = gst_element_factory_make ("h264parse",       "parser");
  decoder      = gst_element_factory_make ("avdec_h264",      "decoder");
  converter    = gst_element_factory_make ("videoconvert",    "converter");
  sink         = gst_element_factory_make ("gtksink",         "gtksink");

  if (!pipeline || !source || !jitterBuffer || !rtpdepay || !parser ||
      !decoder || !converter || !sink) {
    g_printerr ("At least one element could not be created. Exiting.\n");
    return -1;
  }

  
  /* Set up the caps */
  caps = gst_caps_new_simple ("application/x-rtp",
      "media", G_TYPE_STRING, "video",
      "clock-rate", G_TYPE_INT, 90000,
      "encoding-name", G_TYPE_STRING, "H264", NULL);

  /* set the listening port and the RTP caps on the udpsrc element */
  g_object_set (G_OBJECT (source), "port", 5000, "caps", caps, NULL);
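  /* For testing, a sender that matches these caps could look something like
     this (an example command, not part of this program; the host is a
     placeholder):
       gst-launch-1.0 videotestsrc ! x264enc tune=zerolatency ! rtph264pay \
           ! udpsink host=127.0.0.1 port=5000 */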

  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline), source, jitterBuffer, rtpdepay,
      parser, decoder, converter, sink, NULL);

  /* we link the elements together */
  if (!gst_element_link_many (source, jitterBuffer, rtpdepay, parser,
          decoder, converter, sink, NULL)) {
    g_printerr ("Elements could not be linked. Exiting.\n");
    return -1;
  }


  /* embed the gtksink's video widget in the top-level window; the
     container takes its own reference, so we drop ours afterwards */
  g_object_get (sink, "widget", &area, NULL);
  gtk_container_add (GTK_CONTAINER (window), area);
  g_object_unref (area);

  gtk_widget_realize (area);

  //configure bus messages
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_signal_watch (bus); /* without a signal watch the message::* signals are never emitted */
  g_signal_connect (bus, "message::error", G_CALLBACK (end_stream_cb),
      pipeline);
  g_signal_connect (bus, "message::warning", G_CALLBACK (end_stream_cb),
      pipeline);
  g_signal_connect (bus, "message::eos", G_CALLBACK (end_stream_cb),
      pipeline);
  gst_object_unref (bus);



  gst_element_set_state (pipeline, GST_STATE_PLAYING); /* must stay in; without it the program is unresponsive */
  g_print ("Running...\n");

  gtk_widget_show_all (window);

  gtk_main ();

  gst_deinit ();

  return 0;
}




--
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/Pipeline-control-is-lost-when-used-with-gtksink-rtpsrc-tp4674818.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.

