Hello,
Have you installed all the plugins?
Try something like gst-inspect flutsdemux. Same for flump3dec and mpeg2dec.
You need these elements for the pipeline to work.
Then try something like this:
gst-launch-0.10 -v playbin uri=file:///home/myprofile/myfile.ts

If it works, then comment out this line:

gst_element_set_state (pipeAudio, GST_STATE_PAUSED);

Otherwise, each time an audio pad is added you will block your audio pipeline.
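Roughly, the audio branch of the pad-added callback could then look like this (a sketch only, mirroring the audio part of cb_new_pad in the code quoted below and assuming the 0.10 API; the helper name link_audio_branch is just an example):

static void
link_audio_branch (GstPad *pad)
{
  GstPad *sinkpad;

  /* Add the audio sub-pipeline to the main pipeline and get it ready. */
  gst_bin_add (GST_BIN (pipeline), pipeAudio);
  gst_element_set_state (pipeAudio, GST_STATE_READY);

  /* Link decodebin's new pad to the ghost sink pad of pipeAudio. */
  sinkpad = gst_element_get_pad (pipeAudio, "sink");
  if (!GST_PAD_IS_LINKED (sinkpad) &&
      gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
    g_warning ("Cannot link audio");

  /* Go straight to PLAYING, with no intermediate PAUSED step that could block. */
  gst_element_set_state (pipeAudio, GST_STATE_PLAYING);
  gst_object_unref (sinkpad);
}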
Erwan Masson


2007/5/16, fabien <fabien.castan@free.fr>:
Hello,
I've tested your code on two different PCs with two different versions of
GStreamer (08.10 and 10.12), but it didn't work on either of them. After
several tests, I finally commented out the line that set PLAYING on
pipeAudio. That's when I managed to get an image for the first time.
But then I tested it with .avi and .mpg files and it doesn't work; it only
opens .ogg. Furthermore, it only works once in every two attempts...
Fabien


Erwan Masson wrote:
> Hi,
> I have tried some tests; it seems that if you don't set pipeVideo to PAUSED
> and you set pipeAudio to PLAYING, it works.
> I don't know why. Moreover, if I set your main pipeline to PLAYING it
> crashes; in my code it doesn't crash.
>
> For your notify::caps handler, I have never been able to retrieve the info
> that way; I use a have-type callback.
>
> For your have-data function, I would have used a hand-off callback
> instead.
>
> This code works fine for me.
> Try it,
> Erwan
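For reference, a have-type callback on a typefind element looks roughly like this under the 0.10 API (a sketch only; cb_have_type is just an example name, and vfind/vconv2 are the commented-out typefind/identity elements in the attached code):

/* "have-type" fires once typefind has identified the stream. */
static void
cb_have_type (GstElement *typefind, guint probability, GstCaps *caps, gpointer data)
{
  gchar *desc = gst_caps_to_string (caps);
  g_print ("have-type (probability %u): %s\n", probability, desc);
  g_free (desc);
}

/* Connected with, e.g.:
 *   g_signal_connect (vfind, "have-type", G_CALLBACK (cb_have_type), NULL);
 * The identity element's "handoff" signal works the same way for grabbing buffers:
 *   g_signal_connect (vconv2, "handoff", G_CALLBACK (cb_handoff_video), NULL);
 */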
>
> 2007/5/14, fabien <fabien.castan@free.fr>:
>
> Thank you for your fast answer.
> But I did remove the capsfilter, and it still doesn't work... :(
> From time to time the sound begins to play and stops after two
> seconds or so, and I get no video.
> Fabien
>
> Erwan Masson wrote:
> > Hello,
> > Remove the capsfilter and it will work :) .
> > I used a capsfilter linked with a fakesink to grab each frame in a
> > specific format (with a hand-off signal).
> > Erwan
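That frame-grabbing setup could be sketched like this (0.10 API; element and callback names are only examples, and the RGB caps are forced so every buffer handed off has a known layout):

GstElement *capsf = gst_element_factory_make ("capsfilter", "rgb-caps");
GstElement *grab  = gst_element_factory_make ("fakesink",   "frame-grabber");

/* Restrict the stream to RGB before the sink. */
g_object_set (capsf, "caps",
    gst_caps_new_simple ("video/x-raw-rgb", "bpp", G_TYPE_INT, 24, NULL), NULL);

/* fakesink only emits "handoff" when signal-handoffs is enabled. */
g_object_set (grab, "signal-handoffs", TRUE, NULL);
g_signal_connect (grab, "handoff", G_CALLBACK (cb_grab_frame), NULL);

/* cb_grab_frame then receives every frame:
 *   static void cb_grab_frame (GstElement *sink, GstBuffer *buf, GstPad *pad, gpointer data);
 */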
> >
> > 2007/5/14, Michael Smith <msmith@fluendo.com>:
> >
> > On Mon, 2007-05-14 at 14:14 +0200, fabien wrote:
> > > Hello,
> > > I tested your code, but it still doesn't work. If I comment out the
> > > video part, the audio plays (from the avi file but not from the mpeg...).
> > > So it must be an error in the video pipeline. I made a sketch of the
> > > pipeline... If anyone could find the error, it would be of great help
> > > to me.
> > > Thank you
> > >
> >
> > You're using a capsfilter to force a particular pixel format, size, and
> > framerate.
> >
> > You use ffmpegcolorspace (which can convert to the required pixel
> > format), but you don't have anything to convert to the size and
> > framerate you're asking for.
> >
> > One specific problem you're likely to run into is that many files have
> > non-square pixels, but ximagesink requires square pixels. You can use
> > the 'videoscale' element to resize appropriately.
> >
> > You can also try using videorate to change the video framerate.
> >
> > There may be other problems with your code, I didn't read it closely or
> > try it myself.
> >
> > Mike
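For instance, inserting those two elements into the video branch of the attached code could look roughly like this (a sketch only; vscale and vrate are new example variables, the other names come from the code below, and the capsfilter is the commented-out one):

GstElement *vscale = gst_element_factory_make ("videoscale", "scaler");
GstElement *vrate  = gst_element_factory_make ("videorate",  "rater");

/* queue -> ffmpegcolorspace -> videoscale -> videorate -> capsfilter -> sink,
 * so the size/framerate caps forced by the capsfilter can actually be met. */
gst_bin_add_many (GST_BIN (pipeVideo), vqueue, vconv, vscale, vrate, vcapsfilter, vsink, NULL);
gst_element_link_many (vqueue, vconv, vscale, vrate, vcapsfilter, vsink, NULL);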
>
> ------------------------------------------------------------------------
>
> /* g++ `pkg-config --libs --cflags gstreamer-0.10` -Wall exampleNew.cpp -o exampleNew */
> /* usage: ./example videoFileName */
>
> #include <gst/gst.h>
> #include <unistd.h>
>
> #define VIDEO_WIDTH 360
> #define VIDEO_HEIGHT 288
>
> GstElement* pipeline;
> GstElement* pipeVideo;
> GstElement* pipeAudio;
>
> static int ind = 0;
>
> static gboolean
> cb_bus_call (GstBus *bus,
>              GstMessage *msg,
>              gpointer data)
> {
>   GMainLoop *loop = (GMainLoop *) data;
>   GstState oldstate, newstate, pending;
>
>   //g_print ("Got %s message\n", GST_MESSAGE_TYPE_NAME (msg));
>   switch (GST_MESSAGE_TYPE (msg)) {
>     case GST_MESSAGE_STATE_CHANGED:
>       gst_message_parse_state_changed (msg, &oldstate, &newstate, &pending);
>       g_print ("State changed, ");
>       switch (oldstate) {
>         case GST_STATE_VOID_PENDING: g_print ("GST_STATE_VOID_PENDING"); break;
>         case GST_STATE_NULL:         g_print ("GST_STATE_NULL");         break;
>         case GST_STATE_READY:        g_print ("GST_STATE_READY");        break;
>         case GST_STATE_PAUSED:       g_print ("GST_STATE_PAUSED");       break;
>         case GST_STATE_PLAYING:      g_print ("GST_STATE_PLAYING");      break;
>         default: break;
>       }
>       g_print (" -> ");
>       switch (newstate) {
>         case GST_STATE_VOID_PENDING: g_print ("GST_STATE_VOID_PENDING"); break;
>         case GST_STATE_NULL:         g_print ("GST_STATE_NULL");         break;
>         case GST_STATE_READY:        g_print ("GST_STATE_READY");        break;
>         case GST_STATE_PAUSED:       g_print ("GST_STATE_PAUSED");       break;
>         case GST_STATE_PLAYING:      g_print ("GST_STATE_PLAYING");      break;
>         default: break;
>       }
>       g_print ("\n");
>       break;
>     case GST_MESSAGE_EOS:
>       g_print ("End-of-stream\n");
>       g_main_loop_quit (loop);
>       break;
>     case GST_MESSAGE_ERROR: {
>       gchar *debug;
>       GError *err;
>
>       gst_message_parse_error (msg, &err, &debug);
>       g_free (debug);
>       g_print ("Error: %s\n", err->message);
>       g_error_free (err);
>       g_main_loop_quit (loop);
>       break;
>     }
>     default:
>       break;
>   }
>   return TRUE;
> }
>
> /* Buffer probe callback: called for each buffer read from the source. */
> static gboolean cb_have_data (GstPad *pad, GstBuffer *buffer, gpointer u_data)
> {
>   printf ("nb frame %d", ind++);
>   return TRUE;
> }
>
> /* This callback will be called when GStreamer finds some info about the
>  * video. In this case we want the width & height. */
> static void cb_notify_caps (GObject *obj, GParamSpec *pspec, gpointer data)
> {
>   GstPad *pad = GST_PAD (obj);
>   GstCaps *caps;
>   GstStructure *str;
>   gint width, height;
>
>   if (!(caps = gst_pad_get_caps (pad))) return;
>   if (!(str = gst_caps_get_structure (caps, 0))) return;
>   if (!gst_structure_get_int (str, "width", &width) ||
>       !gst_structure_get_int (str, "height", &height))
>     return;
>   g_print ("cb_notify_caps width:%d height:%d\n", width, height);
>   //video_width = width;
>   //video_height = height;
> }
>
> /* This callback will be called when GStreamer finds some stream (audio or
>  * video) in the open file. This function links the appropriate elements. */
>
> // A new-pad callback function:
> static void cb_new_pad (GstElement *element, GstPad *pad, gpointer data)
> {
>   GstPad *sinkpad;
>   GstCaps *caps;
>   GstStructure *str;
>   gint i, max;
>
>   caps = gst_pad_get_caps (pad);
>   str = gst_caps_get_structure (caps, 0);
>
>   g_print ("________________________ cb New Pad ________________________\n");
>   g_print ("GstStructure: %s\n", gst_structure_get_name (str));
>   /* We can now link this pad with the audio or video decoder */
>   g_print ("Dynamic pad created, linking parser/decoder\n");
>   g_print ("-------\n");
>
>   // VIDEO
>   if (g_strrstr (gst_structure_get_name (str), "video"))
>   {
>     g_print ("Video link\n");
>
>     max = gst_structure_n_fields (str); // Get the number of fields in the structure
>     g_print ("nb field = %d\n", max);
>     for (i = 0; i < max; i++) {
>       g_print ("\n field name: %s \n", gst_structure_nth_field_name (str, i)); // Get the name of each field.
>     }
>     g_print ("GstCaps: %s\n", gst_caps_to_string (caps));
>
>     // Now add the video thread to the main pipeline
>     gst_bin_add (GST_BIN (pipeline), pipeVideo);
>     // Put the video pipeline in the READY state (it can misbehave if it is not initialised to READY)
>     gst_element_set_state (pipeVideo, GST_STATE_READY);
>     // Retrieve the sink pad of pipeVideo (the ghost pad)
>     sinkpad = gst_element_get_pad (pipeVideo, "sink");
>
>     // If the pipeline is already linked, stop here
>     if (GST_PAD_IS_LINKED (sinkpad)) return;
>     else g_print ("the pipeline is not yet linked to the sink, so linking it now...\n");
>     // You can add a notify::caps handler here:
>     //g_signal_connect (sinkpad, "notify::caps", G_CALLBACK (cb_notify_caps), NULL);
>
>     // Link the main pipeline pad with the video pipeline pad
>     if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
>     {
>       g_error ("Cannot link video\n");
>       return;
>     }
>     // Put the state to PAUSED; it can crash if not initialised
>     // gst_element_set_state (pipeVideo, GST_STATE_PAUSED);
>     // gst_element_set_state (pipeAudio, GST_STATE_PAUSED);
>     // gst_element_set_state (pipeline, GST_STATE_PAUSED);
>
>     g_print ("Video playing\n");
>     //gst_element_set_state (pipeVideo, GST_STATE_PLAYING);
>   }
>
>   // AUDIO
>   if (g_strrstr (gst_structure_get_name (str), "audio"))
>   {
>     g_print ("Audio link\n");
>
>     gst_bin_add (GST_BIN (pipeline), pipeAudio);
>     gst_element_set_state (pipeAudio, GST_STATE_READY);
>     sinkpad = gst_element_get_pad (pipeAudio, "sink");
>     if (GST_PAD_IS_LINKED (sinkpad)) {
>       g_print ("Already linked");
>     }
>
>     if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
>     {
>       g_error ("Cannot link audio\n");
>       return;
>     }
>     g_print ("Audio paused\n");
>     gst_element_set_state (pipeAudio, GST_STATE_PAUSED);
>
>     g_print ("Audio playing\n");
>     //gst_element_set_state (pipeVideo, GST_STATE_PAUSED);
>     gst_element_set_state (pipeAudio, GST_STATE_PLAYING);
>     // gst_element_set_state (pipeline, GST_STATE_PAUSED);
>     // g_print ("pipe playing2\n");
>
>     /*
>     if (!haveTypeAudio) {
>       g_print ("Audio type not found yet %d \n", video_frames);
>       return;
>     };
>     */
>
>     //gst_object_unref (sinkpad);
>   }
>   //gst_element_set_state (pipeline, GST_STATE_PLAYING);
> }
>
>
> gint main (gint argc, gchar *argv[])
> {
>   /* make sure we have input */
>   if (argc != 2) {
>     g_print ("Usage: %s <filename>\n", argv[0]);
>     return -1;
>   }
>
>   GstBus* bus;
>
>   /* initialize GStreamer */
>   gst_init (&argc, &argv);
>   GMainLoop *loop = g_main_loop_new (NULL, FALSE);
>
>   /* Main pipeline */
>   pipeline = gst_pipeline_new ("Main pipeline");
>
>   GstElement* source = gst_element_factory_make ("filesrc", "file-source");
>   /* the parser exposes dynamic output pads (audio and video); you will have to link them to your audio and video threads */
>   GstElement* parser = gst_element_factory_make ("decodebin", "decodebin-parser");
>
>   /* Audio pipeline */
>   pipeAudio = gst_pipeline_new ("audio-player");
>   /* A queue is needed to synchronise with the video thread */
>   GstElement* aqueue = gst_element_factory_make ("queue", "aqueue");
>   // GstElement* adecoder = gst_element_factory_make ("identity", "identity-decoder-audio");
>   GstElement* aconv = gst_element_factory_make ("audioconvert", "converteraudio");
>   /* identity is useful for adding a handoff signal (to grab a sample) */
>   // GstElement* aconv2 = gst_element_factory_make ("identity", "identity-conv2");
>   /* With typefind you are able to retrieve some info about the stream */
>   // GstElement* afind = gst_element_factory_make ("typefind", "typefindaudio");
>   GstElement* asink = gst_element_factory_make ("alsasink", "alsa-output");
>
>   /* Video pipeline */
>   pipeVideo = gst_pipeline_new ("video-player");
>   /* A queue is needed to synchronise with the audio thread */
>   GstElement* vqueue = gst_element_factory_make ("queue", "vqueue");
>   //GstElement* vdecoder = gst_element_factory_make ("identity", "identity-decoder");
>   GstElement* vconv = gst_element_factory_make ("ffmpegcolorspace", "convertervideo");
>   /* Use a capsfilter if you want to convert to RGB (the default ffmpegcolorspace output is YUV) */
>   // GstElement* vcapsfilter = gst_element_factory_make ("capsfilter", "caps-restrict");
>   /*g_object_set (G_OBJECT (vcapsfilter), "caps",
>       gst_caps_new_simple ("video/x-raw-rgb",
>                            "width", G_TYPE_INT, VIDEO_WIDTH,
>                            "height", G_TYPE_INT, VIDEO_HEIGHT,
>                            "framerate", GST_TYPE_FRACTION, 25, 1,
>                            "bpp", G_TYPE_INT, 3*8,
>                            "depth", G_TYPE_INT, 3*8,
>                            "red_mask", G_TYPE_INT, 0xff0000,
>                            "green_mask", G_TYPE_INT, 0x00ff00,
>                            "blue_mask", G_TYPE_INT, 0x0000ff,
>                            NULL),
>       NULL);*/
>
>   /* Put a handoff signal on identity and you will grab video frames */
>   // GstElement* vconv2 = gst_element_factory_make ("identity", "identity-vconv2");
>   // g_signal_connect (vconv2, "handoff", G_CALLBACK (cb_handoff_video), NULL);
>   /* use typefind if you want to grab some info on the video, like width, height... */
>   // GstElement* vfind = gst_element_factory_make ("typefind", "typefindVideo2");
>   //GstElement* vsink = gst_element_factory_make ("fakesink", "video-fake-output");
>   GstElement* vsink = gst_element_factory_make ("ximagesink", "video-output");
>
>   /* You need to test all elements to see whether they were created */
>   if (!pipeline || !source || !parser) {
>     g_print ("One basic element could not be created.\n");
>     if (!pipeline) g_print ("pipeline\n");
>     if (!source) g_print ("source\n");
>     if (!parser) g_print ("parser\n");
>     return -1;
>   }
>   if (!pipeAudio || !aqueue || /*!adecoder ||*/ !aconv || !asink) {
>     g_print ("One audio element could not be created.\n");
>     if (!pipeAudio) g_print ("pipeline\n");
>     if (!aqueue) g_print ("queue\n");
>     //if (!adecoder) g_print ("decoder\n");
>     if (!aconv) g_print ("conv\n");
>     if (!asink) g_print ("sink\n");
>     return -1;
>   }
>   if (!pipeVideo || !vqueue || /*!vdecoder ||*/ !vconv || !vsink) {
>     g_print ("One video element could not be created.\n");
>     if (!pipeVideo) g_print ("pipeline\n");
>     if (!vqueue) g_print ("queue\n");
>     //if (!vdecoder) g_print ("decoder\n");
>     if (!vconv) g_print ("conv\n");
>     if (!vsink) g_print ("sink\n");
>     return -1;
>   }
>
>   g_object_set (G_OBJECT (source), "location", argv[1], NULL);
>
>   /* Add bus watches to catch messages */
>   bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
>   gst_bus_add_watch (bus, cb_bus_call, loop);
>   gst_object_unref (bus);
>   // bus = gst_pipeline_get_bus (GST_PIPELINE (pipeVideo));
>   // gst_bus_add_watch (bus, cb_bus_call, loop);
>   // gst_object_unref (bus);
>
>   bus = gst_pipeline_get_bus (GST_PIPELINE (pipeAudio));
>   gst_bus_add_watch (bus, cb_bus_call, loop);
>   gst_object_unref (bus);
>
>   /* Video pipeline: add and link elements */
>   gst_bin_add_many (GST_BIN (pipeVideo), vqueue, /*vdecoder,*/ vconv, /*vcapsfilter, vconv2, vfind,*/ vsink, NULL);
>   gst_element_link_many (vqueue, /*vdecoder,*/ vconv, /*vcapsfilter, vconv2, vfind,*/ vsink, NULL);
>
>   /* Set the ghost pad for the video pipeline (its input pad) */
>   GstPad* pad = gst_element_get_pad (vqueue, "sink");
>   gst_element_add_pad (pipeVideo, gst_ghost_pad_new ("sink", pad));
>   gst_object_unref (GST_OBJECT (pad));
>
>   /* Audio pipeline */
>   gst_bin_add_many (GST_BIN (pipeAudio), aqueue, /*adecoder,*/ aconv, /*aconv2, afind,*/ asink, NULL);
>   gst_element_link_many (aqueue, /*adecoder,*/ aconv, /*aconv2, afind,*/ asink, NULL);
>   pad = gst_element_get_pad (aqueue, "sink");
>   gst_element_add_pad (pipeAudio, gst_ghost_pad_new ("sink", pad));
>   gst_object_unref (GST_OBJECT (pad));
>
>   /* Main pipeline */
>   gst_bin_add_many (GST_BIN (pipeline), source, parser, NULL);
>   gst_element_link (source, parser);
>
>   /* link together - note that we cannot link the parser and
>    * decoder yet, because the parser uses dynamic pads. For that,
>    * we set a pad-added signal handler. */
>   g_signal_connect (parser, "pad-added", G_CALLBACK (cb_new_pad), NULL);
>
>   /* Now set to playing and iterate. */
>   g_print ("Setting to READY\n");
>   gst_element_set_state (pipeline, GST_STATE_READY);
>   g_print ("Setting to PLAYING\n");
>   gst_element_set_state (pipeline, GST_STATE_PLAYING);
>   // gst_element_set_state (pipeVideo, GST_STATE_PLAYING);
>
>   /* wait until it's up and running or failed */
>   if (gst_element_get_state (pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
>     g_error ("Failed to go into PLAYING state");
>   }
>
>   // called on each buffer being read; note that filesrc's source pad is named "src"
>   GstPad *padObserver = gst_element_get_pad (source, "src");
>   gst_pad_add_buffer_probe (padObserver, G_CALLBACK (cb_have_data), NULL);
>   gst_object_unref (padObserver);
>
>   g_print ("Running\n");
>   g_main_loop_run (loop);
>
>   /* exit */
>   printf ("exit");
>   gst_element_set_state (pipeline, GST_STATE_NULL);
>   gst_object_unref (pipeline);
>
>   return 0;
> }