[gst-devel] fakesink not working as a videosink with playbin2 ?

Vincent Torri vtorri at univ-evry.fr
Thu Oct 14 16:44:00 CEST 2010


Hey,

basically, I create a playbin2, i pause the pipeline, i create a fakesink, 
and set it as the video-sink for playbin2.

When I play the file, the handoff callback of the fakesink is not called. 
Instead, a new window is created, where the stream is rendered. It seems 
that settin the video sink as the fakesink is not working (that is, i'm 
doing something wrong). The code is below.

Does someone see where the problem is ?

thank you

Vincent Torri



Eina_Bool
_emotion_pipeline_build(Emotion_Gstreamer_Video *ev, const char *file)
{
   int n_audio;
   int n_video;
   int i;

    ev->pipeline = gst_element_factory_make ("playbin2", "pipeline");
    if (!ev->pipeline)
      {
        ERR("could not create playbin2 element");
        return EINA_FALSE;
      }

    ev->eos_bus = gst_pipeline_get_bus(GST_PIPELINE(ev->pipeline));
    if (!ev->eos_bus)
      {
        ERR("could not create BUS");
        goto unref_pipeline;
      }

    g_object_set (G_OBJECT (ev->pipeline), "uri", file, NULL);

    if (!emotion_pipeline_pause(ev->pipeline))
      goto unref_pipeline;

    g_object_get (G_OBJECT (ev->pipeline),
                  "n-audio", &n_audio,
                  "n-video", &n_video,
                  NULL);

    /* Video sinks */

    for (i = 0; i < n_video; i++)
      {
        GstPad *pad = NULL;

        g_signal_emit_by_name (ev->pipeline, "get-video-pad", i, &pad);
        if (pad)
          {
            GstCaps *caps = NULL;
            GstStructure *structure;
            GstQuery     *query;
            const GValue *val;
            gchar        *str;
            Eina_Bool build_sink = EINA_FALSE;

            gdouble length_time;
            gint width;
            gint height;
            gint fps_num;
            gint fps_den;
            guint32 fourcc;

            caps = gst_pad_get_negotiated_caps(pad);
            if (!caps)
              goto unref_pad_v;
            structure = gst_caps_get_structure(caps, 0);
            str = gst_caps_to_string(caps);

            if (!gst_structure_get_int(structure, "width", &width))
              goto unref_caps_v;
            if (!gst_structure_get_int(structure, "height", &height))
              goto unref_caps_v;
            if (!gst_structure_get_fraction(structure, "framerate", &fps_num, &fps_den))
              goto unref_caps_v;

            if (g_str_has_prefix(str, "video/x-raw-yuv"))
              {
                val = gst_structure_get_value(structure, "format");
                fourcc = gst_value_get_fourcc(val);
              }
            else if (g_str_has_prefix(str, "video/x-raw-rgb"))
              fourcc = GST_MAKE_FOURCC('A', 'R', 'G', 'B');
            else
              goto unref_caps_v;

            query = gst_query_new_duration(GST_FORMAT_TIME);
            if (gst_pad_peer_query(pad, query))
              {
                gint64 t;

                gst_query_parse_duration(query, NULL, &t);
                length_time = (double)t / (double)GST_SECOND;
              }
            else
              goto unref_query_v;

            build_sink = EINA_TRUE;

          unref_query_v:
            gst_query_unref(query);
          unref_caps_v:
            gst_caps_unref(caps);
          unref_pad_v:
            gst_object_unref(pad);

            if (build_sink)
              {
                Emotion_Video_Sink *vsink;

                vsink = emotion_video_sink_new(ev);
                if (!vsink) continue;

                vsink->length_time = length_time;
                vsink->width = width;
                vsink->height = height;
                vsink->fps_num = fps_num;
                vsink->fps_den = fps_den;
                vsink->fourcc = fourcc;

                vsink->sink = gst_element_factory_make("fakesink", "videosink");
                gst_element_set_state(vsink->sink, GST_STATE_PAUSED);
              }
          }
      }

    /* Audio sinks */

    for (i = 0; i < n_audio; i++)
      {
        GstPad *pad = NULL;

        g_signal_emit_by_name (ev->pipeline, "get-audio-pad", i, &pad);
        if (pad)
          {
            GstCaps *caps = NULL;
            GstStructure *structure;
            GstQuery     *query;
            Eina_Bool build_sink = EINA_FALSE;

            gdouble length_time;
            gint channels;
            gint samplerate;

            caps = gst_pad_get_negotiated_caps(pad);
            if (!caps)
              goto unref_pad_a;
            structure = gst_caps_get_structure(caps, 0);

            if (!gst_structure_get_int(structure, "channels", &channels))
              goto unref_caps_a;
            if (!gst_structure_get_int(structure, "rate", &samplerate))
              goto unref_caps_a;

            query = gst_query_new_duration(GST_FORMAT_TIME);
            if (gst_pad_peer_query(pad, query))
              {
                gint64 t;

                gst_query_parse_duration(query, NULL, &t);
                length_time = (double)t / (double)GST_SECOND;
              }
            else
              goto unref_query_a;

            build_sink = EINA_TRUE;

          unref_query_a:
            gst_query_unref(query);
          unref_caps_a:
            gst_caps_unref(caps);
          unref_pad_a:
            gst_object_unref(pad);

            if (build_sink)
              {
                Emotion_Audio_Sink *asink;

                asink = (Emotion_Audio_Sink *)calloc(1, sizeof(Emotion_Audio_Sink));
                if (!asink) continue;
                ev->audio_sinks = eina_list_append(ev->audio_sinks, asink);
                if (eina_error_get())
                  {
                    free(asink);
                    continue;
                  }

                asink->length_time = length_time;
                asink->channels = channels;
                asink->samplerate = samplerate;

                asink->sink = gst_element_factory_make("autoaudiosink", "audiosink");
                gst_element_set_state(asink->sink, GST_STATE_PAUSED);
              }
          }
      }

    if (eina_list_count(ev->video_sinks) == 1)
      {
        Emotion_Video_Sink *vsink;

        vsink = (Emotion_Video_Sink *)eina_list_data_get(ev->video_sinks);
        ev->ratio = (double)vsink->width / (double)vsink->height;
      }

    {
      Emotion_Video_Sink *vsink;

      vsink = (Emotion_Video_Sink *)eina_list_data_get(ev->video_sinks);
      if (vsink && vsink->sink)
        {
          g_object_set(G_OBJECT(ev->pipeline), "video-sink", vsink->sink, NULL);
          g_object_set(G_OBJECT(vsink->sink), "sync", TRUE, NULL);
          g_object_set(G_OBJECT(vsink->sink), "signal-handoffs", TRUE, NULL);
          g_signal_connect(G_OBJECT(vsink->sink),
                           "handoff",
                           G_CALLBACK(cb_handoff), ev);
        }
    }

    {
      Emotion_Audio_Sink *asink;

      asink = (Emotion_Audio_Sink *)eina_list_data_get(ev->audio_sinks);
      if (asink && asink->sink)
        {
          g_object_set(G_OBJECT(ev->pipeline), "audio-sink", asink->sink, 
NULL);
        }
    }

    return EINA_TRUE;

  unref_pipeline:
    gst_object_unref(ev->pipeline);

    return EINA_FALSE;
}




More information about the gstreamer-devel mailing list