changing souphttpsrc location property on android
Dani
dmr.dev.rdp at gmail.com
Fri Jun 10 08:29:16 UTC 2016
I have been able to solve this problem by changing this instruction:
g_signal_connect (data->demux, "pad-added", G_CALLBACK
(dynamic_addpad), &data);
To this slightly different one (without the &):
g_signal_connect (data->demux, "pad-added", G_CALLBACK
(dynamic_addpad), data);
This way the manual pipeline works perfectly!
Regards,
Dani
El 06/06/16 a las 10:06, Dani escribió:
> Thanks for your answer, these are the changes I have done to the code:
>
> static void dynamic_addpad(GstElement *src, GstPad *new_pad,
> CustomData *data) { char* pad_name = gst_pad_get_name(new_pad);
> g_print(" In dynamic ADDING PAD %s\n", pad_name); if
> (g_str_has_prefix(pad_name,"audio")) { GstElement *q_audio;// =
> (GstElement *)data->queue_audio; q_audio = gst_bin_get_by_name
> (GST_BIN(data->pipeline), "queue_audio"); GstPad *audiodemuxsink =
> gst_element_get_static_pad(q_audio,"sink"); GstPadLinkReturn ret =
> gst_pad_link(new_pad, audiodemuxsink); g_print("'%s' dynamic link
> returns: %d\n", pad_name, (int) ret); gst_object_unref(q_audio); }
> else if (g_str_has_prefix(pad_name,"video")) { GstElement
> *q_video;// = (GstElement *) data->queue_video; q_video =
> gst_bin_get_by_name(GST_BIN(data->pipeline), "queue_video");
> GstPad *videodemuxsink =
> gst_element_get_static_pad(q_video,"sink"); GstPadLinkReturn ret =
> gst_pad_link(new_pad,videodemuxsink); g_print("'%s' dynamic link
> returns: %d\n", pad_name, (int) ret); gst_object_unref(q_video); } }
>
>
> static void *app_function (void *userdata) { JavaVMAttachArgs
> args; GstBus *bus; CustomData *data = (CustomData *)userdata;
> GSource *timeout_source; GSource *bus_source; GError *error =
> NULL; guint flags; GST_DEBUG ("Creating pipeline in CustomData at
> %p", data); /* Create our own GLib Main Context and make it the
> default one */ data->context = g_main_context_new ();
> g_main_context_push_thread_default(data->context); /* Build
> pipeline */ data->pipeline = gst_pipeline_new ("pipeline");
> data->httpsrc = gst_element_factory_make ("souphttpsrc",
> "http_src"); data->tsdemux = gst_element_factory_make ("tsdemux",
> "demux");data->queue_video = gst_element_factory_make("queue",
> "queue_video"); data->h264parse = gst_element_factory_make
> ("h264parse", "h264_parse"); data->h264dec =
> gst_element_factory_make ("avdec_h264", "h264dec");
> data->video_convert =
> gst_element_factory_make("videoconvert","video_convert");
> data->videosink = gst_element_factory_make ("glimagesink",
> "video_sink"); data->queue_audio = gst_element_factory_make
> ("queue", "queue_audio"); data->aacparse =
> gst_element_factory_make ("aacparse", "aacparse"); data->faad =
> gst_element_factory_make ("faad", "faad"); data->audio_convert =
> gst_element_factory_make("audioconvert", "audio_convert");
> data->audiosink = gst_element_factory_make ("autoaudiosink",
> "audio_sink"); g_signal_connect (data->tsdemux, "pad-added",
> G_CALLBACK (dynamic_addpad), &data);
> gst_bin_add_many(GST_BIN(data->pipeline), data->httpsrc,
> data->tsdemux, data->queue_video, data->h264parse, data->h264dec,
> data->video_convert, data->videosink, data->queue_audio,
> data->aacparse, data->faad, data->audio_convert, data->audiosink,
> NULL); gst_element_link(data->httpsrc, data->tsdemux);
> gst_element_link_many(data->queue_video, data->h264parse,
> data->h264dec, data->video_convert, data->videosink, NULL);
> gst_element_link_many(data->queue_audio, data->aacparse,
> data->faad, data->audio_convert, data->audiosink, NULL);
> GstElement *http_src; http_src =
> gst_bin_get_by_name(GST_BIN(data->pipeline), "http_src");
> g_object_set(G_OBJECT(http_src), "location",
> "http://192.168.0.32/videos/video.ts", NULL);
> gst_object_unref(http_src);if (error) { gchar *message =
> g_strdup_printf("Unable to build pipeline: %s", error->message);
> g_clear_error (&error); set_ui_message(message, data); g_free
> (message); return NULL; }/* Disable subtitles */ /*g_object_get
> (data->pipeline, "flags", &flags, NULL); flags &=
> ~GST_PLAY_FLAG_TEXT; g_object_set (data->pipeline, "flags", flags,
> NULL); */ /* Set the pipeline to READY, so it can already accept a
> window handle, if we have one */ data->target_state =
> GST_STATE_READY; gst_element_set_state(data->pipeline,
> GST_STATE_READY); /* Instruct the bus to emit signals for each
> received message, and connect to the interesting signals */ bus =
> gst_element_get_bus (data->pipeline); bus_source =
> gst_bus_create_watch (bus); g_source_set_callback (bus_source,
> (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
> g_source_attach (bus_source, data->context); g_source_unref
> (bus_source); g_signal_connect (G_OBJECT (bus), "message::error",
> (GCallback)error_cb, data); g_signal_connect (G_OBJECT (bus),
> "message::eos", (GCallback)eos_cb, data); g_signal_connect
> (G_OBJECT (bus), "message::state-changed",
> (GCallback)state_changed_cb, data); g_signal_connect (G_OBJECT
> (bus), "message::duration", (GCallback)duration_cb, data);
> g_signal_connect (G_OBJECT (bus), "message::buffering",
> (GCallback)buffering_cb, data); g_signal_connect (G_OBJECT (bus),
> "message::clock-lost", (GCallback)clock_lost_cb,
> data);gst_object_unref (bus); /* Register a function that GLib
> will call 4 times per second */ timeout_source =
> g_timeout_source_new (250); g_source_set_callback (timeout_source,
> (GSourceFunc)refresh_ui, data, NULL); g_source_attach
> (timeout_source, data->context); g_source_unref (timeout_source);
> /* Create a GLib Main Loop and set it to run */ GST_DEBUG
> ("Entering main loop... (CustomData:%p)", data); data->main_loop =
> g_main_loop_new (data->context, FALSE);
> check_initialization_complete (data); g_main_loop_run
> (data->main_loop); GST_DEBUG ("Exited main loop");
> g_main_loop_unref (data->main_loop); data->main_loop = NULL; /*
> Free resources */
> g_main_context_pop_thread_default(data->context);
> g_main_context_unref (data->context); data->target_state =
> GST_STATE_NULL; gst_element_set_state (data->pipeline,
> GST_STATE_NULL); gst_object_unref (data->pipeline); return NULL; }
>
> The full debug has been uploaded to this link:
> https://www.dropbox.com/s/bujeg6sc4381dtr/logcat060620160943.txt?dl=0
> In case it has anything to do with this issue, I am using Ubuntu
> 14.04 64-bit, Android Studio 1.5.1 (built December 1st, 2015) and NDK
> r10e-rc4 (64-bit). The Gradle version is 2.8 and the Android plugin
> version is 1.5.0. Thanks for your time, Dani
> El 03/06/16 a las 07:47, Sebastian Dröge escribió:
>> On Di, 2016-05-31 at 10:30 +0200, Dani wrote:
>>>
>>> I/GLib+stdout(16393): In dynamic ADDING PAD audio_0066
>>> I/GLib+stdout(16393): //////////-6//////////
>>> I/GLib+stdout(16393): Audio link refused!
>>> I/GLib+stdout(16393): Sink pad link: 'audio_0066'
>>> D/GStreamer+tsdemux(16393): 0:00:05.832146503 0x9e236660
>>> tsdemux.c:1599:activate_pad_for_stream:<demux:audio_0066> done adding
>>> pad
>> That's still not very useful unfortunately. Can you show your code, and
>> if it's still like last time please only try linking the pads once and
>> then just print the return value of gst_pad_link(). Feel free to print
>> it as an integer too instead of having all the if-else cases there.
>>
>> However your code should really print a lot of debug output because of
>> the 7 links it should be doing before printing the "link refused"
>> message.
>>
>>
>> Also unrelated to all this, use queue and not queue2 in your pipeline.
>> queue2 is mostly for network buffering, not thread decoupling :)
>>
>> _______________________________________________
>> gstreamer-devel mailing list
>> gstreamer-devel at lists.freedesktop.org
>> https://lists.freedesktop.org/mailman/listinfo/gstreamer-devel
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <https://lists.freedesktop.org/archives/gstreamer-devel/attachments/20160610/a55b5d29/attachment-0001.html>
More information about the gstreamer-devel
mailing list