Problem when injecting source file mp3/m3u8 data into the appsrc element.
Luca Bacci
luca.bacci982 at gmail.com
Fri Mar 16 15:23:04 UTC 2018
Yeah, I see that the line that creates the decodebin element is actually
commented out.
You should add it to the pipeline. decodebin decodes the mp3 data to
uncompressed audio.
audioresample, and most audio sinks, cannot take compressed data like mp3.
Take a look at
https://gstreamer.freedesktop.org/documentation/application-development/highlevel/playback-components.html
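
One thing to watch out for: decodebin creates its source pads dynamically,
only after it has detected the stream type, so it cannot be linked to the
downstream audioconvert with gst_element_link_many() while the pipeline is
being built. Here is a minimal sketch of the usual approach, reusing the
element names from your code; the pad_added_handler callback below is just a
placeholder name, not something from your program:

/* Link decodebin to audioconvert once decodebin has created its source pad */
static void pad_added_handler (GstElement *decodebin, GstPad *new_pad,
    GstElement *audio_convert)
{
  GstPad *sink_pad = gst_element_get_static_pad (audio_convert, "sink");

  if (!gst_pad_is_linked (sink_pad)) {
    if (gst_pad_link (new_pad, sink_pad) != GST_PAD_LINK_OK)
      g_printerr ("Could not link decodebin to audioconvert\n");
  }
  gst_object_unref (sink_pad);
}

/* In main(), link only up to decodebin statically and let the callback
 * complete the branch when the pad appears */
gst_element_link_many (data.audio_queue, data.audio_decode, NULL);
gst_element_link_many (data.audio_convert1, data.audio_resample,
    data.audio_sink, NULL);
g_signal_connect (data.audio_decode, "pad-added",
    G_CALLBACK (pad_added_handler), data.audio_convert1);

The same applies to the second decodebin in the app branch (app_decode).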
Luca
2018-03-16 14:13 GMT+01:00 Sujith reddy <Sujithreddy6192 at gmail.com>:
> Hi Luca,
>
> In the code above I didn't use the decodebin element.
>
> ........................
>
> I found that when I feed it mp3/m3u8 data it produces noise. Then I
> rechecked the code and realized that I need to use a decodebin element to
> decode the mp3.
>
> Now I tried the code below.
>
> It is saying *Elements could not be linked.*
>
> /////////////////////////////////////
> /*****************
>
>
> gcc llll.c -o playback-tutorial-7 `pkg-config --cflags --libs gstreamer-1.0
> gstreamer-audio-1.0 gstreamer-app-1.0`
> *******************/
>
>
> #include <gst/gst.h>
> #include <gst/app/gstappsrc.h>
> #include <gst/audio/audio.h>
> #include <string.h>
> #include <stdio.h>
>
> #define CHUNK_SIZE 4096 /* Amount of bytes we are sending in each buffer
> */
> #define SAMPLE_RATE 48000 /* Samples per second we are sending */
>
> /* Structure to contain all our information, so we can pass it to callbacks
> */
> typedef struct _CustomData {
> GstElement *pipeline, *app_source, *tee, *audio_queue, *audio_convert1,
> *audio_resample, *audio_sink,*app_decode,*audio_decode;
> GstElement *app_queue, *audio_convert2, *app_sink;
>
>
> guint64 num_samples; /* Number of samples generated so far (for
> timestamp generation) */
> // gfloat a, b, c, d; /* For waveform generation */
>
> guint sourceid; /* To control the GSource */
> FILE *fp,*fp1;
> GMainLoop *main_loop; /* GLib's Main Loop */
> } CustomData;
>
> /* This method is called by the idle GSource in the mainloop, to feed
> CHUNK_SIZE bytes into appsrc.
> * The idle handler is added to the mainloop when appsrc requests us to
> start sending data (need-data signal)
> * and is removed when appsrc has enough data (enough-data signal).
> */
> static gboolean push_data (CustomData *data) {
> GstBuffer *buffer;
> GstFlowReturn ret;
> int i,r;
> GstMapInfo map;
> gint num_samples = CHUNK_SIZE/2; /* Because each sample is 16 bits */
> //gfloat freq;
>
> /* Create a new empty buffer */
> buffer = gst_buffer_new_and_alloc (CHUNK_SIZE);
>
> /* Set its timestamp and duration */
> GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->num_samples,
> GST_SECOND, SAMPLE_RATE);
> GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (CHUNK_SIZE,
> GST_SECOND, SAMPLE_RATE);
>
> /* Read the next chunk of the mp3 file into the buffer */
> gst_buffer_map (buffer, &map, GST_MAP_WRITE);
> r = fread (map.data, 2, CHUNK_SIZE / 2, data->fp);
> gst_buffer_unmap (buffer, &map);
> gst_buffer_set_size (buffer, r * 2); /* Only keep the bytes actually read */
> data->num_samples += num_samples;
>
> if (r == 0) {
> /* No more data to read: signal end-of-stream and stop feeding */
> gst_app_src_end_of_stream (GST_APP_SRC (data->app_source));
> gst_buffer_unref (buffer);
> return FALSE;
> }
>
> /* Push the buffer into the appsrc */
> g_signal_emit_by_name (data->app_source, "push-buffer", buffer, &ret);
> // gst_app_src_end_of_stream (data->app_source);
> //gst_app_src_push_buffer (data->app_source, buffer);
> /* Free the buffer now that we are done with it */
> gst_buffer_unref (buffer);
>
> if (ret != GST_FLOW_OK) {
> /* We got some error, stop sending data */
> return FALSE;
> }
>
> return TRUE;
> }
>
> /* This signal callback triggers when appsrc needs data. Here, we add an
> idle handler
> * to the mainloop to start pushing data into the appsrc */
> static void start_feed (GstElement *source, guint size, CustomData *data) {
> if (data->sourceid == 0) {
> g_print ("Start feeding\n");
> data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
> }
> }
>
> /* This callback triggers when appsrc has enough data and we can stop
> sending.
> * We remove the idle handler from the mainloop */
> static void stop_feed (GstElement *source, CustomData *data) {
> if (data->sourceid != 0) {
> g_print ("Stop feeding\n");
> g_source_remove (data->sourceid);
> data->sourceid = 0;
> }
> }
>
> /* The appsink has received a buffer */
>
> static GstFlowReturn new_sample (GstElement *sink, CustomData *data) {
> GstSample *sample;
> GstBuffer *buffer;
> GstMapInfo map;
>
> /* Retrieve the sample that appsink has just received */
> g_signal_emit_by_name (data->app_sink, "pull-sample", &sample);
> if (sample)
> {
> buffer = gst_sample_get_buffer (sample);
>
> gst_buffer_map (buffer, &map, GST_MAP_READ);
>
> g_print ("\n here size=%" G_GSIZE_FORMAT "\n", map.size);
> fwrite (map.data, 1, map.size, data->fp1); /* data is written to a file */
> gst_buffer_unmap (buffer, &map);
> gst_sample_unref (sample);
>
> return GST_FLOW_OK;
> }
>
> return GST_FLOW_ERROR;
> }
>
> /* This function is called when an error message is posted on the bus */
> static void error_cb (GstBus *bus, GstMessage *msg, CustomData *data) {
> GError *err;
> gchar *debug_info;
>
> /* Print error details on the screen */
> gst_message_parse_error (msg, &err, &debug_info);
> g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME
> (msg->src), err->message);
> g_printerr ("Debugging information: %s\n", debug_info ? debug_info :
> "none");
> g_clear_error (&err);
> g_free (debug_info);
>
> g_main_loop_quit (data->main_loop);
> }
>
> int main(int argc, char *argv[]) {
> CustomData data;
> GstPad *tee_audio_pad,*tee_app_pad;
> GstPad *queue_audio_pad, *queue_app_pad;
> GstAudioInfo info;
> GstCaps *audio_caps;
> GstBus *bus;
>
> /* Initialize custom data structure */
> memset (&data, 0, sizeof (data));
>
> data.fp = fopen ("/home/raghava/Documents/llll/songs/ChoosiChudangane.mp3", "rb");
> // data.fp = fopen ("./Deviceconnected.raw", "rb");
> data.fp1 = fopen ("1.raw", "wb");
> /* Initialize GStreamer */
> gst_init (&argc, &argv);
>
> /* Create the elements */
> data.app_source = gst_element_factory_make ("appsrc", "audio_source");
> data.tee = gst_element_factory_make ("tee", "tee");
> data.audio_queue = gst_element_factory_make ("queue", "audio_queue");
> data.app_decode = gst_element_factory_make ("decodebin", "app_decode");
> data.audio_convert1 = gst_element_factory_make ("audioconvert",
> "audio_convert1");
> data.audio_resample = gst_element_factory_make ("audioresample",
> "audio_resample");
> data.audio_sink = gst_element_factory_make ("autoaudiosink",
> "audio_sink");
> data.app_queue = gst_element_factory_make ("queue", "app_queue");
> data.audio_decode = gst_element_factory_make ("decodebin",
> "audio_decode");
> data.audio_convert2 = gst_element_factory_make ("audioconvert",
> "audio_convert2");
> data.app_sink = gst_element_factory_make ("appsink", "app_sink");
>
>
>
> /* Create the empty pipeline */
> data.pipeline = gst_pipeline_new ("test-pipeline");
>
> if (!data.pipeline || !data.app_source || !data.tee || !data.audio_queue
> || !data.audio_convert1 ||
> !data.audio_resample || !data.audio_sink || !data.audio_convert2 ||
> !data.app_queue || !data.app_sink ||!data.audio_decode||
> !data.app_decode ) //
> {
> g_printerr ("Not all elements could be created.\n");
> return -1;
> }
>
>
> /* Configure appsrc */
> gst_audio_info_set_format (&info, GST_AUDIO_FORMAT_S16, SAMPLE_RATE, 1,
> NULL);
> audio_caps = gst_audio_info_to_caps (&info);
> g_object_set (data.app_source, "caps", audio_caps, "format",
> GST_FORMAT_TIME, NULL);
> //g_object_set (data.app_source, "format", GST_FORMAT_TIME, NULL);
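> /* NOTE: these caps describe raw S16 audio, but the bytes pushed from
> push_data() come straight from the mp3 file, so they do not match; either
> the caps should describe the encoded stream (e.g. audio/mpeg) or a decoder
> has to follow appsrc before any raw-audio element. */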
> g_signal_connect (data.app_source, "need-data", G_CALLBACK (start_feed),
> &data);
> g_signal_connect (data.app_source, "enough-data", G_CALLBACK (stop_feed),
> &data);
>
> /* Configure appsink */
> g_object_set (data.app_sink, "emit-signals", TRUE, "caps", audio_caps,
> NULL);
> g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample),
> &data);
> gst_caps_unref (audio_caps);
> // g_free (audio_caps_text);
>
> /* Link all elements that can be automatically linked because they have
> "Always" pads */
> gst_bin_add_many (GST_BIN (data.pipeline), data.app_source, data.tee,
> data.audio_queue, data.audio_convert1, data.audio_resample,
> data.audio_sink, data.app_queue,
> data.audio_convert2,data.app_sink,data.audio_decode,data.app_decode,
> NULL);
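> /* NOTE: decodebin creates its source pads dynamically (see its "pad-added"
> signal), so the static links from decodebin to audioconvert below fail at
> this point; this is what produces the "Elements could not be linked" error. */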
> if (gst_element_link_many (data.app_source, data.tee, NULL) != TRUE ||
> gst_element_link_many (data.audio_queue, data.audio_decode,
> data.audio_convert1, data.audio_resample, data.audio_sink, NULL) != TRUE ||
> gst_element_link_many (data.app_queue, data.app_decode,
> data.audio_convert2, data.app_sink, NULL) != TRUE)
> {
> g_printerr ("Elements could not be linked.\n");
> gst_object_unref (data.pipeline);
> return -1;
> }
>
> /* Manually link the Tee, which has "Request" pads */
> tee_audio_pad = gst_element_get_request_pad (data.tee, "src_%u");
> g_print ("Obtained request pad %s for audio branch.\n", gst_pad_get_name
> (tee_audio_pad));
> queue_audio_pad = gst_element_get_static_pad (data.audio_queue, "sink");
> tee_app_pad = gst_element_get_request_pad (data.tee, "src_%u");
> g_print ("Obtained request pad %s for app branch.\n", gst_pad_get_name
> (tee_app_pad));
> queue_app_pad = gst_element_get_static_pad (data.app_queue, "sink");
> if (gst_pad_link (tee_audio_pad, queue_audio_pad) != GST_PAD_LINK_OK ||
> gst_pad_link (tee_app_pad, queue_app_pad) != GST_PAD_LINK_OK) {
> g_printerr ("Tee could not be linked\n");
> gst_object_unref (data.pipeline);
> return -1;
> }
> gst_object_unref (queue_audio_pad);
> gst_object_unref (queue_app_pad);
>
> /* Instruct the bus to emit signals for each received message, and
> connect
> to the interesting signals */
> bus = gst_element_get_bus (data.pipeline);
> gst_bus_add_signal_watch (bus);
> g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb,
> &data);
> gst_object_unref (bus);
>
> /* Start playing the pipeline */
> gst_element_set_state (data.pipeline, GST_STATE_PLAYING);
>
> /* Create a GLib Main Loop and set it to run */
> data.main_loop = g_main_loop_new (NULL, FALSE);
> g_main_loop_run (data.main_loop);
>
> /* Release the request pads from the Tee, and unref them */
> gst_element_release_request_pad (data.tee, tee_audio_pad);
> gst_element_release_request_pad (data.tee, tee_app_pad);
> gst_object_unref (tee_audio_pad);
> gst_object_unref (tee_app_pad);
>
> /* Free resources */
> gst_element_set_state (data.pipeline, GST_STATE_NULL);
> gst_object_unref (data.pipeline);
> return 0;
> }
>
>
> Thanks
> sujith
>
>
>