Gstreamer Application
Giritharan
girisugu2 at gmail.com
Fri May 15 09:20:23 PDT 2015
Hi, I wrote a GStreamer application to record video along with audio, based on the
following gstreamer command:
*
gst-launch –e mfw_v4lsrc capture-mode=0 fps-n=30 ! vpuenc codec=0 ! queue !
mux. alsasrc ! 'audio/x-raw-int,rate=48000,channels=1' ! mfw_mp3encoder !
queue ! mux. avimux name=mux ! filesink location=audio_video2.avi
sync=false.
My problem is that while running the C file below, I got the following output:
(MFW_GST_V4LSRC_PLUGIN 3.0.7 build on Nov 6 2014 19:28:40.
No such IOCTL, cmd is 22032
No such IOCTL, cmd is 22032
BLN_MAD-MMCODECS_MP3E_ARM_02.02.00_ARM12 build on Jan 18 2013 15:29:26.
MFW_GST_MP3_ENCODER_PLUGIN 3.0.7 build on Nov 6 2014 19:28:47.
Segmentation fault)
#include <glib.h>
#include <gst/gst.h>
static gboolean
bus_call (GstBus *bus,
GstMessage *msg,
gpointer data)
{
GMainLoop *loop = (GMainLoop *) data;
switch (GST_MESSAGE_TYPE (msg)) {
case GST_MESSAGE_EOS:
g_print ("End of stream\n");
g_main_loop_quit (loop);
break;
case GST_MESSAGE_ERROR: {
gchar *debug;
GError *error;
gst_message_parse_error (msg, &error, &debug);
g_free (debug);
g_printerr ("Error: %s\n", error->message);
g_error_free (error);
g_main_loop_quit (loop);
break;
}
default:
break;
}
return TRUE;
}
static void
on_pad_added (GstElement *element,
GstPad *pad,
gpointer data)
{
GstPad *sinkpad;
GstElement *decoder = (GstElement *) data;
/* We can now link this pad with the vorbis-decoder sink pad */
g_print ("Dynamic pad created, linking demuxer/decoder\n");
sinkpad = gst_element_get_static_pad (decoder, "sink");
gst_pad_link (pad, sinkpad);
gst_object_unref (sinkpad);
}
int
main (int argc,
char *argv[])
{
GMainLoop *loop;
GstElement *pipeline, *video_source, *video_encoder, *video_queue,
*audio_source,*audio_encoder,*audio_queue,*audio_muxer, *sink, *filter;
GstBus *bus;
guint bus_watch_id;
GstCaps *caps = gst_caps_new_empty();
/* Initialisation */
gst_init (&argc, &argv);
loop = g_main_loop_new (NULL, FALSE);
if (argc != 2) {
g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
return -1;
}
/* Giri Gstreamer Elements for IMX */
pipeline = gst_pipeline_new("pipeline");
video_source = gst_element_factory_make("mfw_v4lsrc", "video_source");
video_encoder = gst_element_factory_make("vpuenc", "video_encoder");
video_queue = gst_element_factory_make("queue", "video_queue");
audio_source = gst_element_factory_make("alsasrc","audio_source");
audio_encoder = gst_element_factory_make("mfw_mp3encoder",
"audio_encoder");
audio_queue = gst_element_factory_make("queue", "audio_queue");
audio_muxer = gst_element_factory_make("avimux","audio_muxer");
sink = gst_element_factory_make("multifilesink", "sink");
filter = gst_element_factory_make("capsfilter", "filter");
GstStructure *cs;
GstCaps *video_caps;
cs = gst_structure_new("audio/x-raw-int",
"rate", G_TYPE_INT, 44100,
"channels", G_TYPE_INT, 1, NULL);
gst_caps_append_structure(caps, cs);
//audio=gst_caps_new_simple("audio/x-raw-int", "rate", G_TYPE_INT, 44100,
"channels", G_TYPE_INT, 1, NULL);
g_object_set (G_OBJECT (video_source), "capture-mode", 1, NULL);
g_object_set (G_OBJECT (video_source), "fps-n", 30, NULL);
// g_object_set (G_OBJECT (filter), "caps", p->gst.video_caps, NULL);
g_object_set (G_OBJECT (video_encoder),"codec", 0 , NULL);
g_object_set (G_OBJECT (filter), "caps", video_caps, NULL);
g_object_set (G_OBJECT (sink), "location",argv[1], NULL);
g_object_set (G_OBJECT (filter), "caps", cs, NULL);
if(!pipeline || !video_source || !video_encoder || !video_queue
|| !audio_source || !audio_encoder || !audio_muxer
|| !filter || !sink) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
/*pipeline make*/
gst_bin_add_many(GST_BIN (pipeline), video_source, video_encoder,
video_queue,audio_source,audio_encoder,audio_queue,audio_muxer,sink,filter,
NULL);
gst_element_link_many
(video_source,video_encoder,video_queue,audio_source,NULL);
gst_element_link_many(filter,audio_encoder,audio_queue,audio_muxer,
NULL);
gst_element_link(audio_muxer,sink);
/* we add a message handler */
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);
gst_object_unref (bus);
g_signal_connect (audio_source, "pad-added", G_CALLBACK (on_pad_added),
audio_muxer);
/* Set the pipeline to "playing" state*/
g_print ("Now playing: %s\n", argv[1]);
gst_element_set_state (pipeline, GST_STATE_PLAYING);
/* Iterate */
g_print ("Running...\n");
g_main_loop_run (loop);
/* Out of the main loop, clean up nicely */
g_print ("Returned, stopping playback\n");
gst_element_set_state (pipeline, GST_STATE_NULL);
g_print ("Deleting pipeline\n");
gst_object_unref (GST_OBJECT (pipeline));
g_source_remove (bus_watch_id);
g_main_loop_unref (loop);
return 0;
}
--
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/Gstreamer-Application-tp4671886.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
More information about the gstreamer-devel
mailing list