<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=ISO-8859-1">
</head>
<body bgcolor="#FFFFFF" text="#000000">
<div class="moz-text-html" lang="x-western">
<div style="color:#000; background-color:#fff; font-family:times
new roman, new york, times, serif;font-size:12pt">
<div>If I use the following command, I can record audio and
video into an AVI file.<br>
<br>
gst-launch-1.0 -e v4l2src norm=PAL ! videorate !
'video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1'
! queue ! mux. alsasrc ! audioconvert !
'audio/x-raw,rate=44100,channels=2' ! queue ! mux. avimux
name=mux ! filesink location=script-test.avi <br>
</div>
<div style="color: rgb(0, 0, 0); font-size: 16px; font-family:
times new roman,new york,times,serif; background-color:
transparent; font-style: normal;"><br>
I would like to replicate that in a program; my program is
below. However, the video does not make it through - the
audio plays OK when I play the AVI file.<br>
<br>
It had been suggested that videorate and audiorate might be
beneficial for me. In my program, if I included audiorate (as
indicated by the commented out lines), I just heard a brief
burst of sound at the beginning of the clip, whereas without
audiorate, I heard the full audio. Including/excluding
videorate does not seem to make any difference at the moment.<br>
<br>
I tried to set a value for "norm" on the v4l2src as PAL, but
the compiler did not recognise the enum value. The only way I
could set it was to use the numeric value indicated by gst-inspect.<br>
<br>
I assume that I have done something stupid again, but I cannot
see what it is.<br>
<br>
Thanks for your help.<br>
<br>
Ian<br>
</div>
<div style="color: rgb(0, 0, 0); font-size: 16px; font-family:
times new roman,new york,times,serif; background-color:
transparent; font-style: normal;"><br>
</div>
<div style="color: rgb(0, 0, 0); font-size: 16px; font-family:
times new roman,new york,times,serif; background-color:
transparent; font-style: normal;">#include <gst/gst.h><br>
#include <glib.h><br>
<br>
<br>
/* Bus watch callback: handles messages posted on the pipeline's bus.<br>
   Quits the main loop on end-of-stream, or prints the error text and<br>
   quits on GST_MESSAGE_ERROR; returns TRUE to keep the watch alive. */<br>
static gboolean<br>
bus_call (GstBus *bus,<br>
GstMessage *msg,<br>
gpointer data)<br>
{<br>
/* user data is the GMainLoop created in main() */<br>
GMainLoop *loop = (GMainLoop *) data;<br>
<br>
switch (GST_MESSAGE_TYPE (msg)) {<br>
<br>
case GST_MESSAGE_EOS:<br>
g_print ("End of stream\n");<br>
g_main_loop_quit (loop);<br>
break;<br>
<br>
case GST_MESSAGE_ERROR: {<br>
gchar *debug;<br>
GError *error;<br>
<br>
/* parse_error hands us ownership of error and debug; free both */<br>
gst_message_parse_error (msg, &error, &debug);<br>
g_free (debug);<br>
<br>
g_printerr ("Error: %s\n", error->message);<br>
g_error_free (error);<br>
<br>
g_main_loop_quit (loop);<br>
break;<br>
}<br>
default:<br>
/* all other message types are ignored */<br>
break;<br>
}<br>
<br>
/* TRUE keeps this handler installed on the bus watch */<br>
return TRUE;<br>
}<br>
<br>
<br>
/* "pad-added" style callback: links a newly created dynamic source pad<br>
   to the "sink" pad of the element passed as user data.<br>
   NOTE(review): nothing in main() below connects this callback to any<br>
   signal - it is dead code in this program. The return value of<br>
   gst_pad_link() is also not checked. */<br>
static void<br>
on_pad_added (GstElement *element,<br>
GstPad *pad,<br>
gpointer data)<br>
{<br>
GstPad *sinkpad;<br>
GstElement *decoder = (GstElement *) data;<br>
<br>
/* We can now link this pad with the vorbis-decoder sink pad<br>
*/<br>
g_print ("Dynamic pad created, linking demuxer/decoder\n");<br>
<br>
sinkpad = gst_element_get_static_pad (decoder, "sink");<br>
<br>
gst_pad_link (pad, sinkpad);<br>
<br>
/* get_static_pad returned a new reference; drop it */<br>
gst_object_unref (sinkpad);<br>
}<br>
<br>
<br>
<br>
/* Entry point: builds a v4l2src -> capsfilter -> queue -> avimux video<br>
   branch and an alsasrc -> audioconvert -> capsfilter -> queue -> avimux<br>
   audio branch, and records the muxed stream to the AVI file named in<br>
   argv[1]. Returns 0 on success, -1 on setup failure. */<br>
int<br>
main (int argc,<br>
char *argv[])<br>
{<br>
GMainLoop *loop;<br>
<br>
GstElement *pipeline, *vsource, *vcapsfilter, *vidrate,
*queue1;<br>
GstElement *asource, *aconv, *audrate, *acapsfilter,
*queue2;<br>
GstElement *mux, *sink;<br>
GstBus *bus;<br>
guint bus_watch_id;<br>
<br>
GstCaps *caps;<br>
<br>
/* Initialisation */<br>
gst_init (&argc, &argv);<br>
<br>
loop = g_main_loop_new (NULL, FALSE);<br>
<br>
<br>
/* Check input arguments */<br>
if (argc != 2) {<br>
g_printerr ("Usage: %s <AVI filename>\n", argv[0]);<br>
return -1;<br>
}<br>
<br>
<br>
/* Create gstreamer elements */<br>
pipeline = gst_pipeline_new ("av-recorder");<br>
vsource = gst_element_factory_make ("v4l2src",
"vid-source");<br>
vcapsfilter = gst_element_factory_make ("capsfilter",
"vid-caps");<br>
vidrate = gst_element_factory_make ("videorate",
"vidrate");<br>
queue1 = gst_element_factory_make ("queue",
"queue1");<br>
asource = gst_element_factory_make ("alsasrc",
"alsa-source");<br>
aconv = gst_element_factory_make ("audioconvert",
"audio-conv");<br>
acapsfilter = gst_element_factory_make ("capsfilter",
"audio-caps");<br>
audrate = gst_element_factory_make ("audiorate",
"audrate");<br>
queue2 = gst_element_factory_make ("queue",
"queue2");<br>
mux = gst_element_factory_make ("avimux",
"avi-mux");<br>
sink = gst_element_factory_make ("filesink",
"file-output");<br>
<br>
if (!pipeline || !vsource || !vcapsfilter || !vidrate ||
!queue1 || !asource || !aconv || !audrate || !acapsfilter ||
!queue2 || !mux || !sink) {<br>
g_printerr ("One element could not be created.
Exiting.\n");<br>
return -1;<br>
}<br>
<br>
/* Set up the pipeline */<br>
<br>
/* we set the output filename to the sink element */<br>
g_object_set (G_OBJECT (sink), "location", argv[1], NULL);<br>
<br>
/* we set the video capabilities on the vidcaps element */<br>
caps =
gst_caps_from_string("video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1");<br>
g_object_set (G_OBJECT (vcapsfilter), "caps", caps, NULL);<br>
/* capsfilter took its own ref; drop ours */<br>
gst_caps_unref (caps); <br>
<br>
/* we set the audio capabilities on the audiocaps element */<br>
caps =
gst_caps_from_string("audio/x-raw,rate=44100,channels=2");<br>
g_object_set (G_OBJECT (acapsfilter), "caps", caps, NULL);<br>
gst_caps_unref (caps); <br>
<br>
/* NOTE(review): 255 is the raw integer reported by gst-inspect for<br>
   PAL (the enum name did not compile, per the message above). Verify<br>
   that 255 matches the norm enum of the installed v4l2 plugin. */<br>
g_object_set (G_OBJECT (vsource), "norm", 255, NULL);<br>
<br>
/* we add a message handler */<br>
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));<br>
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop);<br>
gst_object_unref (bus);<br>
<br>
/* we add all elements into the pipeline */<br>
/* vsource, vcapsfilter, vidrate, queue1,<br>
asource, aconv, acapsfilter, queue2,<br>
mux, sink */<br>
/* NOTE(review): vidrate and audrate are created above but never added<br>
   to the bin (only the commented-out variants include them), so they<br>
   leak at exit. */<br>
gst_bin_add_many (GST_BIN (pipeline),<br>
vsource, vcapsfilter, queue1,<br>
/* vsource, vidrate, vcapsfilter,
queue1,*/<br>
asource, aconv, acapsfilter, queue2,<br>
/* asource, aconv, audrate,
acapsfilter, queue2,*/<br>
mux, sink, NULL);<br>
<br>
/* we link the elements together */<br>
/* vsource -> vcapsfilter -> vidrate -> queue1
-> avimux<br>
asource -> aconv -> acapsfilter -> queue2
-> avimux<br>
mux -> sink */<br>
/* NOTE(review): the boolean results of these gst_element_link_many()<br>
   calls are ignored. If the video branch fails to link (e.g. a caps<br>
   mismatch with v4l2src), the failure is silent and only audio reaches<br>
   the muxer - which matches the symptom described above. Check each<br>
   return value and bail out on FALSE. */<br>
gst_element_link_many (vsource, vcapsfilter, queue1, mux,
NULL);<br>
/* gst_element_link_many (vsource, vcapsfilter, vidrate,
queue1, mux, NULL);*/<br>
gst_element_link_many (asource, aconv, acapsfilter, queue2,
mux, NULL);<br>
/* gst_element_link_many (asource, aconv, audrate,
acapsfilter, queue2, mux, NULL);*/<br>
gst_element_link_many (mux, sink, NULL);<br>
<br>
<br>
/* Set the pipeline to "playing" state*/<br>
g_print ("Now recording: %s\n", argv[1]);<br>
gst_element_set_state (pipeline, GST_STATE_PLAYING);<br>
<br>
<br>
/* Iterate */<br>
/* NOTE(review): the gst-launch line above passes -e (send EOS on<br>
   shutdown), but this program never sends an EOS event before setting<br>
   the pipeline to NULL, so the muxer may not finalise the AVI - confirm<br>
   how recording is intended to be stopped. */<br>
g_print ("Recording (or not!)...\n");<br>
g_main_loop_run (loop);<br>
<br>
<br>
/* Out of the main loop, clean up nicely */<br>
g_print ("Returned, stopping recording\n");<br>
gst_element_set_state (pipeline, GST_STATE_NULL);<br>
<br>
g_print ("Deleting pipeline\n");<br>
gst_object_unref (GST_OBJECT (pipeline));<br>
g_source_remove (bus_watch_id);<br>
g_main_loop_unref (loop);<br>
<br>
return 0;<br>
}<br>
</div>
</div>
</div>
<div class="moz-signature">-- <br>
--<br>
Ian Davidson<br>
--<br>
Facts used in this message may or may not reflect an underlying
objective reality. Facts are supplied for personal use only.<br>
Recipients quoting supplied information do so at their own risk.
Facts supplied may vary in whole or part from widely accepted
standards.<br>
While painstakingly researched, facts may or may not be indicative
of actually occurring events or natural phenomena.<br>
The author accepts no responsibility for personal loss or injury
resulting from memorisation and subsequent use.
</div>
</body>
</html>