<html>
<head>
<meta http-equiv="content-type" content="text/html;
charset=ISO-8859-1">
</head>
<body bgcolor="#FFFFFF" text="#000000">
<div class="moz-text-flowed" style="font-family: -moz-fixed;
font-size: 13px;" lang="x-western">My program now works - for very
short videos (up to 1 second and 2 frames duration!) <br>
<br>
I have been testing my program and thought it was almost working -
until I discovered a problem. I had been testing with a video
camera pointing nowhere in particular and all seemed well, but
then, when I waved to the camera, I noticed that the video did not
include me waving. I transferred the video to another machine and
noticed that while the audio continued as expected, the video
stopped after about 1 second and 2 frames. I tested with the
following gst-launch command, and that pipeline seems to work OK. <br>
<br>
gst-launch-1.0 -e --gst-debug-level=6 v4l2src norm=PAL !
'video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1'
! queue ! mux. alsasrc ! audioconvert !
'audio/x-raw,rate=44100,channels=2' ! queue ! mux. avimux name=mux
! filesink location=test.avi <br>
<br>
The program is intended to be functionally the same - the only
'embellishment' is that this test version is set to stop recording
after 1 minute (the live version will have a 40 minute cutoff).
The program also supports Ctrl-C to stop recording. I have (I
hope) attached a zip file of the debug output (the middle 56
seconds have been removed). I can see that the debug shows<br>
<ul>
<li>at 0:00:00.343548749, the pipeline is 'Playing' - so what
went before is configuration and establishment.</li>
<li>it appears to be reporting that both pads have data,
repeatedly throughout the session</li>
<li>there is activity after 1 minute when the EOS gets sent</li>
</ul>
but I have not noticed anything in the debug output that reports a
problem around the 1-second mark, where the video content stops
being recorded. <br>
<br>
I have also tested using gst_parse_launch (instead of constructing
the pipeline 'by hand') and I get the same problem with the video
stopping after a second.<br>
<br>
Can anyone see what might be going wrong? <br>
<br>
Ian <br>
<br>
#include <gst/gst.h> <br>
#include <glib.h> <br>
#include <signal.h> <br>
#include <string.h> <br>
<br>
static GstElement *pipeline; <br>
gulong timeout_id; <br>
<br>
static gboolean <br>
bus_call (GstBus *bus, <br>
GstMessage *msg, <br>
gpointer data) <br>
{ <br>
GMainLoop *loop = (GMainLoop *) data; <br>
switch (GST_MESSAGE_TYPE (msg)) { <br>
<br>
case GST_MESSAGE_EOS: <br>
g_print ("End of stream\n"); <br>
g_main_loop_quit (loop); <br>
break; <br>
<br>
case GST_MESSAGE_ERROR: { <br>
gchar *debug; <br>
GError *error; <br>
<br>
gst_message_parse_error (msg, &error, &debug); <br>
g_free (debug); <br>
<br>
g_printerr ("Error: %s\n", error->message); <br>
g_error_free (error); <br>
<br>
g_main_loop_quit (loop); <br>
break; <br>
} <br>
<br>
case GST_MESSAGE_APPLICATION:{ <br>
const GstStructure *s; <br>
<br>
s = gst_message_get_structure (msg); <br>
<br>
if (gst_structure_has_name (s, "GstLaunchInterrupt")) { <br>
/* this application message is posted when we caught an
interrupt and <br>
* we need to stop the pipeline. */ <br>
g_print ("Interrupt: Stopping pipeline ...\n"); <br>
gst_element_send_event (pipeline, gst_event_new_eos ());
<br>
} <br>
break; <br>
} <br>
<br>
default: <br>
break; <br>
} <br>
<br>
return TRUE; <br>
} <br>
<br>
<br>
<br>
<br>
static void <br>
sigint_restore (void) <br>
{ <br>
struct sigaction action; <br>
<br>
memset (&action, 0, sizeof (action)); <br>
action.sa_handler = SIG_DFL; <br>
<br>
sigaction (SIGINT, &action, NULL); <br>
} <br>
<br>
<br>
static void <br>
sigint_handler_sighandler (int signum) <br>
{ <br>
g_print ("Caught interrupt -- "); <br>
<br>
gst_element_send_event (pipeline, gst_event_new_eos ()); <br>
sigint_restore (); <br>
} <br>
<br>
<br>
static void <br>
sigint_setup (void) <br>
{ <br>
struct sigaction action; <br>
<br>
memset (&action, 0, sizeof (action)); <br>
action.sa_handler = sigint_handler_sighandler; <br>
<br>
sigaction (SIGINT, &action, NULL); <br>
} <br>
<br>
/* is called every 40 minutes. <br>
* I will send an EOS to the pipeline. */ <br>
static gboolean <br>
times_up (GstElement * pipeline) <br>
{ <br>
gst_element_send_event (pipeline, gst_event_new_eos ()); <br>
sigint_restore (); <br>
return FALSE; <br>
} <br>
<br>
<br>
<br>
int <br>
main (int argc, <br>
char *argv[]) <br>
{ <br>
GMainLoop *loop; <br>
<br>
GstElement *vsource, *vcapsfilter, *vidrate, *queue1; <br>
GstElement *asource, *aconv, *audrate, *acapsfilter, *queue2; <br>
GstElement *mux, *sink; <br>
GstBus *bus; <br>
guint bus_watch_id; <br>
<br>
GstCaps *caps; <br>
<br>
/* Initialisation */ <br>
gst_init (&argc, &argv); <br>
<br>
loop = g_main_loop_new (NULL, FALSE); <br>
<br>
<br>
/* Check input arguments */ <br>
if (argc != 2) { <br>
g_printerr ("Usage: %s <AVI filename>\n", argv[0]); <br>
return -1; <br>
} <br>
<br>
<br>
/* Create gstreamer elements */ <br>
pipeline = gst_pipeline_new ("av-recorder"); <br>
vsource = gst_element_factory_make ("v4l2src",
"vid-source"); <br>
vcapsfilter = gst_element_factory_make ("capsfilter",
"vid-caps"); <br>
vidrate = gst_element_factory_make ("videorate",
"vidrate"); <br>
queue1 = gst_element_factory_make ("queue",
"queue1"); <br>
asource = gst_element_factory_make ("alsasrc",
"alsa-source"); <br>
aconv = gst_element_factory_make ("audioconvert",
"audio-conv"); <br>
acapsfilter = gst_element_factory_make ("capsfilter",
"audio-caps"); <br>
audrate = gst_element_factory_make ("audiorate",
"audrate"); <br>
queue2 = gst_element_factory_make ("queue",
"queue2"); <br>
mux = gst_element_factory_make ("avimux", "avi-mux"); <br>
sink = gst_element_factory_make ("filesink",
"file-output"); <br>
<br>
if (!pipeline || !vsource || !vcapsfilter || !vidrate || !queue1
|| !asource || !aconv || !audrate || !acapsfilter || !queue2 ||
!mux || !sink) { <br>
g_printerr ("One element could not be created. Exiting.\n"); <br>
return -1; <br>
} <br>
<br>
/* Set up the pipeline */ <br>
<br>
/* we set the output filename to the sink element */ <br>
g_object_set (G_OBJECT (sink), "location", argv[1], NULL); <br>
<br>
/* we set the video capabilities on the vidcaps element */ <br>
caps =
gst_caps_from_string("video/x-raw,format=(string)I420,width=320,height=240,framerate=(fraction)25/1");<br>
g_object_set (G_OBJECT (vcapsfilter), "caps", caps, NULL); <br>
gst_caps_unref (caps); <br>
<br>
/* we set the audio capabilities on the audiocaps element */ <br>
caps =
gst_caps_from_string("audio/x-raw,rate=44100,channels=2"); <br>
g_object_set (G_OBJECT (acapsfilter), "caps", caps, NULL); <br>
gst_caps_unref (caps); <br>
<br>
g_object_set (G_OBJECT (vsource), "norm", 255, NULL); <br>
<br>
/* we add a message handler */ <br>
bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); <br>
bus_watch_id = gst_bus_add_watch (bus, bus_call, loop); <br>
gst_object_unref (bus); <br>
<br>
/* we add all elements into the pipeline */ <br>
/* vsource, vcapsfilter, vidrate, queue1, <br>
asource, aconv, acapsfilter, queue2, <br>
mux, sink */ <br>
gst_bin_add_many (GST_BIN (pipeline), <br>
vsource, vcapsfilter, queue1, <br>
/* vsource, vidrate, vcapsfilter, queue1,*/
<br>
asource, aconv, acapsfilter, queue2, <br>
/* asource, aconv, audrate, acapsfilter,
queue2,*/ <br>
mux, sink, NULL); <br>
<br>
/* we link the elements together */ <br>
/* vsource -> vcapsfilter -> vidrate -> queue1
-> avimux <br>
asource -> aconv -> acapsfilter -> queue2 ->
avimux <br>
mux -> sink */ <br>
gst_element_link_many (vsource, vcapsfilter, queue1, mux,
NULL); <br>
/* gst_element_link_many (vsource, vcapsfilter, vidrate, queue1,
mux, NULL);*/ <br>
gst_element_link_many (asource, aconv, acapsfilter, queue2,
mux, NULL); <br>
/* gst_element_link_many (asource, aconv, audrate, acapsfilter,
queue2, mux, NULL);*/ <br>
gst_element_link_many (mux, sink, NULL); <br>
<br>
<br>
/* Set the pipeline to "playing" state*/ <br>
g_print ("Now recording: %s\n", argv[1]); <br>
gst_element_set_state (pipeline, GST_STATE_PLAYING); <br>
<br>
sigint_setup (); <br>
/* timeout_id = g_timeout_add (40 * 60000, (GSourceFunc)
times_up, pipeline); */ <br>
timeout_id = g_timeout_add (1 * 60000, (GSourceFunc) times_up,
pipeline); <br>
<br>
<br>
/* Iterate */ <br>
g_print ("Recording (or not!)...\n"); <br>
g_main_loop_run (loop); <br>
<br>
<br>
/* Out of the main loop, clean up nicely */ <br>
g_print ("Returned, stopping recording\n"); <br>
gst_element_set_state (pipeline, GST_STATE_NULL); <br>
<br>
g_print ("Deleting pipeline\n"); <br>
gst_object_unref (GST_OBJECT (pipeline)); <br>
g_source_remove (bus_watch_id); <br>
g_main_loop_unref (loop); <br>
<br>
return 0; <br>
} <br>
<br>
<br>
-- <br>
Ian Davidson <br>
-- <br>
Facts used in this message may or may not reflect an underlying
objective reality. Facts are supplied for personal use only. <br>
Recipients quoting supplied information do so at their own risk.
Facts supplied may vary in whole or part from widely accepted
standards. <br>
While painstakingly researched, facts may or may not be indicative
of actually occurring events or natural phenomena. <br>
The author accepts no responsibility for personal loss or injury
resulting from memorisation and subsequent use. <br>
</div>
<div class="moz-signature"><br>
</div>
</body>
</html>