wxWidgets MediaCtrl with decklinksrc.
Steve Cookson
steveco.1959 at gmail.com
Thu Aug 29 11:46:10 PDT 2013
Hi Guys,
I've been hacking the source code for wxMediaCtrl to get it working with
a video capture card while displaying and saving the output. It works
fine for v4l2src, but it doesn't work at all for decklinksrc.
The basic pipeline works when I run it with gst-launch, but the same
pipeline built in code does not. This is the gst-launch pipeline:
gst-launch -v decklinksrc connection=3 mode=18 ! videoscale ! ffmpegcolorspace ! videobalance brightness=.3 contrast=.8 ! gamma gamma=.5 ! tee name=videoTee videoTee. ! queue name=monitorQueue ! xvimagesink sync=false force-aspect-ratio=true videoTee. ! queue name=fileQueue ! videorate ! video/x-raw-yuv,framerate=15/1 ! theoraenc quality=63 keyframe-force=1 ! oggmux ! filesink location=video.ogg
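As a sanity check (just a sketch, I haven't run it exactly as written), the encode branch should be exercisable without the card by swapping in videotestsrc, something like:
gst-launch -v videotestsrc ! ffmpegcolorspace ! videorate ! video/x-raw-yuv,framerate=15/1 ! theoraenc quality=63 keyframe-force=1 ! oggmux ! filesink location=test.ogg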
I hope it matches the code!!!
Where have I slipped up? Your thoughts would be very much appreciated.
The code follows, sorry about its length :(
Regards
Steve
//-----------------------------------------------------------------------------
// wxGStreamerMediaBackend::DeviceCapture
//
// Used to capture and store input from a local device (e.g. /dev/video0) in a specified file.
//-----------------------------------------------------------------------------
bool wxGStreamerMediaBackend::DeviceCapture(wxControl* ctrl, wxWindow* parent,
wxWindowID id,
const wxPoint& pos,
const wxSize& size,
long style,
const wxValidator& validator,
const wxString& name)
{
printf ("mediactrl->DeviceCapture option 'wxID_HIGHEST'.\n");
// wxMutexLocker lock(m_asynclock); // lock state events and async callbacks
//
//init gstreamer
//
//Convert arguments to unicode if enabled
#if wxUSE_UNICODE
int i;
char **argvGST = new char*[wxTheApp->argc + 1];
for ( i = 0; i < wxTheApp->argc; i++ )
{
argvGST[i] = wxStrdupA(wxConvUTF8.cWX2MB(wxTheApp->argv[i]));
}
argvGST[wxTheApp->argc] = NULL;
int argcGST = wxTheApp->argc;
#else
#define argcGST wxTheApp->argc
#define argvGST wxTheApp->argv
#endif
//Really init gstreamer
gboolean bInited;
GError* error = NULL;
#if GST_VERSION_MAJOR > 0 || GST_VERSION_MINOR >= 10
bInited = gst_init_check(&argcGST, &argvGST, &error);
#else
bInited = gst_init_check(&argcGST, &argvGST);
#endif
// Cleanup arguments for unicode case
#if wxUSE_UNICODE
for ( i = 0; i < argcGST; i++ )
{
free(argvGST[i]);
}
delete [] argvGST;
#endif
if(!bInited) //gst_init_check fail?
{
if(error)
{
wxLogSysError(wxT("Could not initialize GStreamer\n")
wxT("Error Message:%s"),
(const wxChar*) wxConvUTF8.cMB2WX(error->message)
);
g_error_free(error);
}
else
wxLogSysError(wxT("Could not initialize GStreamer"));
return false;
}
//
// wxControl creation
//
m_ctrl = wxStaticCast(ctrl, wxMediaCtrl);
#ifdef __WXGTK__
// We handle our own GTK expose events
m_ctrl->m_noExpose = true;
#endif
if( !m_ctrl->wxControl::Create(parent, id, pos, size,
style, // TODO: remove borders???
validator, name) )
{
wxFAIL_MSG(wxT("Could not create wxControl!!!"));
return false;
}
#ifdef __WXGTK__
// Turn off double-buffering so that it
// doesn't draw over the video and cause sporadic
// disappearances of the video
gtk_widget_set_double_buffered(m_ctrl->m_wxwindow, FALSE);
#endif
// don't erase the background of our control window
// so that resizing is a bit smoother
m_ctrl->SetBackgroundStyle(wxBG_STYLE_CUSTOM);
// Ready video.
printf ("Create Pipeline method starts.\n");
//m_playbin-----------------------------------------------------------
// decklinksrc connection=3 mode=18 !
// videoscale ! ffmpegcolorspace ! videobalance brightness=.3 contrast=.8 ! gamma gamma=.5 ! tee name=videoTee
// videoTee ! monitorQueue ! xvimagesink sync=false force-aspect-ratio=true
// videoTee ! fileQueue ! videorate ! video/x-raw-yuv,framerate=15/1 ! theoraenc quality=63 keyframe-force=1 !
// oggmux ! filesink location=video.ogg
//
// Create DeviceCapture object
//
//GstElement *m_playbin;
GstElement *videoSrc, *videoScale, *colorSpace, *videoBalance, *videoGamma, *videoTee;
GstElement *monitorQueue, *videoSink;
GstElement *fileQueue, *videoRate, *encoder, *muxer, *fileSink;
//
// Create elements
//
m_playbin = gst_pipeline_new ("DeviceCapture m_playbin substitute");
if (!m_playbin) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT("'m_playbin' could not be created."));
//g_printerr ("'m_playbin' could not be created.\n");
return false;
}
videoSrc = gst_element_factory_make ("decklinksrc", "videoSrc");
if (!videoSrc) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'videoSrc' could not be created.\n"));
return false;
}
videoScale = gst_element_factory_make ("videoscale", "videoScale");
if (!videoScale) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'videoScale' could not be created.\n"));
return false;
}
colorSpace = gst_element_factory_make ("ffmpegcolorspace", "colorSpace");
if (!colorSpace) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'colorSpace' could not be created.\n"));
return false;
}
videoBalance = gst_element_factory_make ("videobalance", "videoBalance");
if (!videoBalance) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'videoBalance' could not be created.\n"));
return false;
}
videoGamma = gst_element_factory_make ("gamma", "videoGamma");
if (!videoGamma) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'videoGamma' could not be created.\n"));
return false;
}
videoTee = gst_element_factory_make ("tee", "videoTee");
if (!videoTee) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'videoTee' could not be created.\n"));
return false;
}
monitorQueue = gst_element_factory_make ("queue", "monitorQueue");
if (!monitorQueue) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'monitorQueue' could not be created.\n"));
return false;
}
fileQueue = gst_element_factory_make ("queue", "fileQueue");
if (!fileQueue) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'fileQueue' could not be created.\n"));
return false;
}
videoRate = gst_element_factory_make ("videorate", "videorate");
if (!videoRate) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'videoRate' could not be created.\n"));
return false;
}
encoder = gst_element_factory_make ("theoraenc", "videoenc");
if (!encoder) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'encoder' could not be created.\n"));
return false;
}
muxer = gst_element_factory_make ("oggmux", "muxer");
if (!muxer) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'muxer' could not be created.\n"));
return false;
}
fileSink = gst_element_factory_make ("filesink", "fileSink");
if (!fileSink) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("'fileSink' could not be created.\n"));
return false;
}
// Setup video sink - first try gconf, then auto, then xvimage and
// then finally plain ximage
videoSink = gst_gconf_get_default_video_sink();
if( !TryVideoSink(videoSink) )
{
videoSink = gst_element_factory_make ("autovideosink", "video-sink");
if( !TryVideoSink(videoSink) )
{
videoSink = gst_element_factory_make ("xvimagesink", "video-sink");
if( !TryVideoSink(videoSink) )
{
// finally, do a final fallback to ximagesink
videoSink =
gst_element_factory_make ("ximagesink", "video-sink");
if( !TryVideoSink(videoSink) )
{
g_object_unref(videoSink);
wxLogSysError(wxT("Could not find a suitable video sink"));
return false;
}
}
}
}
// Error reporting
if (!m_playbin || !videoSrc || !videoScale || !colorSpace || !videoBalance || !videoGamma || !videoTee || !monitorQueue || !videoSink
|| !fileQueue || !videoRate || !encoder || !muxer || !fileSink) {
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT ("One or more elements could not be created.\n"));
return false;
}
// Set parameters for the GstElements
// Video source: connection and mode, matching the gst-launch line
g_object_set (G_OBJECT (videoSrc), "connection", 3, NULL);
g_object_set (G_OBJECT (videoSrc), "mode", 18, NULL);
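// (Assumption on my part: "connection" and "mode" are enum-typed properties
// on decklinksrc, so the raw integers above are taken to map to the same
// values that connection=3 mode=18 resolve to under gst-launch.)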
// Colour settings for camera
g_object_set (G_OBJECT (videoBalance), "brightness", .3, NULL);
g_object_set (G_OBJECT (videoBalance), "contrast", .8, NULL);
g_object_set (G_OBJECT (videoGamma), "gamma", .5, NULL);
// File location
g_object_set (G_OBJECT (fileSink), "location", "/home/image/tmp_vid.ogg", NULL); // was .avi
// Encoder quality
g_object_set (G_OBJECT (encoder), "quality", 63, NULL);
// Encoder keyframe rate
g_object_set (G_OBJECT (encoder), "keyframe-force", 1, NULL);
// videoSink sync=false force-aspect-ratio=true
g_object_set (G_OBJECT (videoSink), "sync", FALSE, "force-aspect-ratio", TRUE, NULL);
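// (My assumption: if the fallback above picked something other than
// xvimagesink, e.g. autovideosink, that element may not expose
// force-aspect-ratio, in which case g_object_set only warns and skips it.)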
// g_object_set (G_OBJECT (videoSink), "sync", false, NULL);
// add elements to pipeline
gst_bin_add_many (GST_BIN (m_playbin),
videoSrc, videoScale, colorSpace, videoTee,
monitorQueue, videoSink,
fileQueue, videoRate, videoBalance, videoGamma, encoder, muxer, fileSink, NULL);
// link elements.
if (!gst_element_link_many( videoSrc, videoScale, colorSpace, videoBalance, videoGamma, videoTee, NULL )) {
g_warning ("Failed to link videoSrc, videoScale, colorSpace, videoBalance, videoGamma and videoTee.");
}
if (!gst_element_link_many( videoTee, monitorQueue, videoSink, NULL )) {
g_warning ("Failed to link videoTee, monitorQueue and videoSink.");
}
if (!gst_element_link_many( videoTee, fileQueue, videoRate, NULL )) {
g_warning ("Failed to link videoTee, fileQueue and videoRate.");
}
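// (Sketch only, not what I'm running: if the tee links ever fail, GStreamer
// 0.10's request-pad API can be used explicitly instead of link_many, e.g.
//   GstPad *teePad   = gst_element_get_request_pad (videoTee, "src%d");
//   GstPad *queuePad = gst_element_get_static_pad (monitorQueue, "sink");
//   gst_pad_link (teePad, queuePad);
//   gst_object_unref (queuePad);
// and the same again for fileQueue.)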
// Create caps stuff
GstCaps *caps;
m_videoFps = 15;
caps = gst_caps_new_simple ("video/x-raw-yuv",
"framerate", GST_TYPE_FRACTION, 15, 1,
NULL);
if (!gst_element_link_filtered (videoRate, encoder, caps)) {
g_warning ("Failed to link videoRate and encoder.");
}
gst_caps_unref (caps);
if (!gst_element_link_many( encoder, muxer, fileSink, NULL )) {
g_warning ("Failed to link encoder, muxer and fileSink.");
}
if (!GST_IS_ELEMENT(m_playbin))
{
if(G_IS_OBJECT(m_playbin))
g_object_unref(m_playbin);
wxLogSysError(wxT("Got an invalid bin"));
return false;
}
// Set playbin to ready
if( gst_element_set_state (m_playbin, GST_STATE_READY) ==
GST_STATE_FAILURE || !SyncStateChange(m_playbin, GST_STATE_READY))
{
wxLogSysError(wxT("wxGStreamerMediaBackend::DeviceCapture - ")
wxT("Could not set initial state to ready"));
return false;
}
printf ("Playbin set.\n");
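// Note: no data flows in READY; the pipeline still has to be taken to
// GST_STATE_PLAYING later on (that happens outside this function).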
#if GST_VERSION_MAJOR == 0 && GST_VERSION_MINOR < 10
// Connect the glib events/callbacks we want to our playbin
g_signal_connect(m_playbin, "eos",
G_CALLBACK(gst_finish_callback), this);
g_signal_connect(m_playbin, "error",
G_CALLBACK(gst_error_callback), this);
g_signal_connect(m_playbin, "state-change",
G_CALLBACK(gst_state_change_callback), this);
#else
// GStreamer 0.10+ uses GstBus for this now, connect to the sync
// handler as well so we can set the X window id of our xoverlay
gst_bus_add_watch (gst_element_get_bus(m_playbin),
(GstBusFunc) gst_bus_async_callback, this);
gst_bus_set_sync_handler(gst_element_get_bus(m_playbin),
(GstBusSyncHandler) gst_bus_sync_callback, this);
g_signal_connect(m_playbin, "notify::stream-info",
G_CALLBACK(gst_notify_stream_info_callback), this);
#endif
#if GST_VERSION_MAJOR == 0 && GST_VERSION_MINOR < 10
// Not on 0.10... called when video size changes
g_signal_connect(m_xoverlay, "desired-size-changed",
G_CALLBACK(gst_desired_size_changed_callback), this);
#endif
// Tell GStreamer which window to draw to in 0.8 - 0.10
// sometimes needs this too...
SetupXOverlay();
printf ("Control created.\n");
m_eventHandler = new wxGStreamerMediaEventHandler(this);
return true;
}