[gst-devel] Need help with first gstreamer program.
Ognen Bezanov
ognen.bezanov at wai.co.uk
Fri Jun 26 13:09:22 CEST 2009
Hello all,
I'm trying to write a GStreamer program to transcode a video into
another format (Ogg Theora video with Vorbis audio), but I can't get it
to work. When I run it with debugging enabled, I get the following:
0:00:00.556658600 4539 0x981f920 INFO GST_STATES gstelement.c:2163:gst_element_continue_state:<Abuffer> posting state-changed READY to PAUSED
0:00:00.556698920 4539 0x981f920 INFO GST_STATES gstbin.c:2194:gst_bin_change_state_func:<videopipeline> child 'Abuffer' changed state to 3(PAUSED) successfully
0:00:00.556770600 4539 0x981f920 INFO GST_STATES gstelement.c:2150:gst_element_continue_state:<demux> completed state change to PAUSED
0:00:00.556797720 4539 0x981f920 INFO GST_STATES gstelement.c:2163:gst_element_continue_state:<demux> posting state-changed READY to PAUSED
0:00:00.556834680 4539 0x981f920 INFO GST_STATES gstbin.c:2194:gst_bin_change_state_func:<videopipeline> child 'demux' changed state to 3(PAUSED) successfully
0:00:00.556879560 4539 0x981f920 INFO filesrc gstfilesrc.c:947:gst_file_src_start:<filesrc> opening file testmovie.mpg
0:00:00.556992080 4539 0x981f920 INFO GST_EVENT gstevent.c:563:gst_event_new_new_segment_full: creating newsegment update 0, rate 1.000000, format bytes, start 0, stop 831340544, position 0
0:00:00.557376160 4539 0x9bf2b70 INFO GST_EVENT gstevent.c:557:gst_event_new_new_segment_full: creating newsegment update 0, rate 1.000000, format GST_FORMAT_TIME, start 0:00:00.001622222, stop 99:99:99.999999999, position 0:00:00.001622222
0:00:00.557623280 4539 0x9bf2b70 INFO GST_ELEMENT_PADS gstelement.c:639:gst_element_add_pad:<demux> adding pad 'video_00'
0:00:00.640338920 4539 0x9bf2b70 INFO GST_ELEMENT_PADS gstelement.c:885:gst_element_get_static_pad: found pad Vbuffer:sink
0:00:00.640405200 4539 0x9bf2b70 INFO GST_PADS gstpad.c:1796:gst_pad_link_prepare: trying to link demux:video_00 and Vbuffer:sink
0:00:00.640504520 4539 0x9bf2b70 INFO GST_PADS gstpad.c:1933:gst_pad_link: linked demux:video_00 and Vbuffer:sink, successful
0:00:00.640556640 4539 0x9bf2b70 INFO GST_EVENT gstevent.c:557:gst_event_new_new_segment_full: creating newsegment update 0, rate 1.000000, format GST_FORMAT_TIME, start 0:00:00.001622222, stop 99:99:99.999999999, position 0:00:00.001622222
0:00:00.640972360 4539 0x9bef930 INFO GST_PADS gstpad.c:3222:gst_pad_event_default_dispatch:<mux:sink_437196908> Sending event 0x97e28c8 (tag) to all internally linked pads
0:00:00.557662760 4539 0x981f920 INFO GST_STATES gstelement.c:2150:gst_element_continue_state:<filesrc> completed state change to PAUSED
0:00:00.665778000 4539 0x9bf2b70 INFO GST_ELEMENT_PADS gstelement.c:639:gst_element_add_pad:<demux> adding pad 'private_1'
0:00:00.665980680 4539 0x9bf2b70 INFO GST_EVENT gstevent.c:557:gst_event_new_new_segment_full: creating newsegment update 0, rate 1.000000, format GST_FORMAT_TIME, start 0:00:00.001622222, stop 99:99:99.999999999, position 0:00:00.001622222
0:00:00.680987680 4539 0x981f920 INFO GST_STATES gstelement.c:2163:gst_element_continue_state:<filesrc> posting state-changed READY to PAUSED
0:00:00.681131000 4539 0x981f920 INFO GST_STATES gstbin.c:2194:gst_bin_change_state_func:<videopipeline> child 'filesrc' changed state to 3(PAUSED) successfully
Everything stays paused. If I tell it to handle the video on its own, it
converts fine, but if I tell it to handle the audio by itself, it also
stays paused. I'm not quite sure what I'm doing wrong (probably
something audio related), so if anyone can point out my mistakes, it
would be much appreciated!
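I have a bus watch commented out near the end of __init__ below; when I enable it, I plan to hook the error messages with something along these lines to see whether any element actually complains while everything sits in PAUSED (the handler name here is just my own placeholder, it isn't in the listing below):

    def on_error(bus, message):
        # message::error carries a GError plus a debug string from the element
        err, debug = message.parse_error()
        print "ERROR:", err, debug

    bus = self.vidline.get_bus()
    bus.add_signal_watch()
    bus.connect("message::error", on_error)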
The code:
#!/usr/bin/env python

import sys, os
import pygtk, gtk, gobject
import pygst, time
pygst.require("0.10")
import gst


class Main:
    def __init__(self):
        self.vidline = gst.Pipeline("videopipeline")    # this deals with video processing
        #self.audline = gst.Pipeline("audiopipeline")   # this deals with audio processing

        # The elements below do a generic MPEG-2 -> Theora (Ogg mux) conversion,
        # with MP3 -> Vorbis audio conversion.

        # Decoding
        self.demuxer = gst.element_factory_make("mpegdemux", "demux")
        self.vdecoder = gst.element_factory_make("mpeg2dec", "mpeg2dec")   # video decoder
        self.adecoder = gst.element_factory_make("mad", "mad")             # audio decoder

        # Encoding
        self.muxer = gst.element_factory_make("oggmux", "mux")
        self.vencoder = gst.element_factory_make("theoraenc", "video_enc")      # video encoder
        self.aencoder = gst.element_factory_make("vorbisenc", "audio_enc")      # audio encoder
        self.aconv = gst.element_factory_make("audioconvert", "audio_convert")  # needed between the decoding and encoding stages

        # File I/O
        self.filesrc = gst.element_factory_make("filesrc", "filesrc")      # file input
        self.filesink = gst.element_factory_make("filesink", "file sink")  # file output

        # File details (hardcoded for now)
        self.filesrc.set_property("location", "testmovie.mpg")
        self.filesink.set_property("location", "test.ogg")

        # Buffers
        # input
        self.aqueue = gst.element_factory_make("queue", "Abuffer")
        self.vqueue = gst.element_factory_make("queue", "Vbuffer")
        # output
        self.aqueueout = gst.element_factory_make("queue", "AbufferOut")
        self.vqueueout = gst.element_factory_make("queue", "VbufferOut")

        # Build the pipeline(s)!
        self.demuxer.connect("pad-added", self.demuxer_callback)
        self.muxer.connect("pad-added", self.muxer_callback)

        # Video encoding pipeline
        self.vidline.add(self.filesrc)
        self.vidline.add(self.demuxer)
        ## Temporarily removed till we get audio working
        self.vidline.add(self.vqueue)
        self.vidline.add(self.vqueueout)
        self.vidline.add(self.vdecoder)
        self.vidline.add(self.vencoder)
        self.vidline.add(self.muxer)
        self.vidline.add(self.filesink)

        # Audio encoding pipeline
        self.vidline.add(self.aqueue)
        self.vidline.add(self.adecoder)
        self.vidline.add(self.aconv)
        self.vidline.add(self.aencoder)
        self.vidline.add(self.aqueueout)
        #self.vidline.add(self.audiosink)

        # Link it all together
        gst.element_link_many(self.filesrc, self.demuxer)
        gst.element_link_many(self.vqueue, self.vdecoder, self.vencoder, self.vqueueout)
        gst.element_link_many(self.aqueue, self.adecoder, self.aconv, self.aencoder, self.aqueueout)
        gst.element_link_many(self.vqueueout, self.muxer)
        gst.element_link_many(self.aqueueout, self.muxer)
        gst.element_link_many(self.muxer, self.filesink)

        self.vidline.set_state(gst.STATE_PLAYING)

        #bus = self.vidline.get_bus()
        #bus.add_signal_watch()
        #bus.connect("message::eos", self.onEndOfStream)
        #bus.connect("message::state-changed", self.onStateChanged)

    def demuxer_callback(self, demuxer, pad):
        if pad.get_property("template").name_template == "video_%02d":
            qv_pad = self.vqueue.get_pad("sink")
            pad.link(qv_pad)
        elif pad.get_property("template").name_template == "audio_%02d":
            qa_pad = self.aqueue.get_pad("sink")
            pad.link(qa_pad)

    def muxer_callback(self, muxer, pad):
        if pad.get_property("template").name_template == "video_%02d":
            qv_pad = self.vqueueout.get_pad("src")
            pad.link(qv_pad)
        elif pad.get_property("template").name_template == "audio_%02d":
            qa_pad = self.aqueueout.get_pad("src")
            pad.link(qa_pad)


start = Main()
gtk.main()
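For what it's worth, the log above only ever shows the demuxer adding 'video_00' and 'private_1' pads, never an audio_XX pad, so I may also add a quick debug print to demuxer_callback to confirm which pad names and templates actually turn up. It would be the same callback as above, just with a print added (the print line is not in the listing above):

    def demuxer_callback(self, demuxer, pad):
        tmpl = pad.get_property("template").name_template
        # debug aid: show every pad the demuxer exposes, e.g. video_00, private_1
        print "demux added pad", pad.get_name(), "from template", tmpl
        if tmpl == "video_%02d":
            pad.link(self.vqueue.get_pad("sink"))
        elif tmpl == "audio_%02d":
            pad.link(self.aqueue.get_pad("sink"))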