GStreamer application to convert a .ogg file to an .mp4 file

William Metcalf wmetcalf at niftytv.com
Fri Jun 3 13:41:51 PDT 2011


I am writing an application to convert a .ogg file to an .mp4 file.  My
code is below.  I already have a working terminal (gst-launch) pipeline
for this task, and I am now trying to translate that pipeline into an
actual C program.  The pipeline seems correct to me, but when I run the
application I get the error "Internal data stream error".  Does anyone
have any suggestions as to what is wrong with my code?  Thank you in
advance.
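
For reference, the terminal pipeline I am trying to reproduce is roughly
of this form (element properties trimmed, so take it as an outline rather
than the exact command I ran):

gst-launch-0.10 filesrc location=video.ogg ! oggdemux name=demux \
     qtmux name=mux ! filesink location=video.mp4 \
     demux. ! queue ! theoradec ! ffmpegcolorspace ! ffenc_mpeg4 ! queue ! mux. \
     demux. ! queue ! vorbisdec ! audioconvert ! faac ! queue ! mux.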

#include <gst/gst.h>
#include <glib.h>
#include <string.h>
#include "Encode_File_Test.h"
#include "Preprocessor_Defines.h"
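
/* NOTE: the handles used below (loop, pipeline, source, oggdemuxer, the
 * decoders/encoders, the queues, bus, res, ...) are not declared in this
 * file; they are assumed to be declared as globals in the included project
 * headers (not shown), since on_pad_added() also needs access to
 * audio_queue and video_queue. */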

/* Handle messages from the pipeline bus */
static gboolean bus_call(GstBus *bus, GstMessage *msg, gpointer data)
{
     GMainLoop *loop = (GMainLoop *) data;

     switch(GST_MESSAGE_TYPE(msg))
     {
         case GST_MESSAGE_EOS:
             g_print("End of stream\n");
             g_main_loop_quit(loop);
             break;
         case GST_MESSAGE_ERROR:
             {
                 gchar *debug;
                 GError *error;

                 gst_message_parse_error(msg, &error, &debug);
                 g_free(debug);

                 g_printerr ("Error: %s\n", error->message);
                 g_error_free(error);

                 g_main_loop_quit(loop);
                 break;
             }
         default:
             break;
     }

     return TRUE;
}

/* Called when the demuxer creates a new source pad */
static void on_pad_added(GstElement *element, GstPad *pad, gpointer data)
{
     guint result;
     GstPad *sinkpad;
     gchar *name;

     /* oggdemux names its source pads after the streams' serial numbers */
     name = gst_pad_get_name(pad);

     result = g_strcasecmp(name, "serial_1fa82364");
     /* This is the audio pad of the demuxer; link it to the queue that
        feeds the audio decoder */
     if(!result)
     {
         /* We can now link this pad with the audio queue's sink pad */
         g_print("Dynamic pad created, linking the demuxer to the audio decoder\n");

         sinkpad = gst_element_get_static_pad(audio_queue, "sink");

         gst_pad_link(pad, sinkpad);

         gst_object_unref(sinkpad);
     }

     result = g_strcasecmp(name, "serial_0f0678e1");
     /* This is the video pad of the demuxer; link it to the queue that
        feeds the video decoder */
     if(!result)
     {
         /* We can now link this pad with the video queue's sink pad */
         g_print("Dynamic pad created, linking the demuxer to the video decoder\n");

         sinkpad = gst_element_get_static_pad(video_queue, "sink");

         gst_pad_link(pad, sinkpad);

         gst_object_unref(sinkpad);
     }

     /* gst_pad_get_name() returns a copy of the name that we must free */
     g_free(name);
}

/* Connects the src pad of the audio out queue to a requested audio sink
   pad of the muxer */
static void link_audio_to_multiplexer(GstElement *audio_output,
                                      GstElement *muxer)
{
     GstPad *pad, *link_pad;

     link_pad = gst_element_get_static_pad(audio_output, "src");

     /* qtmux sink pads are request pads ("audio_%d" / "video_%d") */
     pad = gst_element_get_request_pad(muxer, "audio_%d");

     gst_pad_link(link_pad, pad);

     gst_object_unref(link_pad);
     gst_object_unref(pad);
}

/* Connects the src pad of the video out queue to a requested video sink
   pad of the muxer */
static void link_video_to_multiplexer(GstElement *video_output,
                                      GstElement *muxer)
{
     GstPad *pad, *link_pad;

     link_pad = gst_element_get_static_pad(video_output, "src");

     pad = gst_element_get_request_pad(muxer, "video_%d");

     gst_pad_link(link_pad, pad);

     gst_object_unref(link_pad);
     gst_object_unref(pad);
}

int main (int argc, char *argv[])
{
     gst_init(&argc, &argv);

     loop = g_main_loop_new(NULL, FALSE);

     /* Create elements */
     pipeline         = gst_pipeline_new("ogg-to-mp4");

     /* Element for reading in a file */
     source           = gst_element_factory_make("filesrc", "file-source");

     /* OGG elements */
     oggdemuxer       = gst_element_factory_make("oggdemux", "ogg-demuxer");
     vorbis_decoder   = gst_element_factory_make("vorbisdec", "vorbis-decoder");
     theora_decoder   = gst_element_factory_make("theoradec", "theora-decoder");

     /* MP4 elements */
     audio_encoder    = gst_element_factory_make("faac", "aac-encoder");
     video_encoder    = gst_element_factory_make("ffenc_mpeg4", "mpeg4-encoder");
     mp4_muxer        = gst_element_factory_make("qtmux", "mp4-muxer");

     /* Audio/video conversion elements */
     audio_converter  = gst_element_factory_make("audioconvert", "audio-converter");
     video_converter  = gst_element_factory_make("ffmpegcolorspace", "video-converter");
     audio_resample   = gst_element_factory_make("audioresample", "audio-resample");
     audio_rate       = gst_element_factory_make("audiorate", "audio-rate");
     videosink        = gst_element_factory_make("autovideosink", "video-sink");
     audiosink        = gst_element_factory_make("autoaudiosink", "audio-sink");

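     /* NOTE: audio_resample, audio_rate, videosink and audiosink are
        created and null-checked below, but they are never added to the
        pipeline or linked, so they are unused in this transcoding path. */
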
     /* Queues */
     video_queue      = gst_element_factory_make("queue", "video-queue");
     audio_queue      = gst_element_factory_make("queue", "audio-queue");
     video_out_queue  = gst_element_factory_make("queue", "video-out-queue");
     audio_out_queue  = gst_element_factory_make("queue", "audio-out-queue");

     /* Element to write to the file */
     filesink         = gst_element_factory_make("filesink", "file-sink");

     /* Make sure we were able to create all of the elements */
     if(!pipeline || !source || !oggdemuxer || !vorbis_decoder ||
        !theora_decoder || !audio_encoder || !video_encoder || !mp4_muxer ||
        !audio_converter || !video_converter || !audio_resample || !audio_rate ||
        !videosink || !audiosink || !video_queue || !audio_queue ||
        !video_out_queue || !audio_out_queue || !filesink)
     {
         g_printerr("One or more elements could not be created. Exiting.\n");
         return -1;
     }

     /* Input and output files */
     g_object_set(G_OBJECT(source), "location", "c:\\video.ogg", NULL);
     g_object_set(G_OBJECT(filesink), "location", "c:\\video(encoded from ogg).mp4", NULL);

     /* Add a message handler to the pipeline bus */
     bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
     gst_bus_add_watch(bus, bus_call, loop);
     gst_object_unref(bus);

     /* add elements to the pipeline */
     gst_bin_add_many(GST_BIN(pipeline), source, oggdemuxer,
                      video_queue, theora_decoder, video_converter, video_encoder,
                      video_out_queue, audio_queue, vorbis_decoder, audio_converter,
                      audio_encoder, audio_out_queue, mp4_muxer, filesink, NULL);

     /* link the elements together */
     gst_element_link(source, oggdemuxer);

     /* video_queue -> theoradec -> ffmpegcolorspace -> ffenc_mpeg4 ->
        video_out_queue ~> qtmux */
     res = gst_element_link_many(video_queue, theora_decoder, video_converter,
                                 video_encoder, video_out_queue, NULL);
     if(!res)
     {
         g_printerr("There was a problem linking video elements.\n");
         return 1;
     }

     /* audio_queue -> vorbisdec -> audioconvert -> faac ->
        audio_out_queue ~> qtmux */
     res = gst_element_link_many(audio_queue, vorbis_decoder, audio_converter,
                                 audio_encoder, audio_out_queue, NULL);
     if(!res)
     {
         g_printerr("There was a problem linking audio elements.\n");
         return 1;
     }
     /* The demuxer's source pads appear dynamically, so link them from
        the pad-added callback */
     g_signal_connect(oggdemuxer, "pad-added",
                      G_CALLBACK(on_pad_added), NULL);

     /* qtmux -> filesink */
     res = gst_element_link(mp4_muxer, filesink);

     link_video_to_multiplexer(video_out_queue, mp4_muxer);
     link_audio_to_multiplexer(audio_out_queue, mp4_muxer);


     /* Set the pipeline state to playing and start the main loop */
     g_print("Starting the pipeline\n");
     gst_element_set_state(pipeline, GST_STATE_PLAYING);
     g_main_loop_run(loop);

     /* We are done now, do a little bit of cleaning up */
     g_print("Stopping the pipeline.\n");
     gst_element_set_state(pipeline, GST_STATE_NULL);
     gst_object_unref(GST_OBJECT(pipeline));

     return 0;
}
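
P.S.  One thing I was not sure about is the hard-coded serial pad names in
on_pad_added() -- as far as I understand, oggdemux derives those names from
the stream serial numbers, so they would change from file to file.  A rough,
untested sketch of a caps-based handler I have been considering instead is
below (it relies on the same audio_queue/video_queue globals as the code
above):

/* Untested sketch: choose the branch from the new pad's caps instead of
 * from hard-coded serial names. */
static void on_pad_added_by_caps(GstElement *element, GstPad *pad, gpointer data)
{
     GstCaps *caps;
     GstStructure *str;
     const gchar *media_type;
     GstPad *sinkpad = NULL;

     /* Inspect the media type of the pad oggdemux just created */
     caps = gst_pad_get_caps(pad);
     str = gst_caps_get_structure(caps, 0);
     media_type = gst_structure_get_name(str);

     if(g_str_has_prefix(media_type, "audio/"))
         sinkpad = gst_element_get_static_pad(audio_queue, "sink");
     else if(g_str_has_prefix(media_type, "video/"))
         sinkpad = gst_element_get_static_pad(video_queue, "sink");

     if(sinkpad != NULL)
     {
         if(GST_PAD_LINK_FAILED(gst_pad_link(pad, sinkpad)))
             g_printerr("Failed to link demuxer pad with caps %s\n", media_type);
         gst_object_unref(sinkpad);
     }

     gst_caps_unref(caps);
}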

