How to push audio and video to RTMP server
han_ge_2005
han_ge_2005 at 163.com
Wed May 22 01:31:34 UTC 2019
Hi all,
I am writing a program to push audio and video to an RTMP server, but at the moment it only manages to push video; I have not been able to mux the audio in. Can you give me some guidance? Thanks!
The relevant code is as follows:
#include <gst/gst.h>
typedef struct _CustomData {
  GstElement *pipeline;
  GstElement *vsource;
  GstElement *vconvert;
  GstElement *x264enc;
  GstElement *h264parse;
  GstElement *vcapsfilter;
  GstElement *vsink;
  GstElement *asource;
  GstElement *aconvert;
  GstElement *amp3enc;
  GstElement *acapfilter;
  GstElement *ampegaudiopaser;
  GstElement *asink;
  GstElement *vqueue;
  GstElement *aqueue;
  GstElement *video_enc_queue;
  GstElement *audio_enc_queue;
  GstElement *flvmuxer;
  GstElement *rtmpsink;
} CustomData;
int main(int argc, char *argv[]) {
  CustomData data;
  GstBus *bus;
  GstMessage *msg;
  GstStateChangeReturn ret;
  gboolean terminate = FALSE;

  /* Initialize GStreamer */
  //gst_init (&argc, &argv);
  gst_init(NULL, NULL);
  /* Create the elements */
  //"video/x-raw, width=(int)160, height=(int)120, framerate=(fraction)30/1, format=I420, pixel-aspect-ratio=(fraction)1/1, interlace-mode=(string)progressive"
  data.vsource = gst_element_factory_make("v4l2src", "camera-source");//autovideosrc v4l2src
  data.vconvert = gst_element_factory_make("videoconvert", "convert");
  data.vsink = gst_element_factory_make("ximagesink", "camera-sink");
  data.vqueue = gst_element_factory_make("queue", "camera-queue");
  data.x264enc = gst_element_factory_make("x264enc", "x264-encoder");
  data.vcapsfilter = gst_element_factory_make("capsfilter", "video-filter");
  data.h264parse = gst_element_factory_make("h264parse", "mux-video-parser");
  data.asource = gst_element_factory_make("alsasrc", "audio-alsa-source");
  data.aconvert = gst_element_factory_make("audioconvert", "audio-converter");
  data.amp3enc = gst_element_factory_make("lamemp3enc", "mp3-encode");
  data.acapfilter = gst_element_factory_make("capsfilter", "audio-capsfilter");
  data.asink = gst_element_factory_make("filesink", "file-sink");
  data.aqueue = gst_element_factory_make("queue", "audio-queue");
  data.ampegaudiopaser = gst_element_factory_make("mpegaudioparse", "audio-paser");
  data.flvmuxer = gst_element_factory_make("flvmux", "mux-flvmux");
  data.rtmpsink = gst_element_factory_make("rtmpsink", "rtmp-sink");
  data.video_enc_queue = gst_element_factory_make("queue", "video_enc_queue");
  data.audio_enc_queue = gst_element_factory_make("queue", "audio_enc_queue");

  g_object_set(G_OBJECT(data.rtmpsink), "location", "rtmp://localhost:1935/live/movie", NULL);
  g_object_set(G_OBJECT(data.flvmuxer), "streamable", TRUE, NULL);
  /* Create the empty pipeline */
  data.pipeline = gst_pipeline_new("push-pipeline");
  if (!data.pipeline
      || !data.vsource || !data.vconvert || !data.vqueue || !data.x264enc || !data.vcapsfilter || !data.vsink
      || !data.h264parse || !data.video_enc_queue || !data.flvmuxer || !data.rtmpsink
      || !data.asource || !data.aconvert || !data.amp3enc || !data.acapfilter || !data.asink || !data.aqueue
      || !data.ampegaudiopaser || !data.audio_enc_queue) {
    g_printerr("Not all elements could be created.\n");
    return -1;
  }

  gst_bin_add_many(GST_BIN(data.pipeline), data.asource, data.aqueue, data.aconvert, data.amp3enc, data.acapfilter,
      data.ampegaudiopaser, data.audio_enc_queue, NULL);
  gst_bin_add_many(GST_BIN(data.pipeline), data.vsource, data.vqueue, data.vconvert, data.x264enc, data.vcapsfilter,
      data.h264parse, data.video_enc_queue, NULL);
  gst_bin_add_many(GST_BIN(data.pipeline), data.rtmpsink, data.flvmuxer, NULL);
//"video/x-h264, width=(int)640, height=(int)480, framerate=(fraction)30/1, stream-format=avc, alignment=au, profile=main"
GstCaps *filtercaps = gst_caps_new_simple("video/x-h264",
"stream-format", G_TYPE_STRING, "byte-stream",//avc,byte-stream
"width", G_TYPE_INT, 320,
"height", G_TYPE_INT, 240,
"framerate", GST_TYPE_FRACTION, 30, 1,
"alignment", G_TYPE_STRING, "au",
"profile", G_TYPE_STRING, "main",
NULL);
g_object_set(G_OBJECT (data.vcapsfilter), "caps", filtercaps, NULL);
  if (!gst_element_link(data.vsource, data.vconvert)) {
    g_printerr("Elements could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  if (!gst_element_link(data.vconvert, data.vqueue)) {
    g_printerr("Elements could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  if (!gst_element_link(data.vqueue, data.x264enc)) {
    g_printerr("Elements could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  if (!gst_element_link(data.x264enc, data.vcapsfilter)) {
    g_printerr("Elements could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  if (!gst_element_link(data.vcapsfilter, data.h264parse)) {
    g_printerr("Elements could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  if (!gst_element_link(data.h264parse, data.video_enc_queue)) {
    g_printerr("Elements could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
//"audio/mpeg, mpegversion=1, layer=3, rate=(int)11025, channels=(int)2"
GstCaps *fcaps = gst_caps_new_simple("audio/mpeg",
"stream-format", G_TYPE_STRING, "byte-stream",
"mpegversion", G_TYPE_INT, 1,
"layer", G_TYPE_INT, 3,
"rate", G_TYPE_INT, 11025,//44100
"channels", G_TYPE_INT, 2,
NULL);
g_object_set(G_OBJECT(data.acapfilter), "caps", fcaps, NULL);
  //g_object_set(G_OBJECT(data.asink), "location", "/home/greg/pre-push1.mp3", NULL);
  gboolean a0 = gst_element_link(data.asource, data.aqueue);
  gboolean a1 = gst_element_link(data.aqueue, data.aconvert);
  gboolean a2 = gst_element_link(data.aconvert, data.amp3enc);
  gboolean a3 = gst_element_link(data.amp3enc, data.acapfilter);
  gboolean aa = gst_element_link(data.acapfilter, data.ampegaudiopaser);
  gboolean xz = gst_element_link(data.ampegaudiopaser, data.audio_enc_queue);
  gboolean c1 = gst_element_link(data.audio_enc_queue, data.flvmuxer);
  gboolean c2 = gst_element_link(data.video_enc_queue, data.flvmuxer);
  gboolean c3 = gst_element_link(data.flvmuxer, data.rtmpsink);
  if (!(a0 && a1 && a2 && a3 && aa && xz && c1 && c2 && c3)) {
    g_printerr("Audio/mux elements could not be linked.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }

  /* Set device=/dev/video0 */
  g_object_set(data.vsource, "device", "/dev/video0", NULL);
  // g_object_set(G_OBJECT(data.amp3enc) , "target" , 1 , NULL) ;
  // g_object_set(G_OBJECT(data.amp3enc) , "cbr" , true , NULL) ; // CBR
  // g_object_set(G_OBJECT(data.amp3enc) , "bitrate" , 64 , NULL) ; // CBR
  ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    g_printerr("Unable to set the pipeline to the playing state.\n");
    gst_object_unref(data.pipeline);
    return -1;
  }
  /* Listen to the bus */
  bus = gst_element_get_bus(data.pipeline);
  do {
    msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
        (GstMessageType) (GST_MESSAGE_STATE_CHANGED | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));

    /* Parse message */
    if (msg != NULL) {
      GError *err;
      gchar *debug_info;

      switch (GST_MESSAGE_TYPE(msg)) {
        case GST_MESSAGE_ERROR:
          gst_message_parse_error(msg, &err, &debug_info);
          g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(msg->src), err->message);
          g_printerr("Debugging information: %s\n", debug_info ? debug_info : "none");
          g_clear_error(&err);
          g_free(debug_info);
          terminate = TRUE;
          break;
        case GST_MESSAGE_EOS:
          g_print("End-Of-Stream reached.\n");
          terminate = TRUE;
          break;
        case GST_MESSAGE_STATE_CHANGED:
          /* We are only interested in state-changed messages from the pipeline */
          if (GST_MESSAGE_SRC(msg) == GST_OBJECT(data.pipeline)) {
            GstState old_state, new_state, pending_state;
            gst_message_parse_state_changed(msg, &old_state, &new_state, &pending_state);
            g_print("Pipeline state changed from %s to %s:\n",
                gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
          }
          break;
        default:
          /* We should not reach here */
          g_printerr("Unexpected message received.\n");
          break;
      }
      gst_message_unref(msg);
    }
  } while (!terminate);

  /* Free resources */
  gst_object_unref(bus);
  gst_element_set_state(data.pipeline, GST_STATE_NULL);
  gst_object_unref(data.pipeline);
  return 0;
}
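
Would explicitly requesting the flvmux pads be the right direction? Below is a minimal, untested sketch of what I mean (it would sit above main()). The helper name link_branch_to_flvmux is just something I made up; the pad template names "audio" and "video" are what gst-inspect-1.0 flvmux reports. As far as I understand, gst_element_link() should request these pads automatically, but doing it by hand would at least tell me which branch fails to link:

/* Untested sketch: link an encoded branch to flvmux by explicitly requesting
 * one of its request pads ("audio" or "video") and linking pad to pad, so a
 * failure is reported per branch. */
static gboolean
link_branch_to_flvmux(GstElement *branch_tail, GstElement *flvmux, const gchar *templ_name)
{
  GstPad *srcpad = gst_element_get_static_pad(branch_tail, "src");
  GstPad *muxpad = gst_element_get_request_pad(flvmux, templ_name);
  GstPadLinkReturn lret = gst_pad_link(srcpad, muxpad);

  if (lret != GST_PAD_LINK_OK)
    g_printerr("Linking to flvmux pad '%s' failed (%d)\n", templ_name, lret);

  /* Drop our references; the muxer keeps the request pad until it is released. */
  gst_object_unref(srcpad);
  gst_object_unref(muxpad);
  return lret == GST_PAD_LINK_OK;
}

/* Intended usage, replacing the two gst_element_link() calls into the muxer:
 *   link_branch_to_flvmux(data.audio_enc_queue, data.flvmuxer, "audio");
 *   link_branch_to_flvmux(data.video_enc_queue, data.flvmuxer, "video");
 */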