Re: GStreamer output to a buffer then file on Jetson Tegra TX2
aasim
mdamirraza at gmail.com
Sat Feb 3 06:58:20 UTC 2018
a) For audio I am not setting any PTS values; only for video do I set
incrementing PTS values (on the encoded data filled into appsrc).
b) Audio starts at the same time, but the video is captured and encoded by
different APIs and then fed into appsrc, while the audio is captured and
muxed by GStreamer.
c) cmd:

sprintf(launch_string_,
        "appsrc name=mysource ! "
        "video/x-h264,width=%d,height=%d,stream-format=byte-stream !",
        STREAM_SIZE.width(), STREAM_SIZE.height());
sprintf(launch_string_ + strlen(launch_string_),
        " h264parse ! flvmux name=mux alsasrc device=plughw:2 ! "
        "audioresample ! audio/x-raw,rate=48000,channels=1 ! queue ! "
        "voaacenc bitrate=32000 ! queue ! mux. mux. ! queue ! "
        "appsink name=sink ");
d) Audio sampling rate: 48000 Hz.
e) I am unable to use an MP4 muxer, as it gives problems (the audio sample
callback is never entered, so no output file is written).
f) After getting the muxed output in the local buffer I write it to a file
as .mp4, but the muxer I am actually using is flvmux, since the MP4 muxer is
not working (a possible alternative is sketched below).
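
For reference, an untested sketch of what the MP4 branch might look like.
Stock qtmux/mp4mux write their moov index only at EOS, so a file written
chunk-by-chunk from appsink comes out unplayable unless the muxer runs in
fragmented mode; setting mp4mux's fragment-duration property (in
milliseconds) is one way to get streamable output. Whether this works on the
TX2 build of GStreamer is an assumption on my part:

sprintf(launch_string_ + strlen(launch_string_),
        /* fragmented MP4 so the output can be streamed through appsink */
        " h264parse ! mp4mux name=mux fragment-duration=1000 "
        "alsasrc device=plughw:2 ! audioresample ! "
        "audio/x-raw,rate=48000,channels=1 ! queue ! "
        "voaacenc bitrate=32000 ! queue ! mux. mux. ! queue ! "
        "appsink name=sink ");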
The audio code just captures via GStreamer and muxes it with the appsrc
video. Here the audio plays properly but the video is fast-forwarded.
If I remove the incremented PTS for video, the audio also plays
fast-forwarded.
Question:
How do I sync video and audio when the video is captured separately?
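
For reference, this is roughly how I increment the video PTS (a minimal
sketch; push_encoded_frame, frame_num and fps are placeholder names, and
appsrc operating in time format is assumed):

#include <gst/app/gstappsrc.h>

/* sketch: stamp each encoded H.264 frame with a PTS derived from a
 * frame counter, assuming a fixed frame rate */
static GstFlowReturn push_encoded_frame (GstAppSrc *appsrc,
                                         const guint8 *data, gsize size,
                                         guint64 frame_num, gint fps)
{
    GstBuffer *buf = gst_buffer_new_allocate (NULL, size, NULL);
    gst_buffer_fill (buf, 0, data, size);
    /* PTS and duration in nanoseconds; gst_util_uint64_scale avoids overflow */
    GST_BUFFER_PTS (buf)      = gst_util_uint64_scale (frame_num, GST_SECOND, fps);
    GST_BUFFER_DURATION (buf) = gst_util_uint64_scale (1, GST_SECOND, fps);
    /* gst_app_src_push_buffer takes ownership of buf */
    return gst_app_src_push_buffer (appsrc, buf);
}

An alternative I have seen suggested (untested here) is to let appsrc stamp
buffers with the pipeline running time, the same clock alsasrc uses:

g_object_set (G_OBJECT (appsrc_), "is-live", TRUE,
              "format", GST_FORMAT_TIME,
              "do-timestamp", TRUE, NULL);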
This is the audio code:
/* called when the appsink notifies us that there is a new buffer ready for
 * processing */
static GstFlowReturn on_new_sample_from_sink (GstElement * elt, gpointer user_data)
{
    gpointer raw_buffer;
    GstBuffer *buffer;
    GstMapInfo map = {0};
    GstSample *sample = NULL;

    /* get the sample from appsink */
    g_signal_emit_by_name (elt, "pull-sample", &sample);
    if (sample)
    {
        buffer = gst_sample_get_buffer (sample);
        gst_buffer_map (buffer, &map, GST_MAP_READ);
        /* copy the mapped data out and append it to the output file */
        raw_buffer = g_malloc0 (map.size);
        memcpy (raw_buffer, map.data, map.size);
        printf ("\n output sample = %" G_GSIZE_FORMAT " \n", map.size);
        m_outputFile1->write ((char *) raw_buffer, map.size);
        g_free (raw_buffer);  /* free the temporary copy */
        gst_buffer_unmap (buffer, &map);
        gst_sample_unref (sample);
    }
    return GST_FLOW_OK;
}
Main function:
static bool execute()
{
GMainLoop *main_loop;
GstPipeline *gst_pipeline = NULL;
GError *err = NULL;
GstElement *appsrc_;
gst_init (0, NULL);
main_loop = g_main_loop_new (NULL, FALSE);
char launch_string_[1024];
sprintf(launch_string_,
        "appsrc name=mysource ! "
        "video/x-h264,width=%d,height=%d,stream-format=byte-stream !",
        STREAM_SIZE.width(), STREAM_SIZE.height());
sprintf(launch_string_ + strlen(launch_string_),
        " h264parse ! flvmux name=mux alsasrc device=plughw:2 ! "
        "audioresample ! audio/x-raw,rate=48000,channels=1 ! queue ! "
        "voaacenc bitrate=32000 ! queue ! mux. mux. ! queue ! "
        "appsink name=sink ");
// sprintf(launch_string_ + strlen(launch_string_),
//         " h264parse ! avmux_mp4 name=mux alsasrc device=plughw:2 ! "
//         "audioresample ! audio/x-raw,rate=48000,channels=1 ! queue ! "
//         "voaacenc bitrate=32000 ! queue ! mux. mux. ! queue ! "
//         "appsink name=sink ");
// " h264parse ! qtmux ! filesink location=a.mp4 ");
printf("\n cmd of gstreamer = %s \n", launch_string_);
gst_pipeline = (GstPipeline*)gst_parse_launch(launch_string_, &err);
appsrc_ = gst_bin_get_by_name(GST_BIN(gst_pipeline), "mysource");
/* get sink */
sink = gst_bin_get_by_name (GST_BIN (gst_pipeline), "sink");
g_object_set(G_OBJECT(sink), "emit-signals", TRUE, "sync", FALSE, NULL);
g_signal_connect(sink, "new-sample", G_CALLBACK(on_new_sample_from_sink),
NULL);
gst_app_src_set_stream_type(GST_APP_SRC(appsrc_),
GST_APP_STREAM_TYPE_STREAM);
gst_element_set_state((GstElement*)gst_pipeline, GST_STATE_PLAYING);
/* keep the loop running so appsink keeps delivering samples */
g_main_loop_run (main_loop);
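
The video side is fed from the capture/encode thread elsewhere; for
completeness, a minimal sketch of the hookup I mean, where feed_function is
a placeholder for whatever pulls the next encoded frame and pushes it with
the PTS logic above:

/* hypothetical: appsrc asks for data when its internal queue runs low */
static void feed_function (GstElement *appsrc, guint length, gpointer user_data)
{
    /* fetch the next encoded H.264 frame from the capture thread and
     * push it with timestamps, e.g. via push_encoded_frame() above */
}

/* in execute(), before setting the pipeline to PLAYING: */
g_signal_connect (appsrc_, "need-data", G_CALLBACK (feed_function), NULL);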