Re: GStreamer output to a buffer then file on Jetson Tegra TX2

aasim mdamirraza at gmail.com
Thu Feb 1 09:32:35 UTC 2018


Hi Thornton,

Thanks for the quick reply. Can you suggest an example? I am not very
familiar with GStreamer.

I have pasted the core code below, where the actual command line is built:
{ gst_pipeline = (GstPipeline*)gst_parse_launch(...) }

I want to capture audio and video, encode both, mux the data, and store it
in a buffer. The tricky part is that the video side is handled by the Tegra
APIs while the audio and muxing side is handled by GStreamer.
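
To be concrete about what I mean by storing it in a buffer, my rough idea
(untested sketch; the appsink element name "mysink" and the on_new_sample()
callback are just placeholders I made up) is to have the muxed output
delivered to an appsink, so the application receives each muxed chunk in
memory and can then write it to the file itself:

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <stdio.h>

// Called for every muxed chunk once the appsink has "emit-signals" enabled.
static GstFlowReturn on_new_sample(GstAppSink *sink, gpointer user_data)
{
    FILE *out = (FILE *)user_data;   // hypothetical output file opened elsewhere

    GstSample *sample = gst_app_sink_pull_sample(sink);
    if (!sample)
        return GST_FLOW_EOS;

    GstBuffer *buf = gst_sample_get_buffer(sample);
    GstMapInfo map;
    if (gst_buffer_map(buf, &map, GST_MAP_READ))
    {
        // map.data / map.size is the muxed data sitting in application memory;
        // here it is simply appended to a file.
        fwrite(map.data, 1, map.size, out);
        gst_buffer_unmap(buf, &map);
    }
    gst_sample_unref(sample);
    return GST_FLOW_OK;
}

// Wiring after gst_parse_launch() (assuming an appsink named "mysink"):
//   GstElement *appsink_ = gst_bin_get_by_name(GST_BIN(gst_pipeline), "mysink");
//   g_object_set(appsink_, "emit-signals", TRUE, "sync", FALSE, NULL);
//   g_signal_connect(appsink_, "new-sample", G_CALLBACK(on_new_sample), out_file);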


Below is the core code in two parts:
1) the audio + mux part, and
2) the encoded video data being passed to GStreamer.


1) Audio part + mux part
// Requires <gst/gst.h> and <gst/app/gstappsrc.h> in addition to the Argus headers.
static bool execute()
{
    GMainLoop *main_loop;
    GstPipeline *gst_pipeline = NULL;
    GError *err = NULL;
    GstElement *appsrc_;

    gst_init(NULL, NULL);
    main_loop = g_main_loop_new(NULL, FALSE);

    // Build the launch string: appsrc (H.264 from the Tegra encoder) -> h264parse
    // -> flvmux, plus an ALSA capture -> AAC branch feeding the same muxer.
    char launch_string_[1024];
    snprintf(launch_string_, sizeof(launch_string_),
             "appsrc name=mysource ! "
             "video/x-h264,width=%d,height=%d,stream-format=byte-stream ! "
             "h264parse ! flvmux name=mux "
             "alsasrc device=plughw:2 ! audioresample ! "
             "audio/x-raw,rate=48000,channels=1 ! queue ! "
             "voaacenc bitrate=32000 ! queue ! mux. "
             "mux. ! queue ! filesink location=a.mp4",
             STREAM_SIZE.width(), STREAM_SIZE.height());
    printf("\n gstreamer pipeline = %s \n", launch_string_);

    gst_pipeline = (GstPipeline*)gst_parse_launch(launch_string_, &err);
    if (!gst_pipeline)
    {
        printf("gst_parse_launch failed: %s\n", err ? err->message : "unknown error");
        return false;
    }
    appsrc_ = gst_bin_get_by_name(GST_BIN(gst_pipeline), "mysource");
    gst_app_src_set_stream_type(GST_APP_SRC(appsrc_), GST_APP_STREAM_TYPE_STREAM);
    gst_element_set_state((GstElement*)gst_pipeline, GST_STATE_PLAYING);

    // Create the CameraProvider object and get the core interface.
    UniqueObj<CameraProvider> cameraProvider =
        UniqueObj<CameraProvider>(CameraProvider::create());
    ICameraProvider *iCameraProvider =
        interface_cast<ICameraProvider>(cameraProvider);
    if (!iCameraProvider)
        ORIGINATE_ERROR("Failed to create CameraProvider");

    // Get the camera devices.
    std::vector<CameraDevice*> cameraDevices;
    iCameraProvider->getCameraDevices(&cameraDevices);
    if (cameraDevices.size() == 0)
        ORIGINATE_ERROR("No cameras available");

    // ... (other code) ...
}
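
By the way, I realise flvmux produces an FLV container, so "filesink
location=a.mp4" really writes FLV data into a file that just has an .mp4
name; if an actual MP4 is needed I would presumably have to switch to
qtmux/mp4mux. For the store-in-a-buffer part, the variant of the launch
string I had in mind (untested; the appsink name "mysink" is only a
placeholder) would feed the muxed stream to the on_new_sample() callback
sketched above instead of letting GStreamer write the file directly:

// Same pipeline, but the mux output ends in an appsink so the application
// receives the muxed stream in memory (sketch only).
snprintf(launch_string_, sizeof(launch_string_),
         "appsrc name=mysource ! "
         "video/x-h264,width=%d,height=%d,stream-format=byte-stream ! "
         "h264parse ! flvmux name=mux "
         "alsasrc device=plughw:2 ! audioresample ! "
         "audio/x-raw,rate=48000,channels=1 ! queue ! "
         "voaacenc bitrate=32000 ! queue ! mux. "
         "mux. ! queue ! appsink name=mysink emit-signals=true sync=false",
         STREAM_SIZE.width(), STREAM_SIZE.height());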





2) Encoded video data being passed to GStreamer


bool ConsumerThread::encoderCapturePlaneDqCallback(struct v4l2_buffer *v4l2_buf,
                                                   NvBuffer *buffer,
                                                   NvBuffer *shared_buffer,
                                                   void *arg)
{
    ConsumerThread *thiz = (ConsumerThread*)arg;

    if (!v4l2_buf)
    {
        thiz->abort();
        ORIGINATE_ERROR("Failed to dequeue buffer from encoder capture plane");
    }

#if 1
    if (buffer->planes[0].bytesused > 0)
    {
        GstBuffer *gstbuf;
        GstMapInfo map = {0};
        GstFlowReturn ret;

        // Copy the encoded H.264 data from the V4L2 capture plane into a new
        // GstBuffer, timestamp it, and push it into the appsrc element.
        gstbuf = gst_buffer_new_allocate(NULL, buffer->planes[0].bytesused, NULL);
        gstbuf->pts = thiz->timestamp;
        thiz->timestamp += 33333333; // ~33 ms per frame (30 fps), in ns

        gst_buffer_map(gstbuf, &map, GST_MAP_WRITE);
        memcpy(map.data, buffer->planes[0].data, buffer->planes[0].bytesused);
        gst_buffer_unmap(gstbuf, &map);

        g_signal_emit_by_name(thiz->m_appsrc_, "push-buffer", gstbuf, &ret);
        gst_buffer_unref(gstbuf);
    }
    else
    {
        // bytesused == 0 means the encoder is finished: signal EOS to appsrc.
        gst_app_src_end_of_stream((GstAppSrc *)thiz->m_appsrc_);
        sleep(1);
    }
#else
    thiz->m_outputFile->write((char *)buffer->planes[0].data,
                              buffer->planes[0].bytesused);
#endif

    // ... (other code) ...
}
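
One more thing I am unsure about: after sending EOS I currently just
sleep(1). My understanding (rough sketch below, untested) is that the main
thread should instead wait on the pipeline bus for the EOS or an error
message before tearing the pipeline down:

// Wait until the EOS pushed by the encoder callback has travelled through
// the muxer and the sink, then shut the pipeline down cleanly.
GstBus *bus = gst_element_get_bus(GST_ELEMENT(gst_pipeline));
GstMessage *msg = gst_bus_timed_pop_filtered(bus, GST_CLOCK_TIME_NONE,
        (GstMessageType)(GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
if (msg)
    gst_message_unref(msg);
gst_object_unref(bus);

gst_element_set_state(GST_ELEMENT(gst_pipeline), GST_STATE_NULL);
gst_object_unref(gst_pipeline);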



--
Sent from: http://gstreamer-devel.966125.n4.nabble.com/

