Resolving format errors while using AppSrc in Java

Manoj Nirala nirala.manoj at gmail.com
Wed Jun 27 05:02:38 UTC 2018


 Dear All,

I have been trying to put together a working example for an AppSrc based
pipeline. The objective is to capture RGB frames through webcam using
OpenCV's VideoCapture module. These frames are to be pushed into a
Gstreamer pipeline using AppSrc (in live-mode), and then they are either
displayed on screen using autovideosink/glimagesink, or they are saved in
an AVI video.

This example is constructed to serve as a proof of concept for a different
application, which continuously generates processed frames. The on-screen
display works, but an error (given below) is printed repeatedly on the
console. The filesink variant prints the same error and also fails to write
the AVI file properly: the file remains 1 KB in size and does not open. The
application freezes if I insert videorate into the pipeline.

Error repeated on console: (javaw.exe:10320): GStreamer-CRITICAL **:
gst_segment_to_running_time: assertion 'segment->format == format' failed

My code is attached below for your reference. Any help in sorting out this
application would be very valuable; I really appreciate your taking the time
to answer.

Thanks,
Manoj

//////////////////////////////////////////////////////////////////////////////////////////////
/*
 * Used jars: jna-4.4.0.jar; opencv-330.jar; gst1-java-core-0.9.3.jar
 * Gstreamer version - 1.12.4, opencv version - 3.3.0
 *
 */
import java.nio.ByteBuffer;
import org.freedesktop.gstreamer.Buffer;
import org.freedesktop.gstreamer.Bus;
import org.freedesktop.gstreamer.Caps;
import org.freedesktop.gstreamer.ClockTime;
import org.freedesktop.gstreamer.Element;
import org.freedesktop.gstreamer.ElementFactory;
import org.freedesktop.gstreamer.Format;
import org.freedesktop.gstreamer.Gst;
import org.freedesktop.gstreamer.GstObject;
import org.freedesktop.gstreamer.Message;
import org.freedesktop.gstreamer.Pipeline;
import org.freedesktop.gstreamer.elements.AppSrc;
import org.freedesktop.gstreamer.elements.AppSrc.Type;
import org.freedesktop.gstreamer.event.EOSEvent;
import org.freedesktop.gstreamer.lowlevel.MainLoop;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.videoio.VideoCapture;
import org.opencv.videoio.Videoio;

public class AppSrcToFileSinkExample {
static {
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
}
private AppSrc appSrc;
private Pipeline pipe;
private Element queue;
private Element videoRate;
private Element convert;
private Element encoder;
private Element muxer;
private Element fileSink;
private Element glSink;

private  MainLoop loop;
private  static int count = 0;

private Mat matImage;
private VideoCapture capture;
private int sourceWidth;
private int sourceHeight;
private int fps = 15;
private Caps videoCaps;
private long mPts = 0;

private Bus bus;
public AppSrcToFileSinkExample() {

/****************** Creating Elements *****************************/
queue = ElementFactory.make("queue2", "queue");
videoRate = ElementFactory.make("videorate", "videoRate");
convert = ElementFactory.make("videoconvert", "convert");
encoder = ElementFactory.make("jpegenc", "encoder");
muxer = ElementFactory.make("avimux", "muxer");
fileSink = ElementFactory.make("filesink", "fileSink");
fileSink.set("location","C:/MvaWorkspace/AppSrcExample/test.avi");
glSink = ElementFactory.make("glimagesink", "glSink");
/********************** End **************************************/

/****************** OpenCV videocapture() and
appSrc************************/
capture = new VideoCapture(0);
capture.set(Videoio.CAP_PROP_FRAME_WIDTH, 640);
capture.set(Videoio.CAP_PROP_FRAME_HEIGHT, 480);
capture.set(Videoio.CAP_PROP_FRAME_COUNT, fps);
capture.set(Videoio.CAP_PROP_FORMAT, Videoio.CAP_MODE_BGR);
matImage = new Mat();

loop = new MainLoop();

sourceWidth = 640;
sourceHeight = 480;
videoCaps = Caps.fromString("video/x-raw, format=BGR, framerate=15/1,
pixel-aspect-ratio=1/1, interlace-mode=progressive, width=" + 640 + ",
height=" + 480);
appSrc = (AppSrc) ElementFactory.make("appsrc", "appSrc");
appSrc.setLive(true);
appSrc.setStreamType(Type.STREAM);
appSrc.setFormat(Format.TIME);
appSrc.setLatency(0, 1000*1000*1000);
appSrc.setSize(-1);
appSrc.setMaxBytes(sourceWidth * sourceHeight * 12);
appSrc.setCaps(videoCaps);
appSrc.set("emit-signals", true);
appSrc.setTimestamp(true);
mPts = appSrc.getBaseTime().toMicros();
appSrc.connect(new AppSrc.NEED_DATA() {
@Override
public void needData(AppSrc appSrc1, int size) {
System.out.println("Count: "+count);
byte[] imgBytes = getImageBytes();
if(imgBytes==null) {
pipe.sendEvent(new EOSEvent());
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
input(appSrc1, imgBytes);
}
});
/********************** End **************************************/


/****************** Launch pipeline *****************************/
        pipe = new Pipeline();

        //uncomment for writing to file
        pipe.addMany(appSrc, queue, convert, encoder, muxer, fileSink);
        Pipeline.linkMany(appSrc, queue, convert, encoder, muxer, fileSink);

        //uncomment to display video
        //pipe.addMany(appSrc, queue, convert, glSink);
        //Pipeline.linkMany(appSrc, queue, convert, glSink);

        bus = pipe.getBus();
        bus.connect(new Bus.MESSAGE() {
@Override
public void busMessage(Bus arg0, Message arg1) {
System.out.println(arg1.getStructure());
}
});
        bus.connect(new Bus.EOS() {
            @Override
            public void endOfStream(GstObject source) {
                System.out.println("Reached end of stream");
                //To allow End of Stream to settle down, I observed that a
sleep of around 5 seconds is required
                try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
                loop.quit();
            }
        });

        pipe.play();
        loop.run();
/********************** End **************************************/
  }

public void input(AppSrc appSrc1, byte[] imageBytes) {
Buffer buf = new Buffer(imageBytes.length);
ByteBuffer byteBuffer = ByteBuffer.wrap(imageBytes);
buf.map(true).put(byteBuffer);
buf.setDuration(ClockTime.fromMicros(1000000 / 15));
buf.setPresentationTimestamp(ClockTime.fromMicros(mPts+buf.getDuration().toMicros()));
mPts = buf.getPresentationTimestamp().toMicros();
appSrc1.pushBuffer(buf);
count++;
/////////////////////
}

//Just wanted to experiment with only 500 frames, and the code below is
written accordingly
public byte[] getImageBytes() {
byte[] imageBytes = null;
if(count >=500) {
capture.release();
return null;
}
if (capture.isOpened()) {
capture.read(matImage);
if (!matImage.empty()) {
System.out.println("matImage.channels(): " + matImage.channels());
imageBytes = new byte[matImage.channels() * matImage.cols() *
matImage.rows()];
matImage.get(0, 0, imageBytes);
System.out.println(imageBytes.length);
}
}
return imageBytes;
}


public static void main(String[] args) {
Gst.init("appsrc", args);
new AppSrcToFileSinkExample();
    }
}
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <https://lists.freedesktop.org/archives/gstreamer-devel/attachments/20180627/cc1cdd11/attachment-0001.html>


More information about the gstreamer-devel mailing list