Gstreamer appsrc problem

Sowrabha Indukumar sowrabha.indukumar at harman.com
Fri Jun 14 07:13:22 PDT 2013


I need your help. Please find the code below, in which I am trying to
construct the following two pipelines:
filesrc -> jpegdec -> x264enc -> appsink
appsrc -> queue -> ffdec_h264 -> ffmpegcolorspace -> ximagesink
I am trying to read three JPEG files, convert them into H.264 buffers, and store
them. Then I play the buffers in the second pipeline in a loop (a sort of slideshow).

I am able to see only two images change on the screen, and then the screen
freezes with no errors. I can see that data is being pushed into the pipeline
by the appsrc, but no further action is taken.
One more inference I could draw with GST_DEBUG enabled is that a
"gst_event_new_qos_full" event is thrown just before the screen freezes.
               
Below is the command to build the code.
gcc H264FrameTest.c -o H264FrameTest `pkg-config --cflags --libs
gstreamer-0.10 gstreamer-app-0.10` -lgstinterfaces-0.10

Thank you.

/*

============================================================================
 Name        : H264FrameTest.c
 Author      : Sowrabha Indukumar
 Version     :
 Copyright   : Your copyright notice
 Description : GStreamer 0.10 test: encode three JPEGs to H.264 via appsink,
               replay them through appsrc as a slideshow

============================================================================
 */

#include <gst/gst.h>
#include <string.h>
#include <pthread.h>
#include <glib.h>
#include <unistd.h>
#include <gst/app/gstappsrc.h>
#define VIDEO_CAPS
"video/x-h264,width=(int)1920,height=(int)1080,framerate=(fraction)0/1,stream-format=avc"

guint image;
/* Structure to contain all our information, so we can pass it to callbacks
*/
typedef struct _CustomData {
	/* Pipeline A: filesrc -> jpegdec -> x264enc -> appsink (JPEG to H.264). */
	GstElement *pipelineA, *file_source, *jpegdec, *x264enc, *app_sink;
	/* Pipeline B: appsrc -> queue -> ffdec_h264 -> ffmpegcolorspace ->
	 * ximagesink (replays the stored H.264 buffers).  NOTE(review): `freeze`
	 * is created in main() but never added to or linked in either pipeline. */
	GstElement *pipelineB, *app_source, *ffdec_h264, *ffmpegcolorspace,
			*freeze, *ximagesink,*queue;

	guint64 num_samples; /* Number of samples generated so far (for timestamp
	                        generation) — never used in this file */
	gfloat a, b, c, d; /* For waveform generation — never used in this file */

	/* The three encoded H.264 frames captured by new_buffer(), one slot per
	 * image.  NOTE(review): main() never initializes these to NULL — confirm
	 * they are zeroed before any code inspects them. */
	GstBuffer *gbuffer,*gbuffer1,*gbuffer2;
	GMutex *m_mutex; /* guards data_available */
	GCond *m_cond;   /* signaled once all three buffers have been captured */

	guint sourceid; /* id of the idle GSource feeding appsrc (0 = none) */
	gboolean data_available; /* TRUE once gbuffer2 has been filled */
	GMainLoop *main_loopA; /* loop for pipeline A, run on the main thread */
	GMainLoop *main_loopB; /* loop for pipeline B, run on the worker thread */
} CustomData;

/* This method is called by the idle GSource in the mainloop, to feed
CHUNK_SIZE bytes into appsrc.
 * The ide handler is added to the mainloop when appsrc requests us to start
sending data (need-data signal)
 * and is removed when appsrc has enough data (enough-data signal).
 */
/* Idle handler installed by start_feed(): pushes one of the three stored
 * H.264 buffers into the appsrc, cycling image 1 -> 2 -> 3 -> 1.
 * Returns TRUE to stay installed, FALSE (removing the idle source) on a
 * non-OK flow return. */
static gboolean push_data(CustomData *data) {
	GstBuffer *outbuf;
	GstBuffer *copy;
	GstFlowReturn ret;

	/* Wait until new_buffer() has captured all three encoded frames.
	 * Bug fix: the predicate must be re-checked in a while loop, not a
	 * single if — g_cond_wait() can wake spuriously, and waking with
	 * data_available still FALSE would push an unset buffer. */
	g_mutex_lock(data->m_mutex);
	while (!data->data_available) {
		printf("no data waiting \n");
		g_cond_wait(data->m_cond, data->m_mutex);
	}
	g_mutex_unlock(data->m_mutex);
	printf("have data so writing \n");

	/* Pick the next stored frame in rotation. */
	if (image == 1) {
		outbuf = data->gbuffer;
		image = 2;
	} else if (image == 2) {
		outbuf = data->gbuffer1;
		image = 3;
	} else {
		outbuf = data->gbuffer2;
		image = 1;
	}
	printf("got data pushing further of size %d\n", GST_BUFFER_SIZE( outbuf));

	/* Bug fix: push a fresh copy each time instead of re-pushing the same
	 * GstBuffer.  appsrc's do-timestamp and downstream elements modify the
	 * buffer's metadata in-flight, so repeatedly pushing the one shared
	 * buffer can stall the pipeline.  The "push-buffer" action signal does
	 * NOT take ownership (unlike gst_app_src_push_buffer), so we must drop
	 * our reference to the copy afterwards. */
	copy = gst_buffer_copy(outbuf);
	g_signal_emit_by_name (data->app_source, "push-buffer", copy, &ret);
	gst_buffer_unref(copy);

	if (ret != GST_FLOW_OK) {
		g_debug("push buffer returned %d for %d bytes \n", ret,
				GST_BUFFER_SIZE( outbuf));
		return FALSE;
	}
	return TRUE;
}

/* This signal callback triggers when appsrc needs data. Here, we add an
idle handler
 * to the mainloop to start pushing data into the appsrc */
/* "need-data" signal handler from appsrc: install the idle handler that
 * feeds buffers, unless one is already running (sourceid != 0). */
static void start_feed(GstElement *source, guint size, CustomData *data) {
	if (data->sourceid != 0)
		return;

	g_print ("Start feeding\n");
	data->sourceid = g_idle_add ((GSourceFunc) push_data, data);
}

/* This callback triggers when appsrc has enough data and we can stop
sending.
 * We remove the idle handler from the mainloop */
/* "enough-data" signal handler from appsrc: tear down the idle feeder if
 * one is currently installed, and record that it is gone. */
static void stop_feed(GstElement *source, CustomData *data) {
	if (data->sourceid == 0)
		return;

	g_print ("Stop feeding\n");
	g_source_remove (data->sourceid);
	data->sourceid = 0;
}

/* The appsink has received a buffer */
/* "new-buffer" callback on pipeline A's appsink: one JPEG has just been
 * encoded to H.264.  Stores a copy of the encoded buffer in the slot
 * selected by the global `image` counter; when the third slot (gbuffer2)
 * is filled, sets data_available and wakes push_data().
 * NOTE(review): `image` is also mutated by push_data() and changeimage()
 * on other threads with no synchronization — confirm intended.
 * NOTE(review): overwriting a slot leaks the previous gst_buffer_copy();
 * the old copy is never unreffed. */
static void new_buffer(GstElement *sink, CustomData *data) {
	GstBuffer *buffer;

	/* Retrieve the buffer from the appsink; we own the returned reference. */
	g_signal_emit_by_name(sink, "pull-buffer", &buffer);
	if (buffer) {
		g_print("Received data on appsink of buffer size %d \n",
				GST_BUFFER_SIZE( buffer));
		if(image == 1){
				data->gbuffer = gst_buffer_copy (buffer);
			}else if(image == 2){
				data->gbuffer1 = gst_buffer_copy (buffer);
			}else{
			/* Third frame captured: publish availability under the mutex
			 * and wake the (possibly waiting) push_data() thread. */
			data->gbuffer2 = gst_buffer_copy (buffer);
			g_mutex_lock(data->m_mutex);
			data->data_available = TRUE;
			g_cond_signal(data->m_cond);
			g_mutex_unlock(data->m_mutex);

		}

		gst_buffer_unref(buffer);
	}
}

/* This function is called when an error message is posted on the bus */
/* Bus "message::error" handler for pipeline B: print the error plus any
 * debug detail, then quit both main loops so the program exits. */
static void error_cb(GstBus *bus, GstMessage *msg, CustomData *data) {
	GError *err = NULL;
	gchar *debug_info = NULL;

	/* Extract and report the error details. */
	gst_message_parse_error(msg, &err, &debug_info);
	g_printerr("Error received from element %s: %s\n",
			GST_OBJECT_NAME (msg->src), err->message);
	if (debug_info != NULL)
		g_printerr("Debugging information: %s\n", debug_info);
	else
		g_printerr("Debugging information: %s\n", "none");
	g_free(debug_info);
	g_clear_error(&err);

	/* Stop both pipelines' loops. */
	g_main_loop_quit(data->main_loopA);
	g_main_loop_quit(data->main_loopB);
}

/* Bus watch for pipeline A: log EOS, and on error report it and quit
 * pipeline A's main loop.  Always returns TRUE to keep the watch alive. */
static gboolean bus_call(GstBus *bus, GstMessage *msg, CustomData *data) {
	const GstMessageType type = GST_MESSAGE_TYPE (msg);

	if (type == GST_MESSAGE_EOS) {
		g_print("End of stream\n");
	} else if (type == GST_MESSAGE_ERROR) {
		GError *error = NULL;
		gchar *debug = NULL;

		gst_message_parse_error(msg, &error, &debug);
		g_free(debug);

		g_printerr("Error: %s\n", error->message);
		g_error_free(error);

		g_main_loop_quit(data->main_loopA);
	}

	return TRUE;
}

static gboolean changeimage(CustomData *data) {
	char* img;
	while (1) {
		getchar();
		g_print("Change Image\n");
		if (image == 1) {
			img = "img1.jpg";
			g_print("test.jpg\n");
			image = 2;
		} else if(image == 2){
			img = "img2.jpg";
			g_print("test2.jpg\n");
			image = 3;
		} else if(image == 3){
			img = "img3.jpg";
			g_print("test3.jpg\n");
			image = 1;
		}
		gst_element_set_state(data->pipelineA, GST_STATE_READY);
		g_object_set(G_OBJECT(data->file_source), "location", img, NULL);
		gst_element_set_state(data->pipelineA, GST_STATE_PLAYING);

	}
	return TRUE;

}

static void startPipeline(CustomData *data) {
	GstBus *busB;
	/* Instruct the bus to emit signals for each received message, and connect
to the interesting signals */
	busB = gst_element_get_bus(data->pipelineB);
	gst_bus_add_signal_watch(busB);
	g_signal_connect(G_OBJECT(busB), "message::error", (GCallback) error_cb,
			&data);
	gst_object_unref(busB);

	/* Start playing the pipeline */
	gst_element_set_state(data->pipelineB, GST_STATE_PLAYING);

	/* Create a GLib Main Loop and set it to run */
	data->main_loopB = g_main_loop_new(NULL, FALSE);
	g_printerr("Starting pipeline B\n");
	g_main_loop_run(data->main_loopB);
}

/* Entry point: builds the two pipelines
 *   A: filesrc -> jpegdec -> x264enc -> appsink      (JPEG -> H.264 capture)
 *   B: appsrc -> queue -> ffdec_h264 -> ffmpegcolorspace -> ximagesink
 * starts B's loop in a worker thread, spawns a keyboard thread that cycles
 * the source image, then runs A's loop on this thread.
 * Returns 0 on clean shutdown, -1 on setup failure. */
int main(int argc, char *argv[]) {
	CustomData data;
	GstBus *busA;
	gchar *video_caps_text;
	GstCaps *video_caps;
	pthread_t threadB, threadImg;
	guint bus_watch_id;

	/* Bug fix: zero the whole state so the GstBuffer slots, num_samples,
	 * etc. start as NULL/0 instead of stack garbage. */
	memset(&data, 0, sizeof data);
	data.data_available = FALSE;
	data.sourceid = 0;

	/* Initialize GStreamer */
	gst_init(&argc, &argv);
	image = 1;

	/* Create the elements */
	data.file_source = gst_element_factory_make("filesrc", "file_source");
	data.jpegdec = gst_element_factory_make("jpegdec", "mjpegdec");
	data.x264enc = gst_element_factory_make("x264enc", "mx264enc");
	data.app_sink = gst_element_factory_make("appsink", "app_sink");
	data.app_source = gst_element_factory_make("appsrc", "audio_source");
	data.queue = gst_element_factory_make("queue", "mqueue");
	data.ffdec_h264 = gst_element_factory_make("ffdec_h264", "mffdec_h264");
	data.ffmpegcolorspace = gst_element_factory_make("ffmpegcolorspace",
			"mffmpegcolorspace");
	data.freeze = gst_element_factory_make("freeze", "mfreeze");
	data.ximagesink = gst_element_factory_make("ximagesink", "videosink");

	/* Create the empty pipelines */
	data.pipelineA = gst_pipeline_new("appsrc-pipeline");
	data.pipelineB = gst_pipeline_new("appsink-pipeline");

	if (!data.pipelineA || !data.pipelineB || !data.file_source
			|| !data.jpegdec || !data.x264enc || !data.app_sink
			|| !data.app_source || !data.ffdec_h264 || !data.ffmpegcolorspace
			|| !data.ximagesink || !data.freeze || !data.queue) {
		g_printerr("Not all elements could be created.\n");
		return -1;
	}
	g_object_set(G_OBJECT(data.file_source), "location", "img3.jpg", NULL);
	g_object_set (G_OBJECT (data.ximagesink), "sync", FALSE,NULL);
	g_object_set (G_OBJECT (data.ximagesink), "async", FALSE,NULL);
	g_object_set (G_OBJECT (data.app_source), "is-live", TRUE, "do-timestamp",
			TRUE, NULL);

	/* Configure appsrc.  Bug fix: the caps were built here but never set on
	 * the appsrc, leaving pipeline B without the stream-format information
	 * ffdec_h264 needs to negotiate; advertise them on the appsrc too. */
	video_caps_text = g_strdup_printf(VIDEO_CAPS);
	video_caps = gst_caps_from_string(video_caps_text);
	g_object_set(data.app_source, "caps", video_caps, NULL);
	g_signal_connect(data.app_source, "need-data", G_CALLBACK(start_feed),
			&data);
	g_signal_connect(data.app_source, "enough-data", G_CALLBACK(stop_feed),
			&data);

	/* Configure appsink */
	g_object_set(data.app_sink, "emit-signals", TRUE, "caps", video_caps,
			NULL);
	g_signal_connect(data.app_sink, "new-buffer", G_CALLBACK(new_buffer),
			&data);
	gst_caps_unref(video_caps);
	g_free(video_caps_text);

	/* Add and link all elements (all have "Always" pads). */
	gst_bin_add_many(GST_BIN (data.pipelineA), data.file_source, data.jpegdec,
			data.x264enc, data.app_sink, NULL);
	gst_bin_add_many(GST_BIN (data.pipelineB), data.app_source, data.queue,
			data.ffdec_h264, data.ffmpegcolorspace, data.ximagesink, NULL);

	if (gst_element_link_many(data.file_source, data.jpegdec, data.x264enc,
			data.app_sink, NULL) != TRUE) {
		g_printerr("Elements could not be linked.\n");
		gst_object_unref(data.pipelineA);
		return -1;
	}

	if (gst_element_link_many(data.app_source, data.queue, data.ffdec_h264,
			data.ffmpegcolorspace, data.ximagesink, NULL) != TRUE) {
		g_printerr("Elements could not be linked.\n");
		gst_object_unref(data.pipelineB);
		return -1;
	}

	data.m_mutex = g_mutex_new();
	data.m_cond = g_cond_new();

	/* Start pipeline B's main loop in a separate thread.
	 * Bug fixes: the original heap-allocated a pthread_t* with
	 * malloc(sizeof(pointer)) — wrong size, leaked — and then passed
	 * &thread (a pthread_t **) to pthread_create; a plain stack pthread_t
	 * is what the API expects.  The explicit cast keeps this call valid
	 * against startPipeline's original signature as well. */
	if (pthread_create(&threadB, NULL, (void *(*)(void *)) startPipeline,
			&data) != 0) {
		g_printerr("Unable to create thread, aborting..");
		return -1; /* bug fix: was `return FALSE` (exit status 0 = success) */
	}

	/* Watch pipeline A's bus from this thread's main loop. */
	busA = gst_element_get_bus(data.pipelineA);
	bus_watch_id = gst_bus_add_watch(busA, (GstBusFunc) bus_call, &data);
	gst_object_unref(busA);

	/* Start playing pipeline A */
	gst_element_set_state(data.pipelineA, GST_STATE_PLAYING);

	/* Keyboard thread that cycles the slideshow image (same fixes as above). */
	if (pthread_create(&threadImg, NULL, (void *(*)(void *)) changeimage,
			&data) != 0) {
		g_printerr("Unable to create thread, aborting..");
		return -1;
	}

	/* Create pipeline A's GLib main loop and run it here (blocks). */
	data.main_loopA = g_main_loop_new(NULL, FALSE);
	g_printerr("Starting pipeline A\n");
	g_main_loop_run(data.main_loopA);

	/* Free resources */
	g_source_remove(bus_watch_id);
	gst_element_set_state(data.pipelineA, GST_STATE_NULL);
	gst_object_unref(data.pipelineA);
	gst_element_set_state(data.pipelineB, GST_STATE_NULL);
	gst_object_unref(data.pipelineB);
	g_mutex_free(data.m_mutex);
	g_cond_free(data.m_cond);
	return 0;
}





--
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/Gstreamer-appsrc-problem-tp4660561.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.


More information about the gstreamer-devel mailing list