problom with constructing GstBuffer

Ugly Face xuchangxue365812 at 126.com
Tue Jan 7 20:03:27 PST 2014


My c file:  camera_gst_codec.c
<http://gstreamer-devel.966125.n4.nabble.com/file/n4664545/camera_gst_codec.c>  
#include <string.h>
#include <jni.h>
#include <android/log.h>
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
#include <gst/video/video.h>
#include <pthread.h>

GST_DEBUG_CATEGORY_STATIC (debug_category);
#define GST_CAT_DEFAULT debug_category

/*
gst-launch -v gstrtpbin name=rtpbin v4l2src !
									video/x-raw-yuv,width=320,height=240 !
									queue !
									videorate !
									ffmpegcolorspace !
									x264enc byte-stream=true bitrate=300 speed-preset=superfast !
									rtph264pay !
									rtpbin.send_rtp_sink_0 rtpbin.send_rtp_src_0 !
									udpsink port=5000 host=$HOST ts-offset=0 name=vrtpsink
rtpbin.send_rtcp_src_0 !
									udpsink port=5001 host=$HOST sync=false async=false name=vrtcpsink
udpsrc port=5005 									name=vrtpsrc ! rtpbin.recv_rtcp_sink_0

gst-lssaunch -v filesrc location=~/a.mp4 !
									qtdemux name=demux demux.video_00 !
									queue !
									rtph264pay config-interval=2 !
									udpsink port=5000 host=127.0.0.1
*/

#if GLIB_SIZEOF_VOID_P == 8
# define GET_CUSTOM_DATA(env, thiz, fieldID) (CustomData
*)(*env)->GetLongField (env, thiz, fieldID)
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField
(env, thiz, fieldID, (jlong)data)
#else
# define GET_CUSTOM_DATA(env, thiz, fieldID) (CustomData
*)(jint)(*env)->GetLongField (env, thiz, fieldID)
# define SET_CUSTOM_DATA(env, thiz, fieldID, data) (*env)->SetLongField
(env, thiz, fieldID, (jlong)(jint)data)
#endif

#define CAPS "video/x-raw-yuv,format=(fourcc)NV21,width=128,height=96"
#define REMOTEIP "192.168.1.127"
#define RTPPORT 5000
#define RTCPINPORT 5005
#define RTCPOUTPORT 5001

typedef struct _CustomData {
	jobject app;
	GMainLoop  *main_loop;
	GstElement *pipeline;  // pipeline to hold everything.
	GstElement *appsrc;	// source of the raw data.
	guint64 frame_num;
}CustomData;

static JavaVM *java_vm;
static pthread_t gst_app_thread;
static pthread_key_t current_jni_env;
static jfieldID custom_data_field_id;

/**
 *  private methods
 * */
/* Register this thread with the VM */
static JNIEnv *attach_current_thread (void) 
{
	JNIEnv *env;
	JavaVMAttachArgs args;

	GST_DEBUG ("Attaching thread %p", g_thread_self ());
	args.version = JNI_VERSION_1_4;
	args.name = NULL;
	args.group = NULL;

	if ((*java_vm)->AttachCurrentThread (java_vm, &env, &args) < 0) 
	{
		GST_ERROR ("Failed to attach current thread");
		return NULL;
	}
  	return env;
}

/* Unregister this thread from the VM */
static void detach_current_thread (void *env) 
{
	GST_DEBUG ("Detaching thread %p", g_thread_self ());
	(*java_vm)->DetachCurrentThread (java_vm);
}

static void *app_function(CustomData *userdata)
{
	gboolean res;
  	GstPadLinkReturn linkres;
	GstCaps	   *caps;
	CustomData *data = (CustomData *)userdata;

	GstPad	   *srcpad;
	GstPad	   *sinkpad;

	GstElement *rtpbin;		// rtpbin to get rtp work.
	GstElement *queue;
	GstElement *videorate;	// to deal with timestamps.
	GstElement *videoconv;
	GstElement *x264encoder;// encode the raw data.
	GstElement *rtph264pay; // packetize the encoded data.
	GstElement *rtpsink;
	GstElement *rtcpsink;
	GstElement *rtcpsrc;
	gst_init(NULL, NULL);
	data->pipeline 	= gst_pipeline_new(NULL);
	g_assert(data->pipeline);

	data->appsrc= gst_element_factory_make("appsrc", "appsrc");
	g_assert(data->appsrc);
	g_object_set(data->appsrc, "is-live", TRUE, NULL);
	caps		= gst_caps_from_string(CAPS);
	g_object_set(data->appsrc, "caps", caps, NULL);
	
	queue		= gst_element_factory_make("queue", "queue");
	g_assert(queue);

	videorate	= gst_element_factory_make("videorate", "videorate");
	g_assert(videorate);

	videoconv	= gst_element_factory_make("ffmpegcolorspace", "videoconv");
	g_assert(videoconv);

	x264encoder = gst_element_factory_make("x264enc", "x264encoder");
	g_assert(x264encoder);
	g_object_set(x264encoder, "byte-stream", TRUE, "bitrate",
300,"speed-preset", 2, NULL);

	rtph264pay	= gst_element_factory_make("rtph264pay", "rtph264pay");
	g_assert(rtph264pay);
	g_object_set(rtph264pay, "config-interval", 2, NULL);

	rtpsink		= gst_element_factory_make("udpsink", "rtpsink");
	g_assert(rtpsink);
	g_object_set(rtpsink, "port", RTPPORT, "host", REMOTEIP,"ts-offset", 0,
NULL);

	rtcpsink	= gst_element_factory_make("udpsink", "rtcpsink");
	g_assert(rtcpsink);
	g_object_set(rtcpsink, "port", RTCPOUTPORT, "host", REMOTEIP, "sync",
FALSE, "async", FALSE, NULL);

	rtcpsrc		= gst_element_factory_make("udpsrc", "rtcpsrc");
	g_assert(rtcpsrc);
	g_object_set(rtcpsrc, "port", RTCPINPORT, NULL);

	rtpbin 		= gst_element_factory_make("gstrtpbin", "rtpbin");
	g_assert(rtpbin);

	__android_log_print (ANDROID_LOG_ERROR, "camera_gst_codec", "elements make
finished!");

	gst_bin_add_many(GST_BIN(data->pipeline), data->appsrc, queue, videorate,
videoconv, x264encoder,rtph264pay, 
		rtpsink, rtcpsink, rtcpsrc, rtpbin, NULL);

	res=gst_element_link(data->appsrc, queue);
	g_assert(res);
	
	res=gst_element_link(queue, videorate);
	g_assert(res);

	res=gst_element_link(videorate, videoconv);
	g_assert(res);

	res=gst_element_link(videoconv, x264encoder);
	g_assert(res);

	res=gst_element_link(x264encoder, rtph264pay);
	g_assert(res);

	srcpad 	= gst_element_get_static_pad(rtph264pay, "src");
	sinkpad	= gst_element_get_request_pad(rtpbin, "send_rtp_sink_0");
	linkres	= gst_pad_link(srcpad, sinkpad);
	g_assert(GST_PAD_LINK_OK == linkres);
	g_object_unref(srcpad);

	// lead rtp stream out of rtpbin via udpsink
	srcpad 	= gst_element_get_static_pad(rtpbin, "send_rtp_src_0");
	sinkpad	= gst_element_get_static_pad(rtpsink, "sink");
	linkres	= gst_pad_link(srcpad, sinkpad);
	g_assert(GST_PAD_LINK_OK == linkres);
	g_object_unref(srcpad);
	g_object_unref(sinkpad);

	// let rtcp stream into rtpbin via udpsrc.
	srcpad 	= gst_element_get_static_pad(rtcpsrc, "src");
	sinkpad	= gst_element_get_request_pad(rtpbin, "recv_rtcp_sink_0");
	linkres	= gst_pad_link(srcpad, sinkpad);
	g_assert(GST_PAD_LINK_OK == linkres);
	g_object_unref(srcpad);

	// lead rtcp stream out of rtpbin via udpsink.
	srcpad 	= gst_element_get_request_pad(rtpbin, "send_rtcp_src_0");
	sinkpad	= gst_element_get_static_pad(rtcpsink, "sink");
	linkres	= gst_pad_link(srcpad, sinkpad);
	g_assert(GST_PAD_LINK_OK == linkres);
	g_object_unref(sinkpad);

	g_print ("starting sender pipeline\n");
	gst_element_set_state (data->pipeline, GST_STATE_PLAYING);

	data->main_loop = g_main_loop_new (NULL, FALSE);
	g_main_loop_run (data->main_loop);

  	gst_element_set_state (data->pipeline, GST_STATE_NULL);
  	gst_object_unref(GST_OBJECT(data->pipeline));
}

/**
 *  public methods
 * */

//push frame data to the appsrc
static void gst_native_push_data(JNIEnv * env, jobject thiz, jbyteArray
fdata)
{
	GstCaps	   *caps;
	jbyte *temp;
	jint data_len = 18432;
	jint frame_rate = 15;
	GstBuffer *buffer;
	buffer = gst_buffer_new_and_alloc(data_len);
	CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
	caps		= gst_caps_from_string(CAPS);
	gst_buffer_set_caps(buffer, caps);
	GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (data->frame_num,
GST_SECOND, frame_rate);
	GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale (1, GST_SECOND,
frame_rate);

	data->frame_num++;
	temp = (*env)->GetByteArrayElements(env, fdata,  JNI_FALSE);
	memcpy(GST_BUFFER_DATA(buffer) , temp, data_len);
	//GST_BUFFER_DATA (buffer) = (*env)->GetByteArrayElements(env, fdata, 
JNI_FALSE);
	gst_app_src_push_buffer(data->appsrc, buffer);
	(*env)->ReleaseByteArrayElements(env, fdata, temp, JNI_ABORT);
	gst_buffer_unref(buffer);
}

/* Instruct the native code to create its internal data structure, pipeline
and thread */
static void gst_native_init (JNIEnv* env, jobject thiz) 
{
	CustomData *data = g_new0 (CustomData, 1);
	SET_CUSTOM_DATA (env, thiz, custom_data_field_id, data);
	GST_DEBUG_CATEGORY_INIT (debug_category, "camera_gst_codec", 0,
"camera_gst_codec");
	gst_debug_set_threshold_for_name("camera_gst_codec", GST_LEVEL_DEBUG);
	GST_DEBUG ("Created CustomData at %p", data);
	data->app = (*env)->NewGlobalRef (env, thiz);
	GST_DEBUG ("Created GlobalRef for app object at %p", data->app);
	pthread_create (&gst_app_thread, NULL, &app_function, data);
}

/* Quit the main loop, remove the native thread and free resources */
static void gst_native_finalize (JNIEnv* env, jobject thiz) 
{
	CustomData *data = GET_CUSTOM_DATA (env, thiz, custom_data_field_id);
	if (!data) return;
	GST_DEBUG ("Quitting main loop...");
	g_main_loop_quit (data->main_loop);
	GST_DEBUG ("Waiting for thread to finish...");
	pthread_join (gst_app_thread, NULL);
	GST_DEBUG ("Deleting GlobalRef for app object at %p", data->app);
	(*env)->DeleteGlobalRef (env, data->app);
	GST_DEBUG ("Freeing CustomData at %p", data);
	g_free (data);
	SET_CUSTOM_DATA (env, thiz, custom_data_field_id, NULL);
	GST_DEBUG ("Done finalizing");
}

/* Static class initializer: retrieve method and field IDs */
static jboolean gst_native_class_init (JNIEnv* env, jclass klass) 
{
	custom_data_field_id = (*env)->GetFieldID (env, klass,
"native_custom_data", "J");

	if (!custom_data_field_id ) 
	{
		/* We emit this message through the Android log instead of the GStreamer
log because the later
		 * has not been initialized yet.
		 */
		__android_log_print (ANDROID_LOG_ERROR, "tutorial-4", "The calling class
does not implement all necessary interface methods");
		return JNI_FALSE;
	}
	return JNI_TRUE;
}


static JNINativeMethod native_methods[] = {
  { "nativeInit",		"()V",	(void *) gst_native_init},
  { "nativeFinalize",	"()V",	(void *) gst_native_finalize},
  { "nativeClassInit",	"()Z",	(void *) gst_native_class_init},
  { "nativePushData",	"([B)V",(void *) gst_native_push_data}
};

/* Library initializer */
jint JNI_OnLoad(JavaVM *vm, void *reserved) 
{
	JNIEnv *env = NULL;

	java_vm = vm;

	if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) 
	{
		__android_log_print (ANDROID_LOG_ERROR, "camera_gst_codec", "Could not
retrieve JNIEnv");
		return 0;
	}
	jclass klass = (*env)->FindClass (env,
"com/example/camera_gst_codec/MainActivity");
	(*env)->RegisterNatives (env, klass, native_methods,
G_N_ELEMENTS(native_methods));

	pthread_key_create (&current_jni_env, detach_current_thread);

	return JNI_VERSION_1_4;
}

*The java array :
// I use JNI to run gstreamer under android platform. The function under
gives data of each frame captured by camera . The format of data is NV21*

private Camera.PreviewCallback mPreviewCallback = new
Camera.PreviewCallback() 
	{
		@Override
		public void onPreviewFrame(byte[] data, Camera camera) 
		{
			// TODO Auto-generated method stub

			nativePushData(data);

		}
	};
The data is NV21.

The warnnings when I run my program: 

01-08 11:54:09.770: W/GStreamer+bin(3386): 0:00:00.622650146 0x57ba6fa0
gstbin.c:2399:gst_bin_do_latency_func:<pipeline0> failed to query latency

01-08 11:54:10.699: W/GStreamer+videorate(3386): 0:00:01.556091308
0x4189c830 gstvideorate.c:830:gst_video_rate_event:<videorate> Got segment
but doesn't have GST_FORMAT_TIME value

01-08 11:54:10.699: W/GLib(3386): Unexpected item 0x57cb7000 dequeued from
queue queue (refcounting problem?)

01-08 11:54:10.809: W/GStreamer+basesrc(3386): 0:00:01.663970947 0x418584f0
gstbasesrc.c:2633:gst_base_src_loop:<appsrc> error: Internal data flow
error.

01-08 11:54:10.809: W/GStreamer+basesrc(3386): 0:00:01.664154053 0x418584f0
gstbasesrc.c:2633:gst_base_src_loop:<appsrc> error: streaming task paused,
reason error (-5)

I do not know what do these warnning mean. The default data format of
android camera is NV21 . 



--
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/problom-with-constructing-GstBuffer-tp4664432p4664545.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.


More information about the gstreamer-devel mailing list