[gst-devel] Trouble using x264enc with a tee

JonathanHenson jonathan.henson at innovisit.com
Mon Dec 6 21:40:26 CET 2010


Here is the code. I actually have started using ffmux_asf until I can get
the x264enc to work. However, this is the same exact pipeline except I am
using the asf encoders and muxers instead of x264 and avi. Now I
have the problem of when I use filesink with a location, the file is perfect
(though with no seeking--don't know what that is about), however, when I use
this code, the client on the other end of the socket can't play the file. I
added a standard file to the multifdsink as a test but it isn't receiving
any output.

Thanks for your reply,

I hope your head gets better.

/*
 * H264Stream.cpp
 *
 *  Created on: Nov 12, 2010
 *      Author: jonathan
 */

#include "H264Stream.h"
#include "VideoInput.h"

// File descriptor for the debug dump file fed to multifdsink.
// NOTE(review): file-scope global shared across AddStreamOutput/RemoveStream;
// should probably be a class member — confirm no other TU relies on it.
int fileD;

H264Stream::H264Stream() : PThread (1000, NoAutoDeleteThread,
HighestPriority, "H264Stream"),
	encoding(false)
{
	// Temporary hard-coded capture settings: CIF video @ 25 fps,
	// 16-bit / 8 kHz stereo audio.
	width = 352;
	height = 288;
	fps = 25;

	audioChannels = 2;
	audioSampleRate = 8000;
	bitWidth = 16;

	GError* error = NULL;

	// Build the capture pipeline: v4l2 video through balance/overlay filters,
	// teed to an appsink and a WMV2 encoder; ALSA audio through a volume
	// filter, teed to an appsink and a WMA2 encoder; both muxed into ASF and
	// fanned out via multifdsink.
	gchar* command = g_strdup_printf (
			"v4l2src ! video/x-raw-yuv, format=(fourcc)I420, width=%d, height=%d, framerate=(fraction)%d/1 ! "
			"videobalance name=VideoBalance ! textoverlay name=chanNameFilter ! "
			"textoverlay name=osdMessageFilter ! textoverlay name=sessionTimerOverlay ! "
			"tee name=t ! queue ! appsink name=videoSink t. ! queue ! "
			"ffenc_wmv2 name=videoEncoder me-method=5 ! amux.  alsasrc ! "
			"audio/x-raw-int, depth=%d, width=%d, channels=2, endianness=1234, rate=%d, signed=true ! "
			"volume name=volumeFilter ! "
			"tee name=souTee ! queue ! appsink name=soundSink souTee. ! queue ! "
			"ffenc_wmav2 ! amux. ffmux_asf name=amux ! multifdsink name=multifdsink",
			width, height, fps, bitWidth, bitWidth, audioSampleRate);

	g_print ("Pipeline: %s\n", command);
	h264Pipeline = gst_parse_launch (command, &error);
	g_free (command);            // BUG FIX: the printf'd string was leaked

	if (error != NULL)
	{
		std::cout << error->message << "\n";
		g_error_free (error);    // BUG FIX: the GError was leaked
	}

	// Look up the named elements we need to poke at runtime.
	chanNameFilter = gst_bin_get_by_name (GST_BIN (h264Pipeline), "chanNameFilter");
	osdMessageFilter = gst_bin_get_by_name (GST_BIN (h264Pipeline), "osdMessageFilter");
	sessionTimerFilter = gst_bin_get_by_name (GST_BIN (h264Pipeline), "sessionTimerOverlay");
	videoBalance = gst_bin_get_by_name (GST_BIN (h264Pipeline), "VideoBalance");
	videoEncoder = gst_bin_get_by_name (GST_BIN (h264Pipeline), "videoEncoder");
	volume = gst_bin_get_by_name (GST_BIN (h264Pipeline), "volumeFilter");
	multifdSink = gst_bin_get_by_name (GST_BIN (h264Pipeline), "multifdsink");
	soundSink = gst_bin_get_by_name (GST_BIN (h264Pipeline), "soundSink");
}

H264Stream::~H264Stream()
{
	// Detach every client fd from multifdsink and free its socket wrapper.
	// BUG FIX: the iterator type was written as the non-compiling
	// `std::map::iterator` (template arguments missing).
	for(std::map<int, ClientSocket*>::iterator pair = streamHandles.begin();
			pair != streamHandles.end(); ++pair)
	{
		// BUG FIX: dropped the stray trailing NULL vararg — the "remove"
		// action signal takes exactly one argument (the fd).
		g_signal_emit_by_name(multifdSink, "remove", pair->first);
		delete pair->second;
	}

	streamHandles.clear();

	// Tear the pipeline down before releasing our reference.
	gst_element_set_state (h264Pipeline, GST_STATE_NULL);
	gst_object_unref (GST_OBJECT (h264Pipeline));
}

void H264Stream::Main()
{
	// Thread body: once per second, push the current OSD settings
	// (channel name, message, session timer) into the textoverlay elements.
	while(true)
	{
		{
			// BUG FIX: the lock is now scoped so it is NOT held while the
			// thread sleeps — previously every other method blocked on
			// `mutex` for up to a second per iteration.
			PWaitAndSignal m(mutex);
			if(encoding)
			{
			  OSDSettings osd;

			  if(osd.getShowChanName())
			  {
				  g_object_set (G_OBJECT (chanNameFilter), "silent", false , NULL);
				  g_object_set (G_OBJECT (chanNameFilter), "text",
	osd.getChanName().c_str() , NULL);
				  g_object_set (G_OBJECT (chanNameFilter), "halignment",
	osd.getChanNameHAlign() , NULL);
				  g_object_set (G_OBJECT (chanNameFilter), "valignment",
	osd.getChanNameVAlign() , NULL);
				  g_object_set (G_OBJECT (chanNameFilter), "wrap-mode",
	osd.getChanNameWordWrapMode() , NULL);
				  g_object_set (G_OBJECT (chanNameFilter), "font-desc",
	osd.getChanNameFont().c_str() , NULL);
				  g_object_set (G_OBJECT (chanNameFilter), "shaded-background",
	osd.getChanNameShadow() , NULL);
			  }
			  else
			  {
				  // Hide the overlay entirely when disabled.
				  g_object_set (G_OBJECT (chanNameFilter), "text", "" , NULL);
				  g_object_set (G_OBJECT (chanNameFilter), "silent", true , NULL);
			  }

			  if(osd.getShowOSDMessage())
			  {
				  g_object_set (G_OBJECT (osdMessageFilter), "silent", false , NULL);
				  g_object_set (G_OBJECT (osdMessageFilter), "text",
	osd.getOSDMessage().c_str() , NULL);
				  g_object_set (G_OBJECT (osdMessageFilter), "halignment",
	osd.getOSDMessageHAlign() , NULL);
				  g_object_set (G_OBJECT (osdMessageFilter), "valignment",
	osd.getOSDMessageVAlign() , NULL);
				  g_object_set (G_OBJECT (osdMessageFilter), "wrap-mode",
	osd.getOSDMessageWordWrapMode() , NULL);
				  g_object_set (G_OBJECT (osdMessageFilter), "font-desc",
	osd.getOSDMessageFont().c_str() , NULL);
				  g_object_set (G_OBJECT (osdMessageFilter), "shaded-background",
	osd.getOSDMessageShadow() , NULL);
			  }
			  else
			  {
				  g_object_set (G_OBJECT (osdMessageFilter), "text", "" , NULL);
				  g_object_set (G_OBJECT (osdMessageFilter), "silent", true , NULL);
			  }

			  if(osd.getShowSessionTimer())
			  {
				  g_object_set (G_OBJECT (sessionTimerFilter), "silent", false , NULL);
				  g_object_set (G_OBJECT (sessionTimerFilter), "text",
	osd.getSessionTimer().c_str() , NULL);
				  g_object_set (G_OBJECT (sessionTimerFilter), "halignment",
	osd.getSessionTimerHAlign() , NULL);
				  g_object_set (G_OBJECT (sessionTimerFilter), "valignment",
	osd.getSessionTimerVAlign() , NULL);
				  g_object_set (G_OBJECT (sessionTimerFilter), "wrap-mode",
	osd.getSessionTimerWordWrapMode() , NULL);
				  g_object_set (G_OBJECT (sessionTimerFilter), "font-desc",
	osd.getSessionTimerFont().c_str() , NULL);
				  g_object_set (G_OBJECT (sessionTimerFilter), "shaded-background",
	osd.getSessionTimerShadow() , NULL);
			  }
			  else
			  {
				  g_object_set (G_OBJECT (sessionTimerFilter), "text", "" , NULL);
				  g_object_set (G_OBJECT (sessionTimerFilter), "silent", true , NULL);
			  }
			}
		}

		// BUG FIX: Sleep unconditionally — the original only slept while
		// encoding, so the thread busy-spun at 100% CPU whenever idle.
		this->Sleep(1000);
	}
}

void H264Stream::RemoveStream(int handle)
{
	if(handle != -1)
	{
		g_signal_emit_by_name(multifdSink, "remove", handle, G_TYPE_NONE);
		delete streamHandles[handle];
		streamHandles.erase(handle);

		g_signal_emit_by_name(multifdSink, "remove", fileD, G_TYPE_NONE);
		close(fileD);
	}

	if(!streamHandles.size())
		StopEncoding();
}

bool H264Stream::CheckAndBeginEncoding()
{
	if(!encoding)
	{
		GstStateChangeReturn stateRet;
		stateRet = gst_element_set_state (h264Pipeline, GST_STATE_PLAYING);

		GstState state;

		stateRet = gst_element_get_state(h264Pipeline, &state, NULL, GST_SECOND);
		encoding = true;
		this->Restart();
		return true;
	}
	else
		return true;
}

bool H264Stream::StopEncoding()
{
	// Drop the pipeline back to READY (data flow stops, elements stay
	// allocated) and mark the stream idle. Always reports success.
	gst_element_set_state (h264Pipeline, GST_STATE_READY);
	encoding = false;
	return true;
}

int H264Stream::AddStreamOutput(string ip, string port)
{
	// Connect to ip:port, register the socket fd with multifdsink, and
	// return the fd (or -1 on any failure).
	PWaitAndSignal m(mutex);
	if(CheckAndBeginEncoding())
	{
		// Debug tap: also dump the muxed stream to a local file.
		// NOTE(review): global fd, reopened on every call without closing
		// the previous one — confirm this is throwaway debug code.
		fileD = open("/home/jonathan/anotherTest.wmv",
				O_RDWR | O_APPEND | O_CREAT, 0666);

		if(fileD != -1)
		{
			// BUG FIX: dropped the bogus G_TYPE_NONE vararg on the "add"
			// action signal.
			g_signal_emit_by_name(multifdSink, "add", fileD);
		}

		ClientSocket* socket = new ClientSocket(ip, atoi(port.c_str()));
		int fd = socket->getDescriptor();

		if(fd != -1)
		{
			g_signal_emit_by_name(multifdSink, "add", fd);
			// BUG FIX: `std::pair(fd, socket)` is not valid here
			// (template arguments missing); use make_pair.
			streamHandles.insert(std::make_pair(fd, socket));
			return fd;
		}

		delete socket;    // BUG FIX: socket was leaked when the fd was bad
	}
	return -1;
}

GstBuffer* H264Stream::GetAudioBuffer()
{
	// Pull the next raw audio buffer from the audio appsink;
	// NULL if the sink was never resolved from the pipeline.
	PWaitAndSignal m(mutex);

	if (soundSink == NULL)
		return NULL;

	return gst_app_sink_pull_buffer (GST_APP_SINK (soundSink));
}

GstBuffer* H264Stream::GetVideoBuffer()
{
	// Pull the next raw video buffer from the video appsink;
	// NULL if the sink was never resolved from the pipeline.
	PWaitAndSignal m(mutex);

	if (videoSink == NULL)
		return NULL;

	return gst_app_sink_pull_buffer (GST_APP_SINK (videoSink));
}

GstCaps* H264Stream::GetCurrentAudioCaps()
{
	// Report the caps currently configured on the audio appsink,
	// or NULL when the sink is unavailable.
	PWaitAndSignal m(mutex);

	if (soundSink == NULL)
		return NULL;

	return gst_app_sink_get_caps (GST_APP_SINK (soundSink));
}

GstCaps* H264Stream::GetCurrentVideoCaps()
{
	// Report the caps currently configured on the video appsink,
	// or NULL when the sink is unavailable.
	PWaitAndSignal m(mutex);

	if (videoSink == NULL)
		return NULL;

	return gst_app_sink_get_caps (GST_APP_SINK (videoSink));
}

bool H264Stream::SetSessionAudioCaps(GstCaps* caps)
{
	// Install new caps on the audio appsink. Takes ownership of the
	// caller's caps reference on success (it is unreffed here);
	// returns false — without consuming the reference — if no sink.
	PWaitAndSignal m(mutex);

	if (soundSink == NULL)
		return false;

	gst_app_sink_set_caps (GST_APP_SINK (soundSink), caps);
	gst_caps_unref(caps);
	return true;
}

bool H264Stream::SetSessionVideoCaps(GstCaps* caps)
{
	// Install new caps on the video appsink. Takes ownership of the
	// caller's caps reference on success (it is unreffed here);
	// returns false — without consuming the reference — if no sink.
	PWaitAndSignal m(mutex);

	if (videoSink == NULL)
		return false;

	gst_app_sink_set_caps (GST_APP_SINK (videoSink), caps);
	gst_caps_unref(caps);
	return true;
}

void H264Stream::SetVolume(gfloat value)
{
	// Forward the requested gain straight to the pipeline's volume element.
	g_object_set (G_OBJECT (volume), "volume", value, NULL);
}

bool H264Stream::SetSaturation(double color)
{
	// Push the saturation value to the videobalance element.
	// Always reports success.
	g_object_set (G_OBJECT (videoBalance), "saturation", color, NULL);
	return true;
}

bool H264Stream::SetBrightness(double brightness)
{
	// Push the brightness value to the videobalance element.
	// Always reports success.
	g_object_set (G_OBJECT (videoBalance), "brightness", brightness, NULL);
	return true;
}

bool H264Stream::SetHue(double hue)
{
	// Push the hue value to the videobalance element.
	// Always reports success.
	g_object_set (G_OBJECT (videoBalance), "hue", hue, NULL);
	return true;
}

bool H264Stream::SetContrast(double contrast)
{
	// Push the contrast value to the videobalance element.
	// Always reports success.
	g_object_set (G_OBJECT (videoBalance), "contrast", contrast, NULL);
	return true;
}





-- 
View this message in context: http://gstreamer-devel.966125.n4.nabble.com/Trouble-using-x264enc-with-a-tee-tp3067583p3075279.html
Sent from the GStreamer-devel mailing list archive at Nabble.com.
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://lists.freedesktop.org/archives/gstreamer-devel/attachments/20101206/e28ec8cf/attachment.htm>


More information about the gstreamer-devel mailing list