Encode YUV420 buffer with appsrc
Antonio Ospite
ao2 at ao2.it
Tue Oct 24 15:47:27 UTC 2017
On Tue, 24 Oct 2017 02:39:41 -0700 (MST)
pchaurasia <pchaurasia at gameloreinc.com> wrote:
> Hi Antonio,
>
> 1. When I set image width to 2048, I do get better looking video. However,
> It seems colors are not perfectly right, in the video. I think I am 50%
> there.
This is because the data format is not actually I420 but YV12: the U and
V planes are swapped.
Setting the width equal to the stride helped in figuring this out.
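By the way, if you want to double-check this from code, here is a small
standalone sketch (just for illustration, it is not meant to go into your
program) which asks GstVideoInfo where each component lives for I420 and
for YV12; the U and V offsets come out swapped between the two formats:
-----------------------------------------------------------------------
/* Print where the Y, U and V components live for I420 and YV12.
 * Build with:
 *   gcc yuv-layout.c $(pkg-config --cflags --libs gstreamer-video-1.0)
 */
#include <gst/gst.h>
#include <gst/video/video.h>

static void print_layout (GstVideoFormat fmt, gint width, gint height)
{
  const gchar *names[] = { "Y", "U", "V" };
  GstVideoInfo info;
  guint c;

  gst_video_info_init (&info);
  gst_video_info_set_format (&info, fmt, width, height);

  g_print ("%s:\n", gst_video_format_to_string (fmt));
  for (c = 0; c < GST_VIDEO_INFO_N_COMPONENTS (&info); c++)
    g_print ("  %s: plane %u, offset %" G_GSIZE_FORMAT "\n",
        names[c],
        (guint) GST_VIDEO_INFO_COMP_PLANE (&info, c),
        (gsize) GST_VIDEO_INFO_COMP_OFFSET (&info, c));
}

int main (int argc, char *argv[])
{
  gst_init (&argc, &argv);

  print_layout (GST_VIDEO_FORMAT_I420, 1920, 1080);
  print_layout (GST_VIDEO_FORMAT_YV12, 1920, 1080);

  return 0;
}
-----------------------------------------------------------------------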
> 2. I dumped the image as raw pixel image of size 2048*1080*1.5. In YUV
> Planar format the way you described above. I am attaching the image.
A very handy way to figure out the raw data format is to use the
rawvideoparse element; for example, I can correctly interpret the frame
you sent with the following script:
-----------------------------------------------------------------------
#!/bin/sh
set -e
set -x
WIDTH=1920
HEIGHT=1080
STRIDE=2048
STRIDE1=$STRIDE
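# the chroma planes are subsampled by 2, so their stride is half the luma one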
STRIDE2=$(($STRIDE / 2))
STRIDE3=$(($STRIDE / 2))
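# byte offsets at which each of the three planes starts in the padded buffer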
OFFSET1=0
OFFSET2=$(($STRIDE * $HEIGHT))
OFFSET3=$(($STRIDE * $HEIGHT + ($STRIDE / 2) * ($HEIGHT / 2)))
gst-launch-1.0 filesrc location=frame0065.jpg ! \
rawvideoparse \
width=$WIDTH \
height=$HEIGHT \
format=yv12 \
plane-strides="<$STRIDE1,$STRIDE2,$STRIDE3>" \
plane-offsets="<$OFFSET1,$OFFSET2,$OFFSET3>" ! \
videoconvert ! pngenc ! filesink location=out.png
-----------------------------------------------------------------------
> 3. I am attaching the code which will load this image and try to encode.
> This code also displays the image before encoding. WHen I run , I see that
> pre-encoded image as displayed by imshow() give right colors - however - the
> encoded image does not.
To set the offsets you have to calculate the plane sizes in bytes as
above; then set the right pixel format and you will have an almost correct
image. I say _almost_ because there is still a border on the right.
You can fix that by differentiating again between the width of the final
image and the stride of the raw data.
int stride = 2048;
int width = 1920;
and use these variables appropriately, as you were doing before.
I didn't do that in the patch below to keep the changes to a minimum.
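Just to illustrate the idea, here is a sketch of how the buffer setup could
look with separate width and stride values (the wrap_yv12_frame() helper is
hypothetical, it is not in your code nor in the patch below, and I have not
tested it):
-----------------------------------------------------------------------
#include <gst/gst.h>
#include <gst/video/video.h>

/* Wrap one YV12 frame whose rows are padded to 'stride' bytes while
 * advertising the real width x height, so that the right-hand padding
 * does not end up in the encoded picture. */
static GstBuffer *
wrap_yv12_frame (guchar *data, gint width, gint height, gint stride)
{
  gsize offset[3];
  gint strides[3];
  gsize size = stride * height * 3 / 2;
  GstBuffer *buffer;

  /* plane offsets in bytes, like in the rawvideoparse example above */
  offset[0] = 0;
  offset[1] = stride * height;
  offset[2] = stride * height + (stride / 2) * (height / 2);

  /* the strides describe the padded memory layout */
  strides[0] = stride;
  strides[1] = stride / 2;
  strides[2] = stride / 2;

  buffer = gst_buffer_new_wrapped_full ((GstMemoryFlags) 0, data, size,
      0, size, NULL, NULL);

  /* width and height here (and in the caps) are the visible size */
  gst_buffer_add_video_meta_full (buffer, GST_VIDEO_FRAME_FLAG_NONE,
      GST_VIDEO_FORMAT_YV12, width, height, 3, offset, strides);

  return buffer;
}
-----------------------------------------------------------------------
In start_feed() you would then do something like
wrap_yv12_frame(inputImgGray.data, 1920, 1080, 2048) and keep 1920x1080 in
the caps.
Anyway, here is the minimal patch: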
$ diff -pruN main3.cpp.orig main3.cpp
--- main3.cpp.orig 2017-10-24 16:57:14.209146385 +0200
+++ main3.cpp 2017-10-24 17:38:22.660606798 +0200
@@ -47,7 +47,7 @@ typedef struct _CustomData {
int width = 2048;
int height = 1080;
-static int fno = 60;
+static int fno = 65;
unsigned char *imgdata;
int dx = 0;
int dy = 0;
@@ -64,8 +64,13 @@ static void start_feed (GstElement *sour
gsize m_offset[3];
gint m_stride[3];
- m_offset[0] = m_offset[1] = m_offset[2] = 0;
+ // at the beginning of the data
+ m_offset[0] = 0;
+ // after the first plane
+ m_offset[1] = width * height;
+ // after the first and second plane
+ m_offset[2] = width * height + (width / 2) * (height / 2); // or: width * height * 1.25
m_stride[0] = width;
m_stride[1] = width/2;
m_stride[2] = width/2;
@@ -109,7 +114,7 @@ static void start_feed (GstElement *sour
#endif
m_pgstBuffer = gst_buffer_new_wrapped_full( (GstMemoryFlags)0, (gpointer)(inputImgGray.data), size, 0, size, NULL, NULL );
- m_pgstVideoMeta = gst_buffer_add_video_meta_full(m_pgstBuffer,GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_I420, width,height, 3, m_offset, m_stride );
+ m_pgstVideoMeta = gst_buffer_add_video_meta_full(m_pgstBuffer,GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_YV12, width,height, 3, m_offset, m_stride );
//ref buffer to give copy to appsrc
gst_buffer_ref(m_pgstBuffer);
@@ -176,7 +181,7 @@ main (int argc, char * argv[])
data.app_source = gst_element_factory_make ("appsrc", "audio_source");
//data.app_sink = gst_element_factory_make ("appsink", "app_sink");
data.m_pvideoConvert = gst_element_factory_make("autovideoconvert", "aa-videoconvert");
- data.m_pencoder = gst_element_factory_make("omxh265enc", "aa-videoencoder");
+ data.m_pencoder = gst_element_factory_make("x264enc", "aa-videoencoder");
data.m_pmux = gst_element_factory_make("matroskamux", "aa-mux");
data.m_pfsink = gst_element_factory_make("filesink", "aa-filesink");
data.sourceid = 0;
@@ -192,11 +197,11 @@ main (int argc, char * argv[])
}
- gst_video_info_set_format(&info, GST_VIDEO_FORMAT_I420, width, height);
+ gst_video_info_set_format(&info, GST_VIDEO_FORMAT_YV12, width, height);
caps = gst_video_info_to_caps(&info);
caps = gst_caps_new_simple ("video/x-raw",
- "format",G_TYPE_STRING,"I420",
+ "format",G_TYPE_STRING,"YV12",
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, 30, 1,
Ciao ciao,
Antonio
--
Antonio Ospite
https://ao2.it
https://twitter.com/ao2it
A: Because it messes up the order in which people normally read text.
See http://en.wikipedia.org/wiki/Posting_style
Q: Why is top-posting such a bad thing?