push opengl texture to gstreamer pipeline
Matthew Waters
ystreet00 at gmail.com
Tue Aug 10 06:21:22 UTC 2021
Ah, if you want to limit to a particular OpenGL variant, you need to
also call gst_gl_display_filter_gl_api() with that variant.
GST_DEBUG=gl*:7 will output a whole bunch of information about what is
generated and tried throughout this process.
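
For example, restricting the display to the GLES2 variant before any
context is created might look like this (a minimal sketch, reusing the
state->gst_display from the code further down):

  gst_gl_display_filter_gl_api (GST_GL_DISPLAY (state->gst_display),
      GST_GL_API_GLES2);
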
Cheers
-Matt
On 9/8/21 12:58 am, Lusine Hayrapetyan wrote:
> Hi Matthew,
> Seems the second approach fits my use case: I need to push textures
> which are created in OpenGL ES & EGL (i.e. not in the GStreamer
> context) into GStreamer.
> I've changed my code to use gst_gl_display_create_context &
> gst_gl_display_add_context, but I still can't read the textures in
> GStreamer; it produces the following errors after I push a buffer
> to appsrc:
> (testegl1:24602): GStreamer-CRITICAL **: 13:02:55.568: gst_debug_log_valist: assertion 'category != NULL' failed
> 0:00:14.039444188 24602 0x5611bc356d90 WARN glbasetexture gstglmemory.c:401:gst_gl_memory_read_pixels: Could not create framebuffer to read pixels for memory 0x7f8cf0017ac0
> 0:00:14.039480909 24602 0x5611bc356d90 WARN glbasememory gstglbasememory.c:585:gst_gl_base_memory_memcpy: could not read map source memory 0x7f8cf0017ac0
> 0:00:14.039505741 24602 0x5611bc356d90 WARN glmemory gstglmemorypbo.c:592:_gl_mem_copy: Could not copy GL Memory
> 0:00:14.039779268 24602 0x5611bc4f14f0 ERROR videometa gstvideometa.c:247:default_map: cannot map memory range 0-1
> 0:00:14.039846056 24602 0x5611bc4f14f0 ERROR default video-frame.c:168:gst_video_frame_map_id: failed to map video frame plane 0
> 0:00:14.039891314 24602 0x5611bc4f14f0 WARN videofilter gstvideofilter.c:297:gst_video_filter_transform:<videoconvert0> warning: invalid video buffer received
>
> This is how I implemented it, did I misunderstand something?
>
> I have a rendering thread where I initialize the OpenGL ES context and
> create both the wrapped and the new GstGLContexts.
> //
> // Description: Sets up the display, OpenGL ES context and screen stuff.
> // Creates the GstGLContexts - the wrapped one (gst_gl_context_new_wrapped)
> // and the new one (gst_gl_display_create_context).
> //
> static void
> init_ogl (APP_STATE_T * state)
> {
>   ...
>   /* get an EGL display connection */
>   state->display = eglGetDisplay (EGL_DEFAULT_DISPLAY);
>   assert (state->display != EGL_NO_DISPLAY);
>
>   /* initialize the EGL display connection */
>   result = eglInitialize (state->display, NULL, NULL);
>   assert (EGL_FALSE != result);
>
>   /* create an EGL rendering context */
>   state->context = eglCreateContext (state->display, config,
>       EGL_NO_CONTEXT, context_attributes);
>   assert (state->context != EGL_NO_CONTEXT);
>
>   /* initialize GStreamer-related resources */
>   state->gst_display =
>       gst_gl_display_egl_new_with_egl_display (state->display);
>   state->gl_context =
>       gst_gl_context_new_wrapped (GST_GL_DISPLAY (state->gst_display),
>       (guintptr) state->context, GST_GL_PLATFORM_EGL, GST_GL_API_GLES2);
>
>   /* create a new GstGLContext that shares state with the wrapped one */
>   GError *error = NULL;
>   if (!gst_gl_display_create_context (GST_GL_DISPLAY (state->gst_display),
>           state->gl_context, &state->newContext, &error))
>     g_print ("Failed to create new context\n");
>
>   if (!gst_gl_display_add_context (GST_GL_DISPLAY (state->gst_display),
>           state->newContext))
>     g_print ("Failed to add new context to display\n");
> } // init_ogl end
>
>
> static void
> sync_bus_call (GstBus * bus, GstMessage * msg, gpointer data)
> {
>   APP_STATE_T *state = (APP_STATE_T *) data;
>
>   switch (GST_MESSAGE_TYPE (msg)) {
>     case GST_MESSAGE_NEED_CONTEXT:
>     {
>       const gchar *context_type;
>       gst_message_parse_context_type (msg, &context_type);
>
>       if (g_strcmp0 (context_type, GST_GL_DISPLAY_CONTEXT_TYPE) == 0) {
>         GstGLDisplay *gl_display = GST_GL_DISPLAY (state->gst_display);
>         GstContext *context =
>             gst_context_new (GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
>         gst_context_set_gl_display (context, gl_display);
>         gst_element_set_context (GST_ELEMENT (msg->src), context);
>       } else if (g_strcmp0 (context_type, "gst.gl.app_context") == 0) {
>         GstContext *context = gst_context_new ("gst.gl.app_context", TRUE);
>         GstStructure *s = gst_context_writable_structure (context);
>         gst_structure_set (s, "context", GST_TYPE_GL_CONTEXT,
>             state->gl_context, NULL);
>         gst_element_set_context (GST_ELEMENT (msg->src), context);
>       }
>       break;
>     }
>     default:
>       break;
>   }
> } // sync_bus_call end
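>
> For completeness, this is roughly how such a handler could be attached to
> the pipeline's bus (a sketch; state->pipeline is an assumed variable, and
> sync-message emission has to be enabled for NEED_CONTEXT to be delivered
> this way):
>
>   GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (state->pipeline));
>   gst_bus_enable_sync_message_emission (bus);
>   g_signal_connect (bus, "sync-message", G_CALLBACK (sync_bus_call), state);
>   gst_object_unref (bus);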
>
>
> I use the need-data callback to create a buffer from texture_id and
> push it into the appsrc:
> g_signal_connect (state->appsrc, "need-data", G_CALLBACK (pushFrame), state);
> static bool pushFrame (..., APP_STATE_T * state)
> {
>   // Wrap the texture into GstGLMemory
>   GstVideoInfo vinfo;
>   gst_video_info_set_format (&vinfo, GST_VIDEO_FORMAT_RGBA,
>       state->screen_width, state->screen_height);
>
>   // Use state->newContext for the allocator.
>   GstAllocator *allocator =
>       GST_ALLOCATOR (gst_gl_memory_allocator_get_default (state->newContext));
>   GstGLVideoAllocationParams *params =
>       gst_gl_video_allocation_params_new_wrapped_texture (
>       state->newContext, NULL, &vinfo, 0, NULL,
>       GST_GL_TEXTURE_TARGET_2D, GST_GL_RGBA, state->tex, NULL, 0);
>
>   GstGLMemory *glMemory = GST_GL_MEMORY_CAST (gst_gl_base_memory_alloc (
>       GST_GL_BASE_MEMORY_ALLOCATOR_CAST (allocator),
>       (GstGLAllocationParams *) params));
>
>   gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
>   gst_object_unref (allocator);
>
>   // Attach the GstGLMemory to a buffer, timestamp the buffer and push it
>   // downstream.
>   GstBuffer *buffer = gst_buffer_new ();
>   gst_buffer_append_memory (buffer, GST_MEMORY_CAST (glMemory));
>
>   GstVideoMeta *vmeta = gst_buffer_add_video_meta (buffer,
>       GST_VIDEO_FRAME_FLAG_NONE, GST_VIDEO_FORMAT_RGBA,
>       state->screen_width, state->screen_height);
>
>   // Put timestamps into the buffer
>   GST_BUFFER_PTS (buffer) = timestamp;
>   GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);
>   timestamp += GST_BUFFER_DURATION (buffer);
>
>   GstFlowReturn ret;
>   g_signal_emit_by_name (state->appsrc, "push-buffer", buffer, &ret);
>
>   if (ret != GST_FLOW_OK) {
>     // Something went wrong, stop pushing.
>     g_printerr ("Something went wrong: pushing buffers into appsrc is stopped.\n");
>     return false;
>   }
>
>   return true;
> } // pushFrame end
>
> Regards,
> Lusine
>
> On Fri, Aug 6, 2021 at 8:59 AM Matthew Waters <ystreet00 at gmail.com> wrote:
>
> Hi,
>
> On 6/8/21 1:12 am, Lusine Hayrapetyan wrote:
>> Dear Matt,
>> Thank you very much for your response. It helped me understand
>> that using a 'wrapped' OpenGL context is the wrong direction to go :)
>>
>> WRT Suggestion 1:
>> Do I understand correctly that I need to get the local context from
>> GStreamer and pass it to the OpenGL rendering thread? (I have a
>> rendering thread where I set up the OpenGL ES context and screen stuff.)
>> GstGLContext* mContext = nullptr;
>> g_object_get (state->gldownload, "context", &mContext, NULL);
>> guintptr handle = gst_gl_context_get_gl_context (mContext); // is this correct?
>> state->context = (EGLContext) handle; // state->context is of EGLContext type
>> And then use state->context in OpenGL ES?
>> Do I need to get and pass the window and display from GStreamer to my
>> rendering thread as well?
>> Although my use scenario is different from this one - I need to
>> pass the context from OpenGL to GStreamer.
>
> You only need to retrieve or create a non-wrapped GstGLContext and
> use that for creating your textures that you are pushing into
> GStreamer. You don't need to use GStreamer's provided OpenGL
> context for anything else. Everything else in your sample remains
> the same. You may need to add a GstGLSyncMeta on your buffers you
> are pushing into GStreamer to provide the necessary
> synchronisation guarantees between the shared OpenGL contexts
> (application and GStreamer). On some platforms the window handle
> type and format may be important; however, in general on Linux
> (X11/Wayland) it doesn't really matter.
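>
> A minimal sketch of adding that meta to a buffer before pushing it
> (assuming it runs on the thread where the producing OpenGL context is
> current; producer_context and buffer are placeholders):
>
>   /* record a sync point in the producing context so GStreamer's shared
>    * context can wait for the texture to be fully rendered */
>   GstGLSyncMeta *sync_meta =
>       gst_buffer_add_gl_sync_meta (producer_context, buffer);
>   gst_gl_sync_meta_set_sync_point (sync_meta, producer_context);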
>
> You must not attempt to use GStreamer's OpenGL context as-is
> (using e.g. eglMakeCurrent() or anything of the like) from outside
> the GStreamer OpenGL context thread (as provided by the
> gst_gl_context_thread_add() API).
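>
> For illustration, dispatching work onto GStreamer's OpenGL thread via
> that API could look like this (a sketch; do_gl_work, gstreamer_gl_context
> and user_data are placeholders):
>
>   static void
>   do_gl_work (GstGLContext * context, gpointer data)
>   {
>     /* runs on the GStreamer OpenGL thread with this context current */
>   }
>
>   /* marshals the call onto the context's OpenGL thread and waits for it */
>   gst_gl_context_thread_add (gstreamer_gl_context, do_gl_work, user_data);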
>
>> WRT Suggestion 2:
>> gst_gl_display_create_context accepts an other_context argument;
>> should the other_context be the 'wrapped' context?
>
> Yes. other_context is the GstGLContext that will be shared with
> the newly created GstGLContext.
>
> Side note, GStreamer cannot use any application-provided OpenGL
> context as-is due to the overhead of dealing with all the OpenGL
> state that may be changed behind GStreamer's back. This is why
> the OpenGL context sharing dance is required.
>
> Cheers
> -Matt
>
>> Best Regards,
>> Lusine
>>
>> On Thu, Aug 5, 2021 at 12:39 PM Matthew Waters
>> <ystreet00 at gmail.com <mailto:ystreet00 at gmail.com>> wrote:
>>
>> So, I think you almost have the correct sequence.
>>
>> Response inline.
>>
>> On 5/8/21 1:04 am, Lusine Hayrapetyan via gstreamer-devel wrote:
>>> Hi Folks,
>>> I'm struggling with the following issue and can't understand
>>> what I'm doing wrong.
>>> I need to pass an OpenGL texture to the GStreamer pipeline.
>>> I have a rendering thread where I create the OpenGL texture; the
>>> following objects are created in this thread:
>>> EGLDisplay display;
>>> EGLContext context;
>>>
>>> I create the GStreamer pipeline in the main thread and, as described
>>> in the following article, share an X11 display and GstGLContext with
>>> the bus callback:
>>> http://ystreet00.blogspot.com/2015/09/gstreamer-16-and-opengl-contexts.html
>>>
>>> GstGLDisplayEGL and GstGLContext are created in this way:
>>> GstGLDisplayEGL* gst_display =
>>>     gst_gl_display_egl_new_with_egl_display (display);
>>> GstGLContext *gl_context =
>>>     gst_gl_context_new_wrapped (GST_GL_DISPLAY (gst_display),
>>>     (guintptr) context, GST_GL_PLATFORM_EGL, GST_GL_API_GLES2);
>>>
>>> The first element of my pipeline is appsrc:
>>> appsrc stream-type=0 emit-signals=1 format=3
>>>     caps=video/x-raw(memory:GLMemory), width=300, height=300,
>>>     framerate=(fraction)20/1, format=(string)RGBA ! gldownload ! ...
>>>
>>> I use the need-data callback to create a buffer from texture_id and
>>> push it into the appsrc:
>>> g_signal_connect (state->appsrc, "need-data", G_CALLBACK (pushFrame), state);
>>>
>>> bool pushFrame()
>>> {
>>>   // Wrap the texture into GstGLMemory
>>>   GstVideoInfo vinfo;
>>>   gst_video_info_set_format (&vinfo, GST_VIDEO_FORMAT_RGBA, 300, 300);
>>>
>>>   GstAllocator* allocator =
>>>       GST_ALLOCATOR (gst_gl_memory_allocator_get_default (gl_context));
>>>
>>>   GstGLVideoAllocationParams* params =
>>>       gst_gl_video_allocation_params_new_wrapped_texture (
>>>       state->gl_context, NULL, &vinfo, 0, NULL,
>>>       GST_GL_TEXTURE_TARGET_2D, GST_GL_RGBA, texture_id, NULL, 0);
>>
>> The use of state->gl_context is probably your OpenGL context
>> that has been wrapped from the application. This 'wrapped'
>> OpenGL context has some limitations, one being that GStreamer
>> cannot actually do a complete gst_gl_context_thread_add where
>> the request is marshalled to an OpenGL-specific thread. This
>> is effectively what that critical is complaining about.
>>
>> To do this properly, you would need to do one of two things:
>> 1. Retrieve the OpenGL context from the downstream gldownload
>> element using either the 'context' property, an appropriate
>> context query (GST_QUERY_CONTEXT), or the helper
>> gst_gl_query_local_gl_context() (a small sketch follows below).
>> 2. Create your own GStreamer OpenGL context and add it to the
>> GstGLDisplay using something like:
>> https://gitlab.freedesktop.org/gstreamer/gst-plugins-base/-/blob/master/gst-libs/gst/gl/gstglbasefilter.c#L550-565
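>>
>> A small sketch of option 1 (assuming state->appsrc is already linked to
>> a downstream OpenGL element such as gldownload that has a context;
>> error handling omitted):
>>
>>   GstGLContext *downstream_context = NULL;
>>   /* ask downstream for the OpenGL context it is using */
>>   if (!gst_gl_query_local_gl_context (GST_ELEMENT (state->appsrc),
>>           GST_PAD_SRC, &downstream_context))
>>     g_print ("downstream has not provided an OpenGL context yet\n");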
>>
>> Cheers
>> -Matt
>>
>>>   // The following line produces an error!!!
>>>   GstGLMemory* glMemory = GST_GL_MEMORY_CAST (gst_gl_base_memory_alloc (
>>>       GST_GL_BASE_MEMORY_ALLOCATOR_CAST (allocator),
>>>       (GstGLAllocationParams*) params));
>>>
>>>   gst_gl_allocation_params_free ((GstGLAllocationParams *) params);
>>>   gst_object_unref (allocator);
>>>
>>>   // Attach the GstGLMemory to a buffer, timestamp the buffer and push
>>>   // it downstream
>>>   GstBuffer* buffer = gst_buffer_new ();
>>>   gst_buffer_append_memory (buffer, GST_MEMORY_CAST (glMemory));
>>>
>>>   // Put timestamps into the buffer
>>>   GST_BUFFER_PTS (buffer) = timestamp;
>>>   GST_BUFFER_DURATION (buffer) = gst_util_uint64_scale_int (1, GST_SECOND, 2);
>>>   timestamp += GST_BUFFER_DURATION (buffer);
>>>
>>>   GstFlowReturn ret;
>>>   g_signal_emit_by_name (state->appsrc, "push-buffer", buffer, &ret);
>>>
>>>   if (ret != GST_FLOW_OK) {
>>>     // Something went wrong, stop pushing.
>>>     g_printerr ("Something went wrong: pushing buffers into appsrc is stopped.\n");
>>>     return false;
>>>   }
>>>
>>>   return true;
>>> }
>>>
>>> pushFrame produces the following error:
>>> gst_gl_context_thread_add: assertion
>>> 'context->priv->active_thread == g_thread_self ()' failed
>>>
>>> What am I doing wrong, or how can I push a GPU texture to GStreamer?
>>>
>>> Thanks,
>>> Lusine
>>>
>>
>