AW: Dynamic change of glshader element (Dušan Poizl)
Christian Winkler
christian at heidelbergs.de
Sat Nov 15 07:14:38 PST 2014
Dear Dusan,
thanks. I tried that.
gst_gl_shader_compile (data->distortion,&glerror);
gives me a
11-15 15:44:46.812: E/GLib(16781): gst_gl_shader_compile: assertion 'GST_GL_IS_SHADER (shader)' failed
if(gst_gl_shader_is_compiled (data->distortion)){
gst_gl_shader_set_uniform_1f(data->distortion, "test", 0.1);
}
gives me a
11-15 15:44:46.812: E/GLib(16781): gst_gl_shader_is_compiled: assertion 'GST_GL_IS_SHADER (shader)' failed
If I use
gst_gl_shader_set_uniform_1f(data->distortion, "test", 0.1);
I get a segmentation violation.
I simply do not get where to put the gst_gl_shader_set_uniform.... stuff.
/* Main method for the native code. This is executed on its own thread. */
static void *app_function (void *userdata) {
JavaVMAttachArgs args;
GstBus *bus;
CustomData *data = (CustomData *)userdata;
GSource *bus_source;
GError *error = NULL;
GError *glerror = NULL;
GstCaps *filtercaps;
gboolean false = 0;
gboolean true = 1;
GST_DEBUG ("Creating pipeline in CustomData at %p", data);
/* Create our own GLib Main Context and make it the default one */
data->context = g_main_context_new ();
g_main_context_push_thread_default(data->context);
/****************************************************************/
/* Build pipeline */
data->pipeline = gst_pipeline_new ("fpv-pipeline");
data->depayloader = gst_element_factory_make ("rtph264depay","rtph264depay");
data->decoder = gst_element_factory_make ("decodebin","decodebin");
data->video_sink = gst_element_factory_make ("glimagesink","glimagesink");
data->distortion = gst_element_factory_make ("glshader","glshader");
data->filter=gst_element_factory_make("capsfilter","filter");
if(pipelineNumberFromJava==1) { //RTSP
// rtspsrc location=rtsp://192.168.137.240:8554/test latency=0 drop-on-latency=true ! application/x-rtp, payload=96 ! rtph264depay ! decodebin ! glimagesink sync=false
data->source = gst_element_factory_make ("rtspsrc","rtspsrc");
g_object_set(G_OBJECT (data->source),"location",pipelineParameterFromJava,"latency",0,"drop-on-latency",true,NULL);
data->filter=gst_element_factory_make("capsfilter","filter");
filtercaps = gst_caps_new_simple ("application/x-rtp","payload", G_TYPE_INT, 96,NULL);
g_object_set (G_OBJECT (data->distortion), "location", "/data/data/com.lonestar.groundpi/files/distortion.frag", NULL);
gst_caps_unref (filtercaps);
g_object_set(G_OBJECT (data->video_sink),"sync",false,NULL);
if (!data->pipeline || !data->source || !data->filter || !data->depayloader || !data->decoder || !data->video_sink || !data->distortion) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
if(cardboardModeEnabled){
gst_bin_add_many (GST_BIN (data->pipeline), data->source, data->filter, data->depayloader, data->decoder, data->video_sink, data->distortion, NULL);
gst_element_link_many (data->filter, data->depayloader, data->decoder, NULL);
gst_element_link_many (data->distortion, data->video_sink, NULL);
g_signal_connect (data->source, "pad-added", G_CALLBACK (cb_new_pad), data->filter);
g_signal_connect (data->decoder, "pad-added", G_CALLBACK (cb_new_pad), data->distortion);
} else {
gst_bin_add_many (GST_BIN (data->pipeline), data->source, data->filter, data->depayloader, data->decoder, data->video_sink, NULL);
gst_element_link_many (data->filter, data->depayloader, data->decoder, NULL);
g_signal_connect (data->source, "pad-added", G_CALLBACK (cb_new_pad), data->filter);
g_signal_connect (data->decoder, "pad-added", G_CALLBACK (cb_new_pad), data->video_sink);
}
} else if( pipelineNumberFromJava==2) { //UDP
// udpsrc port=9000 ! application/x-rtp, payload=96 ! rtph264depay ! decodebin ! glimagesink sync=false
data->source = gst_element_factory_make ("udpsrc","udpsrc");
g_object_set(G_OBJECT (data->source),"port",9000,NULL);
g_object_set (G_OBJECT (data->distortion), "location", "/data/data/com.lonestar.groundpi/files/distortion.frag", NULL);
filtercaps = gst_caps_new_simple ("application/x-rtp","payload", G_TYPE_INT, 96,NULL);
g_object_set (G_OBJECT (data->filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
g_object_set(G_OBJECT (data->video_sink),"sync",false,NULL);
if (!data->pipeline || !data->source || !data->filter || !data->depayloader || !data->decoder || !data->video_sink || !data->distortion) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
if(cardboardModeEnabled){
gst_bin_add_many (GST_BIN (data->pipeline), data->source, data->filter, data->depayloader, data->decoder, data->distortion, data->video_sink, NULL);
gst_element_link_many (data->source, data->filter, data->depayloader, data->decoder, NULL);
gst_element_link_many (data->distortion, data->video_sink, NULL);
g_signal_connect (data->decoder, "pad-added", G_CALLBACK (cb_new_pad), data->distortion);
} else {
gst_bin_add_many (GST_BIN (data->pipeline), data->source, data->filter, data->depayloader, data->decoder, data->video_sink, NULL);
gst_element_link_many (data->source, data->filter, data->depayloader, data->decoder, NULL);
g_signal_connect (data->decoder, "pad-added", G_CALLBACK (cb_new_pad), data->video_sink);
}
} else if( pipelineNumberFromJava==4) { //TestPipeline
data->source = gst_element_factory_make ("videotestsrc","videotestsrc");
g_object_set (G_OBJECT (data->distortion), "location", "/data/data/com.lonestar.groundpi/files/distortion.frag", NULL);
filtercaps = gst_caps_new_simple ("video/x-raw","width", G_TYPE_INT, 1280,"height", G_TYPE_INT,720,NULL);
g_object_set (G_OBJECT (data->filter), "caps", filtercaps, NULL);
gst_caps_unref (filtercaps);
if (!data->pipeline || !data->source || !data->filter || !data->video_sink || !data->distortion) {
g_printerr ("One element could not be created. Exiting.\n");
return -1;
}
if(cardboardModeEnabled){
gst_bin_add_many (GST_BIN (data->pipeline), data->source, data->filter, data->distortion, data->video_sink, NULL);
gst_element_link_many (data->source, data->filter, data->distortion, data->video_sink, NULL);
} else {
gst_bin_add_many (GST_BIN (data->pipeline), data->source, data->filter, data->video_sink, NULL);
gst_element_link_many (data->source, data->filter, data->video_sink, NULL);
}
} else { // Complete Pipelines from app...
data->pipeline = gst_parse_launch(pipelineParameterFromJava, &error);
}
/****************************************************************/
if (error) {
gchar *message = g_strdup_printf("Unable to build pipeline: %s", error->message);
g_clear_error (&error);
set_ui_message(message, data);
g_free (message);
return NULL;
}
/* Set the pipeline to READY, so it can already accept a window handle, if we have one */
gst_element_set_state(data->pipeline, GST_STATE_READY);
data->video_sink = gst_bin_get_by_interface(GST_BIN(data->pipeline), GST_TYPE_VIDEO_OVERLAY);
if (!data->video_sink) {
GST_ERROR ("Could not retrieve video sink");
return NULL;
}
/* Instruct the bus to emit signals for each received message, and connect to the interesting signals */
bus = gst_element_get_bus (data->pipeline);
bus_source = gst_bus_create_watch (bus);
g_source_set_callback (bus_source, (GSourceFunc) gst_bus_async_signal_func, NULL, NULL);
g_source_attach (bus_source, data->context);
g_source_unref (bus_source);
g_signal_connect (G_OBJECT (bus), "message::error", (GCallback)error_cb, data);
g_signal_connect (G_OBJECT (bus), "message::state-changed", (GCallback)state_changed_cb, data);
g_signal_connect (G_OBJECT (bus), "message::clock-lost", (GCallback)clock_lost_cb, data);
gst_object_unref (bus);
/* Create a GLib Main Loop and set it to run */
GST_DEBUG ("Entering main loop... (CustomData:%p)", data);
data->main_loop = g_main_loop_new (data->context, FALSE);
check_initialization_complete (data);
g_main_loop_run (data->main_loop);
GST_DEBUG ("Exited main loop");
g_main_loop_unref (data->main_loop);
data->main_loop = NULL;
/* Free resources */
g_main_context_pop_thread_default(data->context);
g_main_context_unref (data->context);
gst_element_set_state (data->pipeline, GST_STATE_NULL);
gst_object_unref (data->video_sink);
gst_object_unref (data->source);
gst_object_unref (data->filter);
gst_object_unref (data->depayloader);
gst_object_unref (data->decoder);
gst_object_unref (data->pipeline);
return NULL;
}
-----Ursprüngliche Nachricht-----
Von: gstreamer-devel [mailto:gstreamer-devel-bounces at lists.freedesktop.org] Im Auftrag von Dušan Poizl
Gesendet: Samstag, 15. November 2014 14:04
An: Discussion of the development of and with GStreamer
Betreff: Re: Dynamic change of glshader element (Dušan Poizl)
exactly that element. in your case distortion. refer to GstGLShader doc http://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-bad-libs/html/gst-plugins-bad-libs-gstglshader.html
Dňa 15.11.2014 o 13:42 Christian Winkler napísal(a):
> Thanks!
>
> What is "shader" in your example?
> How can I access that in native code?
> I am setting up the element like this:
>
> GstElement *distortion;
> distortion = gst_element_factory_make ("glshader","glshader");
>
> Kind regards
> Christian
>
>
>
>
>
> change it to uniform variable. "uniform vec2 leftCenter;"
>
> then you should be able to change it with
> gst_gl_shader_set_uniform_2f(shader, "leftCenter", x, y);
>
> Dňa 15.11.2014 o 11:10 Christian Winkler napísal(a):
>> Dear Members,
>>
>>
>>
>> i am trying to change a glshader element at runtime in a native
>> Gstreamer application.
>>
>>
>>
>> The idea is to convert an RTSP stream to a side-by-side view for an
>> Android Google Cardboard VR device.
>>
>> I want to implement headtracking as well and therefore want to change
>> the opengl fragment dynamically based on head position.
>>
>>
>>
>> The code below works for a static .frag file.
>>
>>
>>
>> QUESTION: How can i change for example
>>
>> const vec2 leftCenter = vec2(0.25, 0.4);
>>
>> const vec2 rightCenter = vec2(0.75, 0.4);
>>
>> DYNAMICALLY during runtime in the native code?
>>
>>
>>
>> There used to be something like gstglfiltershader, but I cannot find
>> a similar functionality in the current 1.4.x releases.
>>
>>
>>
>> Thanks!!!!
>>
>> Kind regards
>>
>> Christian
>>
>>
>>
>>
>>
>> The current Pipeline is set up as follows (JNI Code for an Android App).
>>
>> This
>>
>>
>>
>>
>>
>> ….
>>
>> // rtspsrc location=rtsp://192.168.137.240:8554/test
>> latency=0 drop-on-latency=true ! application/x-rtp, payload=96 !
>> rtph264depay ! decodebin ! glimagesink sync=false
>>
>> data->source = gst_element_factory_make
>> ("rtspsrc","rtspsrc");
>>
>> g_object_set(G_OBJECT
>> (data->source),"location",pipelineParameterFromJava,"latency",0,"drop
>> -
>> on-latency",true,NULL);
>>
>>
>>
>> data->pipeline = gst_pipeline_new ("fpv-pipeline");
>>
>>
>> data->filter=gst_element_factory_make("capsfilter","filter");
>>
>> data->depayloader = gst_element_factory_make
>> ("rtph264depay","rtph264depay");
>>
>> data->decoder = gst_element_factory_make
>> ("decodebin","decodebin");
>>
>> data->distortion = gst_element_factory_make
>> ("glshader","glshader");
>>
>> data->video_sink = gst_element_factory_make
>> ("glimagesink","glimagesink");
>>
>>
>>
>> filtercaps = gst_caps_new_simple
>> ("application/x-rtp","payload", G_TYPE_INT, 96,NULL);
>>
>> g_object_set (G_OBJECT (data->filter), "caps",
>> filtercaps, NULL);
>>
>> g_object_set (G_OBJECT (data->distortion),
>> "location", "/data/data/com.lonestar.groundpi/files/distortion.frag",
>> NULL);
>>
>> gst_caps_unref (filtercaps);
>>
>> g_object_set(G_OBJECT
>> (data->video_sink),"sync",false,NULL);
>>
>>
>>
>> if (!data->pipeline || !data->source || !data->filter
>> || !data->depayloader || !data->decoder || !data->video_sink ||
>> !data->distortion) {
>>
>> g_printerr ("One element could not be created.
>> Exiting.\n");
>>
>> return -1;
>>
>> }
>>
>>
>>
>> gst_bin_add_many (GST_BIN (data->pipeline),
>> data->source, data->filter, data->depayloader, data->decoder,
>> data->video_sink, data->distortion, NULL);
>>
>> gst_element_link_many (data->filter,
>> data->depayloader, data->decoder, NULL);
>>
>> gst_element_link_many (data->distortion,
>> data->video_sink, NULL);
>>
>> g_signal_connect (data->source, "pad-added",
>> G_CALLBACK (cb_new_pad), data->filter);
>>
>> g_signal_connect (data->decoder, "pad-added",
>> G_CALLBACK (cb_new_pad), data->distortion);
>>
>> ….
>>
>>
>>
>>
>>
>>
>>
>>
>>
>> Distortion.frag:
>>
>>
>>
>> #extension GL_ARB_texture_rectangle : enable
>>
>> precision _mediump_ float;
>>
>> varying vec2 v_texcoord;
>>
>> uniform sampler2D _tex_;
>>
>> _const_vec4 _kappa_ = vec4(2.75,1.7,0.5,0.5);
>>
>> _const_float screen_width = 1920.0;
>>
>> _const_float screen_height = 1080.0;
>>
>> _const_float scaleFactor = 0.62;
>>
>> _const_vec2 leftCenter = vec2(0.25, 0.4);
>>
>> _const_vec2 rightCenter = vec2(0.75, 0.4);
>>
>> _const_float separation = -0.025;
>>
>> _const__bool_ stereo_input = false;
>>
>>
>>
>> // Scales input texture coordinates for distortion.
>>
>> vec2 hmdWarp(vec2 LensCenter, vec2 texCoord, vec2 Scale, vec2
>> ScaleIn) {
>>
>> vec2 _theta_ = (texCoord - LensCenter) * ScaleIn;
>>
>> float rSq = theta.x * theta.x + theta.y * theta.y;
>>
>> vec2 _rvector_ = _theta_ * (kappa.x + kappa.y * rSq + kappa.z * rSq *
>> rSq + kappa.w * rSq * rSq * rSq);
>>
>> vec2 _tc_ = LensCenter + Scale * _rvector_;
>>
>> return _tc_;
>>
>> }
>>
>> _bool_validate(vec2 _tc_, _int_ eye) {
>>
>> if ( stereo_input ) {
>>
>> //keep within bounds of texture
>>
>> if ((eye == 1 && (tc.x < 0.0 || tc.x > 0.5)) ||
>>
>> (eye == 0 && (tc.x < 0.5 || tc.x > 1.0)) ||
>>
>> tc.y < 0.0 || tc.y > 1.0) {
>>
>> return false;
>>
>> }
>>
>> } else {
>>
>> if ( tc.x < 0.0 || tc.x > 1.0 ||
>>
>> tc.y < 0.0 || tc.y > 1.0 ) {
>>
>> return false;
>>
>> }
>>
>> }
>>
>> return true;
>>
>> }
>>
>> void main() {
>>
>> float as = float(screen_width / 2.0) / float(screen_height);
>>
>> vec2 Scale = vec2(0.25, as);
>>
>> vec2 ScaleIn = vec2(2.0 * scaleFactor, 1.0 / as * scaleFactor);
>>
>> vec2 texCoord = v_texcoord;
>>
>> vec2 _tc_ = vec2(0);
>>
>> vec4 color = vec4(0);
>>
>> if ( texCoord.x < 0.5 ) {
>>
>> texCoord.x += separation;
>>
>> texCoord = hmdWarp(leftCenter, texCoord, Scale, ScaleIn );
>>
>> if ( !stereo_input ) {
>>
>> texCoord.x *= 2.0;
>>
>> }
>>
>> color = texture2D(_tex_, texCoord);
>>
>> if ( !validate(texCoord, 0) ) {
>>
>> color = vec4(0);
>>
>> }
>>
>> } else {
>>
>> texCoord.x -= separation;
>>
>> texCoord = hmdWarp(rightCenter, texCoord, Scale, ScaleIn);
>>
>> if ( !stereo_input ) {
>>
>> texCoord.x = (texCoord.x - 0.5) * 2.0;
>>
>> }
>>
>> color = texture2D(_tex_, texCoord);
>>
>> if ( !validate(texCoord, 1) ) {
>>
>> color = vec4(0);
>>
>> }
>>
>> }
>>
>> gl_FragColor = color;
>>
>> }
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>>
>> _______________________________________________
>> gstreamer-devel mailing list
>> gstreamer-devel at lists.freedesktop.org
>> http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel
> -------------- next part -------------- An HTML attachment was
> scrubbed...
> URL:
> <http://lists.freedesktop.org/archives/gstreamer-devel/attachments/201
> 41115/755b9301/attachment.html>
>
> ------------------------------
>
> Subject: Digest Footer
>
> _______________________________________________
> gstreamer-devel mailing list
> gstreamer-devel at lists.freedesktop.org
> http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel
>
>
> ------------------------------
>
> End of gstreamer-devel Digest, Vol 46, Issue 43
> ***********************************************
>
> _______________________________________________
> gstreamer-devel mailing list
> gstreamer-devel at lists.freedesktop.org
> http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel
_______________________________________________
gstreamer-devel mailing list
gstreamer-devel at lists.freedesktop.org
http://lists.freedesktop.org/mailman/listinfo/gstreamer-devel
More information about the gstreamer-devel
mailing list