[Piglit] [PATCH] fbo-blending-formats: Correct precision of signed normalized types.

Brian Paul brianp at vmware.com
Thu Dec 6 07:04:55 PST 2012


On 12/06/2012 06:21 AM, jfonseca at vmware.com wrote:
> From: José Fonseca <jfonseca at vmware.com>
>
> Signed types have less precision due to the sign bit.
>
> Without this it is impossible to devise an efficient way of lerping
> signed normalized types that makes this test happy -- it would require
> 32-bit float/int multiplies and divides to get exact results.
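
Right -- one bit goes to the sign.  For reference, a quick sketch
(untested, not part of the patch; assumes the usual GL SNORM encoding
f = c / (2^(n-1) - 1)) of the step sizes involved:

	#include <stdio.h>

	/* An n-bit SNORM channel spends one bit on the sign, leaving
	 * n-1 magnitude bits, so its smallest step matches an
	 * (n-1)-bit UNORM channel. */
	static float
	snorm_step(int bits)
	{
		return 1.0f / (float)((1 << (bits - 1)) - 1);
	}

	int
	main(void)
	{
		printf("8-bit UNORM step: %f\n", 1.0f / 255.0f); /* ~0.003922 */
		printf("8-bit SNORM step: %f\n", snorm_step(8)); /* ~0.007874 */
		return 0;
	}
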
> ---
>   tests/fbo/fbo-blending-formats.c |   46 ++++++++++++++++++++++++++++----------
>   1 file changed, 34 insertions(+), 12 deletions(-)
>
> diff --git a/tests/fbo/fbo-blending-formats.c b/tests/fbo/fbo-blending-formats.c
> index 3271429..7bc6686 100644
> --- a/tests/fbo/fbo-blending-formats.c
> +++ b/tests/fbo/fbo-blending-formats.c
> @@ -61,6 +61,22 @@ static void blend(const float *rect, const float *src, const float *dst, const f
>   						  a >= 0.333 ? 0.333f : 0.0f) : \
>   	 (a))
>
> +static int
> +get_texture_bits(GLenum target, GLenum size_enum, GLenum type_enum) {

The opening { should go on the next line (see the sketch below).


> +	GLint size = 0;
> +	glGetTexLevelParameteriv(target, 0, size_enum, &size);
> +	if (!size) {
> +		return size;
> +	}
> +	GLint type = GL_NONE;

Let's move this declaration before the statements, for MSVC -- its C
compiler still wants C89-style declarations at the top of a block.
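
Something like this (untested) should address both nitpicks:

	static int
	get_texture_bits(GLenum target, GLenum size_enum, GLenum type_enum)
	{
		GLint size = 0;
		GLint type = GL_NONE;	/* declared up front for MSVC */

		glGetTexLevelParameteriv(target, 0, size_enum, &size);
		if (!size) {
			return size;
		}
		glGetTexLevelParameteriv(target, 0, type_enum, &type);
		if (type == GL_SIGNED_NORMALIZED) {
			/* One bit is lost for the sign */
			size -= 1;
		}
		return size;
	}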


> +	glGetTexLevelParameteriv(target, 0, type_enum, &type);
> +	if (type == GL_SIGNED_NORMALIZED) {
> +		/* One bit is lost for the sign */
> +		size -= 1;
> +	}
> +	return size;
> +}
> +
>   static enum piglit_result test_format(const struct format_desc *format)
>   {
>   	GLboolean pass = GL_TRUE;
> @@ -128,18 +144,24 @@ static enum piglit_result test_format(const struct format_desc *format)
>   		     piglit_width, piglit_height, 0,
>   		     GL_RGBA, GL_FLOAT, NULL);
>
> -	glGetTexLevelParameteriv(GL_TEXTURE_2D, 0,
> -				 GL_TEXTURE_LUMINANCE_SIZE, &l);
> -	glGetTexLevelParameteriv(GL_TEXTURE_2D, 0,
> -				 GL_TEXTURE_ALPHA_SIZE, &a);
> -	glGetTexLevelParameteriv(GL_TEXTURE_2D, 0,
> -				 GL_TEXTURE_INTENSITY_SIZE, &i);
> -	glGetTexLevelParameteriv(GL_TEXTURE_2D, 0,
> -				 GL_TEXTURE_RED_SIZE, &r);
> -	glGetTexLevelParameteriv(GL_TEXTURE_2D, 0,
> -				 GL_TEXTURE_GREEN_SIZE, &g);
> -	glGetTexLevelParameteriv(GL_TEXTURE_2D, 0,
> -				 GL_TEXTURE_BLUE_SIZE, &b);
> +	l = get_texture_bits(GL_TEXTURE_2D,
> +			     GL_TEXTURE_LUMINANCE_SIZE,
> +			     GL_TEXTURE_LUMINANCE_TYPE);
> +	a = get_texture_bits(GL_TEXTURE_2D,
> +			     GL_TEXTURE_ALPHA_SIZE,
> +			     GL_TEXTURE_ALPHA_TYPE);
> +	i = get_texture_bits(GL_TEXTURE_2D,
> +			     GL_TEXTURE_INTENSITY_SIZE,
> +			     GL_TEXTURE_INTENSITY_TYPE);
> +	r = get_texture_bits(GL_TEXTURE_2D,
> +			     GL_TEXTURE_RED_SIZE,
> +			     GL_TEXTURE_RED_TYPE);
> +	g = get_texture_bits(GL_TEXTURE_2D,
> +			     GL_TEXTURE_GREEN_SIZE,
> +			     GL_TEXTURE_GREEN_TYPE);
> +	b = get_texture_bits(GL_TEXTURE_2D,
> +			     GL_TEXTURE_BLUE_SIZE,
> +			     GL_TEXTURE_BLUE_TYPE);
>
>   	/* Compute expected result colors when reading back from a texture/FBO */
>           if (i) {


Reviewed-by: Brian Paul <brianp at vmware.com>

