[Mesa-dev] [PATCH 5/6] i965/draw: Do resolves properly for textures used by TXF
Iago Toral
itoral at igalia.com
Thu Jan 11 09:48:21 UTC 2018
On Wed, 2018-01-10 at 11:22 -0800, Jason Ekstrand wrote:
> ---
> src/mesa/drivers/dri/i965/brw_draw.c | 41
> ++++++++++++++++++++++++++++++++++++
> 1 file changed, 41 insertions(+)
>
> diff --git a/src/mesa/drivers/dri/i965/brw_draw.c
> b/src/mesa/drivers/dri/i965/brw_draw.c
> index 4945dec..9fd44e4 100644
> --- a/src/mesa/drivers/dri/i965/brw_draw.c
> +++ b/src/mesa/drivers/dri/i965/brw_draw.c
> @@ -40,6 +40,7 @@
> #include "swrast_setup/swrast_setup.h"
> #include "drivers/common/meta.h"
> #include "util/bitscan.h"
> +#include "util/bitset.h"
>
> #include "brw_blorp.h"
> #include "brw_draw.h"
> @@ -371,6 +372,20 @@ intel_disable_rb_aux_buffer(struct brw_context
> *brw,
> return found;
> }
>
> +static void
> +mark_textures_used_for_txf(BITSET_WORD *used_for_txf,
> + const struct gl_program *prog)
> +{
> + if (!prog)
> + return;
> +
> + unsigned mask = prog->SamplersUsed & prog-
> >info.textures_used_by_txf;
> + while (mask) {
> + int s = u_bit_scan(&mask);
> + BITSET_SET(used_for_txf, prog->SamplerUnits[s]);
> + }
> +}
> +
> /**
> * \brief Resolve buffers before drawing.
> *
> @@ -386,6 +401,18 @@ brw_predraw_resolve_inputs(struct brw_context
> *brw, bool rendering)
> memset(brw->draw_aux_buffer_disabled, 0,
> sizeof(brw->draw_aux_buffer_disabled));
>
> + BITSET_DECLARE(used_for_txf, MAX_COMBINED_TEXTURE_IMAGE_UNITS);
> + memset(used_for_txf, 0, sizeof(used_for_txf));
> + if (rendering) {
> + mark_textures_used_for_txf(used_for_txf, ctx-
> >VertexProgram._Current);
> + mark_textures_used_for_txf(used_for_txf, ctx-
> >TessCtrlProgram._Current);
> + mark_textures_used_for_txf(used_for_txf, ctx-
> >TessEvalProgram._Current);
> + mark_textures_used_for_txf(used_for_txf, ctx-
> >GeometryProgram._Current);
> + mark_textures_used_for_txf(used_for_txf, ctx-
> >FragmentProgram._Current);
> + } else {
> + mark_textures_used_for_txf(used_for_txf, ctx-
> >ComputeProgram._Current);
> + }
> +
> /* Resolve depth buffer and render cache of each enabled texture.
> */
> int maxEnabledUnit = ctx->Texture._MaxEnabledTexImageUnit;
> for (int i = 0; i <= maxEnabledUnit; i++) {
> @@ -422,6 +449,20 @@ brw_predraw_resolve_inputs(struct brw_context
> *brw, bool rendering)
> min_layer, num_layers,
> disable_aux);
>
> + /* If any programs are using it with texelFetch, we may need
> to also do
> + * a prepare with an sRGB format to ensure texelFetch works
> "properly".
> + */
I am not sure I understand this. The only way that txf_format and
view_format can differ is if the texture format is sRGB and the
user has selected GL_SKIP_DECODE_EXT, right? If the user selected that,
it would mean that they do not want sRGB-decoded data when sampling,
yet that seems to be exactly what they would get, since we are preparing
the MT with the sRGB format in this case. What am I missing?
> + if (BITSET_TEST(used_for_txf, i)) {
> + enum isl_format txf_format =
> + translate_tex_format(brw, tex_obj->_Format,
> GL_DECODE_EXT);
> + if (txf_format != view_format) {
> + intel_miptree_prepare_texture(brw, tex_obj->mt,
> txf_format,
> + min_level, num_levels,
> + min_layer, num_layers,
> + disable_aux);
> + }
> + }
> +
> brw_cache_flush_for_read(brw, tex_obj->mt->bo);
>
> if (tex_obj->base.StencilSampling ||
More information about the mesa-dev
mailing list