[Mesa-dev] [PATCH] draw/llvm: reduce memory usage
Roland Scheidegger
sroland at vmware.com
Wed Apr 23 14:54:10 PDT 2014
On 23.04.2014 23:10, Zack Rusin wrote:
> Let's make the draw_get_option_use_llvm() function available
> unconditionally and use it to avoid useless allocations when the LLVM
> paths are active. The TGSI machine is never used when we're using LLVM.
>
> Signed-off-by: Zack Rusin <zackr at vmware.com>
> ---
> src/gallium/auxiliary/draw/draw_context.c | 6 ++++++
> src/gallium/auxiliary/draw/draw_context.h | 2 --
> src/gallium/auxiliary/draw/draw_gs.c | 26 ++++++++++++--------------
> src/gallium/auxiliary/draw/draw_vs.c | 11 +++++++----
> src/gallium/auxiliary/draw/draw_vs_exec.c | 2 ++
> 5 files changed, 27 insertions(+), 20 deletions(-)
>
> diff --git a/src/gallium/auxiliary/draw/draw_context.c b/src/gallium/auxiliary/draw/draw_context.c
> index 0a67879..ddc305b 100644
> --- a/src/gallium/auxiliary/draw/draw_context.c
> +++ b/src/gallium/auxiliary/draw/draw_context.c
> @@ -68,6 +68,12 @@ draw_get_option_use_llvm(void)
> }
> return value;
> }
> +#else
> +boolean
> +draw_get_option_use_llvm(void)
> +{
> + return FALSE;
> +}
> #endif
>
>
> diff --git a/src/gallium/auxiliary/draw/draw_context.h b/src/gallium/auxiliary/draw/draw_context.h
> index f114f50..48549fe 100644
> --- a/src/gallium/auxiliary/draw/draw_context.h
> +++ b/src/gallium/auxiliary/draw/draw_context.h
> @@ -288,9 +288,7 @@ draw_get_shader_param(unsigned shader, enum pipe_shader_cap param);
> int
> draw_get_shader_param_no_llvm(unsigned shader, enum pipe_shader_cap param);
>
> -#ifdef HAVE_LLVM
> boolean
> draw_get_option_use_llvm(void);
> -#endif
>
> #endif /* DRAW_CONTEXT_H */
> diff --git a/src/gallium/auxiliary/draw/draw_gs.c b/src/gallium/auxiliary/draw/draw_gs.c
> index 7de5e03..5e503ff 100644
> --- a/src/gallium/auxiliary/draw/draw_gs.c
> +++ b/src/gallium/auxiliary/draw/draw_gs.c
> @@ -674,11 +674,7 @@ int draw_geometry_shader_run(struct draw_geometry_shader *shader,
> void draw_geometry_shader_prepare(struct draw_geometry_shader *shader,
> struct draw_context *draw)
> {
> -#ifdef HAVE_LLVM
> boolean use_llvm = draw_get_option_use_llvm();
> -#else
> - boolean use_llvm = FALSE;
> -#endif
> if (!use_llvm && shader && shader->machine->Tokens != shader->state.tokens) {
> tgsi_exec_machine_bind_shader(shader->machine,
> shader->state.tokens,
> @@ -690,16 +686,18 @@ void draw_geometry_shader_prepare(struct draw_geometry_shader *shader,
> boolean
> draw_gs_init( struct draw_context *draw )
> {
> - draw->gs.tgsi.machine = tgsi_exec_machine_create();
> - if (!draw->gs.tgsi.machine)
> - return FALSE;
> -
> - draw->gs.tgsi.machine->Primitives = align_malloc(
> - MAX_PRIMITIVES * sizeof(struct tgsi_exec_vector), 16);
> - if (!draw->gs.tgsi.machine->Primitives)
> - return FALSE;
> - memset(draw->gs.tgsi.machine->Primitives, 0,
> - MAX_PRIMITIVES * sizeof(struct tgsi_exec_vector));
> + if (!draw_get_option_use_llvm()) {
> + draw->gs.tgsi.machine = tgsi_exec_machine_create();
> + if (!draw->gs.tgsi.machine)
> + return FALSE;
> +
> + draw->gs.tgsi.machine->Primitives = align_malloc(
> + MAX_PRIMITIVES * sizeof(struct tgsi_exec_vector), 16);
> + if (!draw->gs.tgsi.machine->Primitives)
> + return FALSE;
> + memset(draw->gs.tgsi.machine->Primitives, 0,
> + MAX_PRIMITIVES * sizeof(struct tgsi_exec_vector));
> + }
>
> return TRUE;
> }
> diff --git a/src/gallium/auxiliary/draw/draw_vs.c b/src/gallium/auxiliary/draw/draw_vs.c
> index 55cbeb9..8bb9a7f 100644
> --- a/src/gallium/auxiliary/draw/draw_vs.c
> +++ b/src/gallium/auxiliary/draw/draw_vs.c
> @@ -149,9 +149,11 @@ draw_vs_init( struct draw_context *draw )
> {
> draw->dump_vs = debug_get_option_gallium_dump_vs();
>
> - draw->vs.tgsi.machine = tgsi_exec_machine_create();
> - if (!draw->vs.tgsi.machine)
> - return FALSE;
> + if (!draw_get_option_use_llvm()) {
> + draw->vs.tgsi.machine = tgsi_exec_machine_create();
> + if (!draw->vs.tgsi.machine)
> + return FALSE;
> + }
>
> draw->vs.emit_cache = translate_cache_create();
> if (!draw->vs.emit_cache)
> @@ -173,7 +175,8 @@ draw_vs_destroy( struct draw_context *draw )
> if (draw->vs.emit_cache)
> translate_cache_destroy(draw->vs.emit_cache);
>
> - tgsi_exec_machine_destroy(draw->vs.tgsi.machine);
> + if (!draw_get_option_use_llvm())
> + tgsi_exec_machine_destroy(draw->vs.tgsi.machine);
> }
>
>
> diff --git a/src/gallium/auxiliary/draw/draw_vs_exec.c b/src/gallium/auxiliary/draw/draw_vs_exec.c
> index 133b116..6a18d8c 100644
> --- a/src/gallium/auxiliary/draw/draw_vs_exec.c
> +++ b/src/gallium/auxiliary/draw/draw_vs_exec.c
> @@ -63,6 +63,7 @@ vs_exec_prepare( struct draw_vertex_shader *shader,
> {
> struct exec_vertex_shader *evs = exec_vertex_shader(shader);
>
> + debug_assert(!draw_get_option_use_llvm());
> /* Specify the vertex program to interpret/execute.
> * Avoid rebinding when possible.
> */
> @@ -96,6 +97,7 @@ vs_exec_run_linear( struct draw_vertex_shader *shader,
> unsigned slot;
> boolean clamp_vertex_color = shader->draw->rasterizer->clamp_vertex_color;
>
> + debug_assert(!draw_get_option_use_llvm());
> tgsi_exec_set_constant_buffers(machine, PIPE_MAX_CONSTANT_BUFFERS,
> constants, const_size);
>
>
With the patch you mentioned, this looks good to me.
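
For anyone reading the archive later: the pattern here is to provide a
stub draw_get_option_use_llvm() when HAVE_LLVM is not defined, so callers
can replace compile-time #ifdef HAVE_LLVM blocks with a plain runtime
check, and to allocate the TGSI fallback machinery only on the paths that
will actually use it. A simplified, standalone sketch of that shape
(made-up names such as use_feature/context_init, not the actual draw
module code):

  #include <stdbool.h>
  #include <stdlib.h>

  #ifdef HAVE_FEATURE
  /* Real query: consult the environment once and cache the result. */
  static bool use_feature(void)
  {
     static int cached = -1;
     if (cached < 0)
        cached = (getenv("FEATURE_DISABLE") == NULL);
     return cached;
  }
  #else
  /* Stub so callers never need #ifdef HAVE_FEATURE themselves. */
  static bool use_feature(void)
  {
     return false;
  }
  #endif

  struct context {
     void *fallback_machine;   /* only needed when the feature is off */
  };

  static bool context_init(struct context *ctx)
  {
     ctx->fallback_machine = NULL;
     if (!use_feature()) {
        /* Allocate the fallback interpreter only when it will be used. */
        ctx->fallback_machine = malloc(1024);
        if (!ctx->fallback_machine)
           return false;
     }
     return true;
  }

  static void context_destroy(struct context *ctx)
  {
     /* Mirror the init-time condition so we free exactly what we made. */
     if (!use_feature())
        free(ctx->fallback_machine);
  }

The net effect in the draw module is that when the LLVM paths are taken,
the TGSI exec machines for the vertex and geometry shaders are no longer
allocated at all.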
Reviewed-by: Roland Scheidegger <sroland at vmware.com>