[Mesa-dev] [PATCH v3 06/12] anv: modify the internal concept of format to express multiple planes

Jason Ekstrand jason at jlekstrand.net
Wed Oct 4 21:16:12 UTC 2017


All my comments below are on chunks that are no longer needed now that
anv_get_isl_format hasn't had its name changed.  Take or leave them as
your personal level of pedantry dictates. :)

On Wed, Oct 4, 2017 at 10:34 AM, Lionel Landwerlin <
lionel.g.landwerlin at intel.com> wrote:

> A given Vulkan format can now be decomposed into a set of planes. We
> now use 'struct anv_format_plane' to represent the format of those
> planes.
>
> v2: by Jason
>     Rename anv_get_plane_format() to anv_get_format_plane()
>     Don't rename anv_get_isl_format()
>     Replace ds_fmt() by fmt2()
>     Introduce fmt_unsupported()
>
> Signed-off-by: Lionel Landwerlin <lionel.g.landwerlin at intel.com>
> ---
>  src/intel/vulkan/anv_blorp.c     |  18 +-
>  src/intel/vulkan/anv_formats.c   | 512 +++++++++++++++++++++---------
> ---------
>  src/intel/vulkan/anv_image.c     |  12 +-
>  src/intel/vulkan/anv_private.h   |  54 ++++-
>  src/intel/vulkan/genX_pipeline.c |   7 +-
>  5 files changed, 339 insertions(+), 264 deletions(-)
>
> diff --git a/src/intel/vulkan/anv_blorp.c b/src/intel/vulkan/anv_blorp.c
> index 8dead1d87a8..187042c71cf 100644
> --- a/src/intel/vulkan/anv_blorp.c
> +++ b/src/intel/vulkan/anv_blorp.c
> @@ -459,12 +459,12 @@ void anv_CmdBlitImage(
>        get_blorp_surf_for_anv_image(dst_image, dst_res->aspectMask,
>                                     dst_image->aux_usage, &dst);
>
> -      struct anv_format src_format =
> -         anv_get_format(&cmd_buffer->device->info, src_image->vk_format,
> -                        src_res->aspectMask, src_image->tiling);
> -      struct anv_format dst_format =
> -         anv_get_format(&cmd_buffer->device->info, dst_image->vk_format,
> -                        dst_res->aspectMask, dst_image->tiling);
> +      struct anv_format_plane src_format =
> +         anv_get_format_plane(&cmd_buffer->device->info,
> src_image->vk_format,
> +                              src_res->aspectMask, src_image->tiling);
> +      struct anv_format_plane dst_format =
> +         anv_get_format_plane(&cmd_buffer->device->info,
> dst_image->vk_format,
> +                              dst_res->aspectMask, dst_image->tiling);
>
>        unsigned dst_start, dst_end;
>        if (dst_image->type == VK_IMAGE_TYPE_3D) {
> @@ -758,9 +758,9 @@ void anv_CmdClearColorImage(
>
>        assert(pRanges[r].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
>
> -      struct anv_format src_format =
> -         anv_get_format(&cmd_buffer->device->info, image->vk_format,
> -                        VK_IMAGE_ASPECT_COLOR_BIT, image->tiling);
> +      struct anv_format_plane src_format =
> +         anv_get_format_plane(&cmd_buffer->device->info,
> image->vk_format,
> +                              VK_IMAGE_ASPECT_COLOR_BIT, image->tiling);
>
>        unsigned base_layer = pRanges[r].baseArrayLayer;
>        unsigned layer_count = anv_get_layerCount(image, &pRanges[r]);
> diff --git a/src/intel/vulkan/anv_formats.c b/src/intel/vulkan/anv_
> formats.c
> index 9db80ba14e3..e623b4f6324 100644
> --- a/src/intel/vulkan/anv_formats.c
> +++ b/src/intel/vulkan/anv_formats.c
> @@ -44,14 +44,40 @@
>  #define BGRA _ISL_SWIZZLE(BLUE, GREEN, RED, ALPHA)
>  #define RGB1 _ISL_SWIZZLE(RED, GREEN, BLUE, ONE)
>
> -#define swiz_fmt(__vk_fmt, __hw_fmt, __swizzle)     \
> +#define _fmt(__hw_fmt, __swizzle) \
> +   { .isl_format = __hw_fmt, \
> +     .swizzle = __swizzle }
> +
> +#define swiz_fmt1(__vk_fmt, __hw_fmt, __swizzle) \
>     [VK_ENUM_OFFSET(__vk_fmt)] = { \
> -      .isl_format = __hw_fmt, \
> -      .swizzle = __swizzle, \
> +      .planes = { \
> +          { .isl_format = __hw_fmt, .swizzle = __swizzle },     \
> +      }, \
> +      .n_planes = 1, \
>     }
>
> -#define fmt(__vk_fmt, __hw_fmt) \
> -   swiz_fmt(__vk_fmt, __hw_fmt, RGBA)
> +#define fmt1(__vk_fmt, __hw_fmt) \
> +   swiz_fmt1(__vk_fmt, __hw_fmt, RGBA)
> +
> +#define fmt2(__vk_fmt, __fmt1, __fmt2) \
> +   [VK_ENUM_OFFSET(__vk_fmt)] = { \
> +      .planes = { \
> +         { .isl_format = __fmt1, \
> +           .swizzle = RGBA,       \
> +         }, \
> +         { .isl_format = __fmt2, \
> +           .swizzle = RGBA,       \
> +         }, \
> +      }, \
> +      .n_planes = 2, \
> +   }
> +
> +#define fmt_unsupported(__vk_fmt) \
> +   [VK_ENUM_OFFSET(__vk_fmt)] = { \
> +      .planes = { \
> +         { .isl_format = ISL_FORMAT_UNSUPPORTED, }, \
> +      }, \
> +   }
>
>  /* HINT: For array formats, the ISL name should match the VK name.  For
>   * packed formats, they should have the channels in reverse order from
> each
> @@ -59,196 +85,199 @@
>   * bspec) names are in LSB -> MSB order while VK formats are MSB -> LSB.
>   */
>  static const struct anv_format main_formats[] = {
> -   fmt(VK_FORMAT_UNDEFINED,               ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_R4G4_UNORM_PACK8,        ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_R4G4B4A4_UNORM_PACK16,   ISL_FORMAT_A4B4G4R4_UNORM),
> -   swiz_fmt(VK_FORMAT_B4G4R4A4_UNORM_PACK16,
>  ISL_FORMAT_A4B4G4R4_UNORM,  BGRA),
> -   fmt(VK_FORMAT_R5G6B5_UNORM_PACK16,     ISL_FORMAT_B5G6R5_UNORM),
> -   swiz_fmt(VK_FORMAT_B5G6R5_UNORM_PACK16,     ISL_FORMAT_B5G6R5_UNORM,
> BGRA),
> -   fmt(VK_FORMAT_R5G5B5A1_UNORM_PACK16,   ISL_FORMAT_A1B5G5R5_UNORM),
> -   fmt(VK_FORMAT_B5G5R5A1_UNORM_PACK16,   ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_A1R5G5B5_UNORM_PACK16,   ISL_FORMAT_B5G5R5A1_UNORM),
> -   fmt(VK_FORMAT_R8_UNORM,                ISL_FORMAT_R8_UNORM),
> -   fmt(VK_FORMAT_R8_SNORM,                ISL_FORMAT_R8_SNORM),
> -   fmt(VK_FORMAT_R8_USCALED,              ISL_FORMAT_R8_USCALED),
> -   fmt(VK_FORMAT_R8_SSCALED,              ISL_FORMAT_R8_SSCALED),
> -   fmt(VK_FORMAT_R8_UINT,                 ISL_FORMAT_R8_UINT),
> -   fmt(VK_FORMAT_R8_SINT,                 ISL_FORMAT_R8_SINT),
> -   swiz_fmt(VK_FORMAT_R8_SRGB,            ISL_FORMAT_L8_UNORM_SRGB,
> -                                          _ISL_SWIZZLE(RED, ZERO, ZERO,
> ONE)),
> -   fmt(VK_FORMAT_R8G8_UNORM,              ISL_FORMAT_R8G8_UNORM),
> -   fmt(VK_FORMAT_R8G8_SNORM,              ISL_FORMAT_R8G8_SNORM),
> -   fmt(VK_FORMAT_R8G8_USCALED,            ISL_FORMAT_R8G8_USCALED),
> -   fmt(VK_FORMAT_R8G8_SSCALED,            ISL_FORMAT_R8G8_SSCALED),
> -   fmt(VK_FORMAT_R8G8_UINT,               ISL_FORMAT_R8G8_UINT),
> -   fmt(VK_FORMAT_R8G8_SINT,               ISL_FORMAT_R8G8_SINT),
> -   fmt(VK_FORMAT_R8G8_SRGB,               ISL_FORMAT_UNSUPPORTED), /*
> L8A8_UNORM_SRGB */
> -   fmt(VK_FORMAT_R8G8B8_UNORM,            ISL_FORMAT_R8G8B8_UNORM),
> -   fmt(VK_FORMAT_R8G8B8_SNORM,            ISL_FORMAT_R8G8B8_SNORM),
> -   fmt(VK_FORMAT_R8G8B8_USCALED,          ISL_FORMAT_R8G8B8_USCALED),
> -   fmt(VK_FORMAT_R8G8B8_SSCALED,          ISL_FORMAT_R8G8B8_SSCALED),
> -   fmt(VK_FORMAT_R8G8B8_UINT,             ISL_FORMAT_R8G8B8_UINT),
> -   fmt(VK_FORMAT_R8G8B8_SINT,             ISL_FORMAT_R8G8B8_SINT),
> -   fmt(VK_FORMAT_R8G8B8_SRGB,             ISL_FORMAT_R8G8B8_UNORM_SRGB),
> -   fmt(VK_FORMAT_R8G8B8A8_UNORM,          ISL_FORMAT_R8G8B8A8_UNORM),
> -   fmt(VK_FORMAT_R8G8B8A8_SNORM,          ISL_FORMAT_R8G8B8A8_SNORM),
> -   fmt(VK_FORMAT_R8G8B8A8_USCALED,        ISL_FORMAT_R8G8B8A8_USCALED),
> -   fmt(VK_FORMAT_R8G8B8A8_SSCALED,        ISL_FORMAT_R8G8B8A8_SSCALED),
> -   fmt(VK_FORMAT_R8G8B8A8_UINT,           ISL_FORMAT_R8G8B8A8_UINT),
> -   fmt(VK_FORMAT_R8G8B8A8_SINT,           ISL_FORMAT_R8G8B8A8_SINT),
> -   fmt(VK_FORMAT_R8G8B8A8_SRGB,           ISL_FORMAT_R8G8B8A8_UNORM_
> SRGB),
> -   fmt(VK_FORMAT_A8B8G8R8_UNORM_PACK32,   ISL_FORMAT_R8G8B8A8_UNORM),
> -   fmt(VK_FORMAT_A8B8G8R8_SNORM_PACK32,   ISL_FORMAT_R8G8B8A8_SNORM),
> -   fmt(VK_FORMAT_A8B8G8R8_USCALED_PACK32, ISL_FORMAT_R8G8B8A8_USCALED),
> -   fmt(VK_FORMAT_A8B8G8R8_SSCALED_PACK32, ISL_FORMAT_R8G8B8A8_SSCALED),
> -   fmt(VK_FORMAT_A8B8G8R8_UINT_PACK32,    ISL_FORMAT_R8G8B8A8_UINT),
> -   fmt(VK_FORMAT_A8B8G8R8_SINT_PACK32,    ISL_FORMAT_R8G8B8A8_SINT),
> -   fmt(VK_FORMAT_A8B8G8R8_SRGB_PACK32,    ISL_FORMAT_R8G8B8A8_UNORM_
> SRGB),
> -   fmt(VK_FORMAT_A2R10G10B10_UNORM_PACK32, ISL_FORMAT_B10G10R10A2_UNORM),
> -   fmt(VK_FORMAT_A2R10G10B10_SNORM_PACK32, ISL_FORMAT_B10G10R10A2_SNORM),
> -   fmt(VK_FORMAT_A2R10G10B10_USCALED_PACK32, ISL_FORMAT_B10G10R10A2_
> USCALED),
> -   fmt(VK_FORMAT_A2R10G10B10_SSCALED_PACK32, ISL_FORMAT_B10G10R10A2_
> SSCALED),
> -   fmt(VK_FORMAT_A2R10G10B10_UINT_PACK32, ISL_FORMAT_B10G10R10A2_UINT),
> -   fmt(VK_FORMAT_A2R10G10B10_SINT_PACK32, ISL_FORMAT_B10G10R10A2_SINT),
> -   fmt(VK_FORMAT_A2B10G10R10_UNORM_PACK32, ISL_FORMAT_R10G10B10A2_UNORM),
> -   fmt(VK_FORMAT_A2B10G10R10_SNORM_PACK32, ISL_FORMAT_R10G10B10A2_SNORM),
> -   fmt(VK_FORMAT_A2B10G10R10_USCALED_PACK32, ISL_FORMAT_R10G10B10A2_
> USCALED),
> -   fmt(VK_FORMAT_A2B10G10R10_SSCALED_PACK32, ISL_FORMAT_R10G10B10A2_
> SSCALED),
> -   fmt(VK_FORMAT_A2B10G10R10_UINT_PACK32, ISL_FORMAT_R10G10B10A2_UINT),
> -   fmt(VK_FORMAT_A2B10G10R10_SINT_PACK32, ISL_FORMAT_R10G10B10A2_SINT),
> -   fmt(VK_FORMAT_R16_UNORM,               ISL_FORMAT_R16_UNORM),
> -   fmt(VK_FORMAT_R16_SNORM,               ISL_FORMAT_R16_SNORM),
> -   fmt(VK_FORMAT_R16_USCALED,             ISL_FORMAT_R16_USCALED),
> -   fmt(VK_FORMAT_R16_SSCALED,             ISL_FORMAT_R16_SSCALED),
> -   fmt(VK_FORMAT_R16_UINT,                ISL_FORMAT_R16_UINT),
> -   fmt(VK_FORMAT_R16_SINT,                ISL_FORMAT_R16_SINT),
> -   fmt(VK_FORMAT_R16_SFLOAT,              ISL_FORMAT_R16_FLOAT),
> -   fmt(VK_FORMAT_R16G16_UNORM,            ISL_FORMAT_R16G16_UNORM),
> -   fmt(VK_FORMAT_R16G16_SNORM,            ISL_FORMAT_R16G16_SNORM),
> -   fmt(VK_FORMAT_R16G16_USCALED,          ISL_FORMAT_R16G16_USCALED),
> -   fmt(VK_FORMAT_R16G16_SSCALED,          ISL_FORMAT_R16G16_SSCALED),
> -   fmt(VK_FORMAT_R16G16_UINT,             ISL_FORMAT_R16G16_UINT),
> -   fmt(VK_FORMAT_R16G16_SINT,             ISL_FORMAT_R16G16_SINT),
> -   fmt(VK_FORMAT_R16G16_SFLOAT,           ISL_FORMAT_R16G16_FLOAT),
> -   fmt(VK_FORMAT_R16G16B16_UNORM,         ISL_FORMAT_R16G16B16_UNORM),
> -   fmt(VK_FORMAT_R16G16B16_SNORM,         ISL_FORMAT_R16G16B16_SNORM),
> -   fmt(VK_FORMAT_R16G16B16_USCALED,       ISL_FORMAT_R16G16B16_USCALED),
> -   fmt(VK_FORMAT_R16G16B16_SSCALED,       ISL_FORMAT_R16G16B16_SSCALED),
> -   fmt(VK_FORMAT_R16G16B16_UINT,          ISL_FORMAT_R16G16B16_UINT),
> -   fmt(VK_FORMAT_R16G16B16_SINT,          ISL_FORMAT_R16G16B16_SINT),
> -   fmt(VK_FORMAT_R16G16B16_SFLOAT,        ISL_FORMAT_R16G16B16_FLOAT),
> -   fmt(VK_FORMAT_R16G16B16A16_UNORM,      ISL_FORMAT_R16G16B16A16_UNORM),
> -   fmt(VK_FORMAT_R16G16B16A16_SNORM,      ISL_FORMAT_R16G16B16A16_SNORM),
> -   fmt(VK_FORMAT_R16G16B16A16_USCALED,    ISL_FORMAT_R16G16B16A16_
> USCALED),
> -   fmt(VK_FORMAT_R16G16B16A16_SSCALED,    ISL_FORMAT_R16G16B16A16_
> SSCALED),
> -   fmt(VK_FORMAT_R16G16B16A16_UINT,       ISL_FORMAT_R16G16B16A16_UINT),
> -   fmt(VK_FORMAT_R16G16B16A16_SINT,       ISL_FORMAT_R16G16B16A16_SINT),
> -   fmt(VK_FORMAT_R16G16B16A16_SFLOAT,     ISL_FORMAT_R16G16B16A16_FLOAT),
> -   fmt(VK_FORMAT_R32_UINT,                ISL_FORMAT_R32_UINT),
> -   fmt(VK_FORMAT_R32_SINT,                ISL_FORMAT_R32_SINT),
> -   fmt(VK_FORMAT_R32_SFLOAT,              ISL_FORMAT_R32_FLOAT),
> -   fmt(VK_FORMAT_R32G32_UINT,             ISL_FORMAT_R32G32_UINT),
> -   fmt(VK_FORMAT_R32G32_SINT,             ISL_FORMAT_R32G32_SINT),
> -   fmt(VK_FORMAT_R32G32_SFLOAT,           ISL_FORMAT_R32G32_FLOAT),
> -   fmt(VK_FORMAT_R32G32B32_UINT,          ISL_FORMAT_R32G32B32_UINT),
> -   fmt(VK_FORMAT_R32G32B32_SINT,          ISL_FORMAT_R32G32B32_SINT),
> -   fmt(VK_FORMAT_R32G32B32_SFLOAT,        ISL_FORMAT_R32G32B32_FLOAT),
> -   fmt(VK_FORMAT_R32G32B32A32_UINT,       ISL_FORMAT_R32G32B32A32_UINT),
> -   fmt(VK_FORMAT_R32G32B32A32_SINT,       ISL_FORMAT_R32G32B32A32_SINT),
> -   fmt(VK_FORMAT_R32G32B32A32_SFLOAT,     ISL_FORMAT_R32G32B32A32_FLOAT),
> -   fmt(VK_FORMAT_R64_UINT,                ISL_FORMAT_R64_PASSTHRU),
> -   fmt(VK_FORMAT_R64_SINT,                ISL_FORMAT_R64_PASSTHRU),
> -   fmt(VK_FORMAT_R64_SFLOAT,              ISL_FORMAT_R64_PASSTHRU),
> -   fmt(VK_FORMAT_R64G64_UINT,             ISL_FORMAT_R64G64_PASSTHRU),
> -   fmt(VK_FORMAT_R64G64_SINT,             ISL_FORMAT_R64G64_PASSTHRU),
> -   fmt(VK_FORMAT_R64G64_SFLOAT,           ISL_FORMAT_R64G64_PASSTHRU),
> -   fmt(VK_FORMAT_R64G64B64_UINT,          ISL_FORMAT_R64G64B64_PASSTHRU),
> -   fmt(VK_FORMAT_R64G64B64_SINT,          ISL_FORMAT_R64G64B64_PASSTHRU),
> -   fmt(VK_FORMAT_R64G64B64_SFLOAT,        ISL_FORMAT_R64G64B64_PASSTHRU),
> -   fmt(VK_FORMAT_R64G64B64A64_UINT,       ISL_FORMAT_R64G64B64A64_
> PASSTHRU),
> -   fmt(VK_FORMAT_R64G64B64A64_SINT,       ISL_FORMAT_R64G64B64A64_
> PASSTHRU),
> -   fmt(VK_FORMAT_R64G64B64A64_SFLOAT,     ISL_FORMAT_R64G64B64A64_
> PASSTHRU),
> -   fmt(VK_FORMAT_B10G11R11_UFLOAT_PACK32, ISL_FORMAT_R11G11B10_FLOAT),
> -   fmt(VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,  ISL_FORMAT_R9G9B9E5_SHAREDEXP),
> -
> -   fmt(VK_FORMAT_D16_UNORM,               ISL_FORMAT_R16_UNORM),
> -   fmt(VK_FORMAT_X8_D24_UNORM_PACK32,     ISL_FORMAT_R24_UNORM_X8_
> TYPELESS),
> -   fmt(VK_FORMAT_D32_SFLOAT,              ISL_FORMAT_R32_FLOAT),
> -   fmt(VK_FORMAT_S8_UINT,                 ISL_FORMAT_R8_UINT),
> -   fmt(VK_FORMAT_D16_UNORM_S8_UINT,       ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_D24_UNORM_S8_UINT,       ISL_FORMAT_R24_UNORM_X8_
> TYPELESS),
> -   fmt(VK_FORMAT_D32_SFLOAT_S8_UINT,      ISL_FORMAT_R32_FLOAT),
> -
> -   swiz_fmt(VK_FORMAT_BC1_RGB_UNORM_BLOCK,     ISL_FORMAT_BC1_UNORM,
> RGB1),
> -   swiz_fmt(VK_FORMAT_BC1_RGB_SRGB_BLOCK,
> ISL_FORMAT_BC1_UNORM_SRGB, RGB1),
> -   fmt(VK_FORMAT_BC1_RGBA_UNORM_BLOCK,    ISL_FORMAT_BC1_UNORM),
> -   fmt(VK_FORMAT_BC1_RGBA_SRGB_BLOCK,     ISL_FORMAT_BC1_UNORM_SRGB),
> -   fmt(VK_FORMAT_BC2_UNORM_BLOCK,         ISL_FORMAT_BC2_UNORM),
> -   fmt(VK_FORMAT_BC2_SRGB_BLOCK,          ISL_FORMAT_BC2_UNORM_SRGB),
> -   fmt(VK_FORMAT_BC3_UNORM_BLOCK,         ISL_FORMAT_BC3_UNORM),
> -   fmt(VK_FORMAT_BC3_SRGB_BLOCK,          ISL_FORMAT_BC3_UNORM_SRGB),
> -   fmt(VK_FORMAT_BC4_UNORM_BLOCK,         ISL_FORMAT_BC4_UNORM),
> -   fmt(VK_FORMAT_BC4_SNORM_BLOCK,         ISL_FORMAT_BC4_SNORM),
> -   fmt(VK_FORMAT_BC5_UNORM_BLOCK,         ISL_FORMAT_BC5_UNORM),
> -   fmt(VK_FORMAT_BC5_SNORM_BLOCK,         ISL_FORMAT_BC5_SNORM),
> -   fmt(VK_FORMAT_BC6H_UFLOAT_BLOCK,       ISL_FORMAT_BC6H_UF16),
> -   fmt(VK_FORMAT_BC6H_SFLOAT_BLOCK,       ISL_FORMAT_BC6H_SF16),
> -   fmt(VK_FORMAT_BC7_UNORM_BLOCK,         ISL_FORMAT_BC7_UNORM),
> -   fmt(VK_FORMAT_BC7_SRGB_BLOCK,          ISL_FORMAT_BC7_UNORM_SRGB),
> -   fmt(VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, ISL_FORMAT_ETC2_RGB8),
> -   fmt(VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,  ISL_FORMAT_ETC2_SRGB8),
> -   fmt(VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, ISL_FORMAT_ETC2_RGB8_PTA),
> -   fmt(VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, ISL_FORMAT_ETC2_SRGB8_PTA),
> -   fmt(VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, ISL_FORMAT_ETC2_EAC_RGBA8),
> -   fmt(VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, ISL_FORMAT_ETC2_EAC_SRGB8_A8),
> -   fmt(VK_FORMAT_EAC_R11_UNORM_BLOCK,     ISL_FORMAT_EAC_R11),
> -   fmt(VK_FORMAT_EAC_R11_SNORM_BLOCK,     ISL_FORMAT_EAC_SIGNED_R11),
> -   fmt(VK_FORMAT_EAC_R11G11_UNORM_BLOCK,  ISL_FORMAT_EAC_RG11),
> -   fmt(VK_FORMAT_EAC_R11G11_SNORM_BLOCK,  ISL_FORMAT_EAC_SIGNED_RG11),
> -   fmt(VK_FORMAT_ASTC_4x4_SRGB_BLOCK,     ISL_FORMAT_ASTC_LDR_2D_4X4_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_5x4_SRGB_BLOCK,     ISL_FORMAT_ASTC_LDR_2D_5X4_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_5x5_SRGB_BLOCK,     ISL_FORMAT_ASTC_LDR_2D_5X5_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_6x5_SRGB_BLOCK,     ISL_FORMAT_ASTC_LDR_2D_6X5_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_6x6_SRGB_BLOCK,     ISL_FORMAT_ASTC_LDR_2D_6X6_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_8x5_SRGB_BLOCK,     ISL_FORMAT_ASTC_LDR_2D_8X5_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_8x6_SRGB_BLOCK,     ISL_FORMAT_ASTC_LDR_2D_8X6_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_8x8_SRGB_BLOCK,     ISL_FORMAT_ASTC_LDR_2D_8X8_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_10x5_SRGB_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_10X5_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_10x6_SRGB_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_10X6_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_10x8_SRGB_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_10X8_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_10x10_SRGB_BLOCK,   ISL_FORMAT_ASTC_LDR_2D_10X10_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_12x10_SRGB_BLOCK,   ISL_FORMAT_ASTC_LDR_2D_12X10_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_12x12_SRGB_BLOCK,   ISL_FORMAT_ASTC_LDR_2D_12X12_
> U8SRGB),
> -   fmt(VK_FORMAT_ASTC_4x4_UNORM_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_4X4_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_5x4_UNORM_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_5X4_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_5x5_UNORM_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_5X5_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_6x5_UNORM_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_6X5_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_6x6_UNORM_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_6X6_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_8x5_UNORM_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_8X5_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_8x6_UNORM_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_8X6_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_8x8_UNORM_BLOCK,    ISL_FORMAT_ASTC_LDR_2D_8X8_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_10x5_UNORM_BLOCK,   ISL_FORMAT_ASTC_LDR_2D_10X5_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_10x6_UNORM_BLOCK,   ISL_FORMAT_ASTC_LDR_2D_10X6_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_10x8_UNORM_BLOCK,   ISL_FORMAT_ASTC_LDR_2D_10X8_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_10x10_UNORM_BLOCK,  ISL_FORMAT_ASTC_LDR_2D_10X10_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_12x10_UNORM_BLOCK,  ISL_FORMAT_ASTC_LDR_2D_12X10_
> FLT16),
> -   fmt(VK_FORMAT_ASTC_12x12_UNORM_BLOCK,  ISL_FORMAT_ASTC_LDR_2D_12X12_
> FLT16),
> -   fmt(VK_FORMAT_B8G8R8_UNORM,            ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8_SNORM,            ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8_USCALED,          ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8_SSCALED,          ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8_UINT,             ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8_SINT,             ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8_SRGB,             ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8A8_UNORM,          ISL_FORMAT_B8G8R8A8_UNORM),
> -   fmt(VK_FORMAT_B8G8R8A8_SNORM,          ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8A8_USCALED,        ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8A8_SSCALED,        ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8A8_UINT,           ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8A8_SINT,           ISL_FORMAT_UNSUPPORTED),
> -   fmt(VK_FORMAT_B8G8R8A8_SRGB,           ISL_FORMAT_B8G8R8A8_UNORM_
> SRGB),
> +   fmt_unsupported(VK_FORMAT_UNDEFINED),
> +   fmt_unsupported(VK_FORMAT_R4G4_UNORM_PACK8),
> +   fmt1(VK_FORMAT_R4G4B4A4_UNORM_PACK16,
>  ISL_FORMAT_A4B4G4R4_UNORM),
> +   swiz_fmt1(VK_FORMAT_B4G4R4A4_UNORM_PACK16,
> ISL_FORMAT_A4B4G4R4_UNORM,  BGRA),
> +   fmt1(VK_FORMAT_R5G6B5_UNORM_PACK16,
>  ISL_FORMAT_B5G6R5_UNORM),
> +   swiz_fmt1(VK_FORMAT_B5G6R5_UNORM_PACK16,
> ISL_FORMAT_B5G6R5_UNORM, BGRA),
> +   fmt1(VK_FORMAT_R5G5B5A1_UNORM_PACK16,
>  ISL_FORMAT_A1B5G5R5_UNORM),
> +   fmt_unsupported(VK_FORMAT_B5G5R5A1_UNORM_PACK16),
> +   fmt1(VK_FORMAT_A1R5G5B5_UNORM_PACK16,
>  ISL_FORMAT_B5G5R5A1_UNORM),
> +   fmt1(VK_FORMAT_R8_UNORM,                          ISL_FORMAT_R8_UNORM),
> +   fmt1(VK_FORMAT_R8_SNORM,                          ISL_FORMAT_R8_SNORM),
> +   fmt1(VK_FORMAT_R8_USCALED,
> ISL_FORMAT_R8_USCALED),
> +   fmt1(VK_FORMAT_R8_SSCALED,
> ISL_FORMAT_R8_SSCALED),
> +   fmt1(VK_FORMAT_R8_UINT,                           ISL_FORMAT_R8_UINT),
> +   fmt1(VK_FORMAT_R8_SINT,                           ISL_FORMAT_R8_SINT),
> +   swiz_fmt1(VK_FORMAT_R8_SRGB,
> ISL_FORMAT_L8_UNORM_SRGB,
> +                                                     _ISL_SWIZZLE(RED,
> ZERO, ZERO, ONE)),
> +   fmt1(VK_FORMAT_R8G8_UNORM,
> ISL_FORMAT_R8G8_UNORM),
> +   fmt1(VK_FORMAT_R8G8_SNORM,
> ISL_FORMAT_R8G8_SNORM),
> +   fmt1(VK_FORMAT_R8G8_USCALED,
> ISL_FORMAT_R8G8_USCALED),
> +   fmt1(VK_FORMAT_R8G8_SSCALED,
> ISL_FORMAT_R8G8_SSCALED),
> +   fmt1(VK_FORMAT_R8G8_UINT,
>  ISL_FORMAT_R8G8_UINT),
> +   fmt1(VK_FORMAT_R8G8_SINT,
>  ISL_FORMAT_R8G8_SINT),
> +   fmt_unsupported(VK_FORMAT_R8G8_SRGB),             /* L8A8_UNORM_SRGB
> */
> +   fmt1(VK_FORMAT_R8G8B8_UNORM,
> ISL_FORMAT_R8G8B8_UNORM),
> +   fmt1(VK_FORMAT_R8G8B8_SNORM,
> ISL_FORMAT_R8G8B8_SNORM),
> +   fmt1(VK_FORMAT_R8G8B8_USCALED,
> ISL_FORMAT_R8G8B8_USCALED),
> +   fmt1(VK_FORMAT_R8G8B8_SSCALED,
> ISL_FORMAT_R8G8B8_SSCALED),
> +   fmt1(VK_FORMAT_R8G8B8_UINT,
>  ISL_FORMAT_R8G8B8_UINT),
> +   fmt1(VK_FORMAT_R8G8B8_SINT,
>  ISL_FORMAT_R8G8B8_SINT),
> +   fmt1(VK_FORMAT_R8G8B8_SRGB,
>  ISL_FORMAT_R8G8B8_UNORM_SRGB),
> +   fmt1(VK_FORMAT_R8G8B8A8_UNORM,
> ISL_FORMAT_R8G8B8A8_UNORM),
> +   fmt1(VK_FORMAT_R8G8B8A8_SNORM,
> ISL_FORMAT_R8G8B8A8_SNORM),
> +   fmt1(VK_FORMAT_R8G8B8A8_USCALED,
> ISL_FORMAT_R8G8B8A8_USCALED),
> +   fmt1(VK_FORMAT_R8G8B8A8_SSCALED,
> ISL_FORMAT_R8G8B8A8_SSCALED),
> +   fmt1(VK_FORMAT_R8G8B8A8_UINT,
>  ISL_FORMAT_R8G8B8A8_UINT),
> +   fmt1(VK_FORMAT_R8G8B8A8_SINT,
>  ISL_FORMAT_R8G8B8A8_SINT),
> +   fmt1(VK_FORMAT_R8G8B8A8_SRGB,
>  ISL_FORMAT_R8G8B8A8_UNORM_SRGB),
> +   fmt1(VK_FORMAT_A8B8G8R8_UNORM_PACK32,
>  ISL_FORMAT_R8G8B8A8_UNORM),
> +   fmt1(VK_FORMAT_A8B8G8R8_SNORM_PACK32,
>  ISL_FORMAT_R8G8B8A8_SNORM),
> +   fmt1(VK_FORMAT_A8B8G8R8_USCALED_PACK32,
>  ISL_FORMAT_R8G8B8A8_USCALED),
> +   fmt1(VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
>  ISL_FORMAT_R8G8B8A8_SSCALED),
> +   fmt1(VK_FORMAT_A8B8G8R8_UINT_PACK32,
> ISL_FORMAT_R8G8B8A8_UINT),
> +   fmt1(VK_FORMAT_A8B8G8R8_SINT_PACK32,
> ISL_FORMAT_R8G8B8A8_SINT),
> +   fmt1(VK_FORMAT_A8B8G8R8_SRGB_PACK32,
> ISL_FORMAT_R8G8B8A8_UNORM_SRGB),
> +   fmt1(VK_FORMAT_A2R10G10B10_UNORM_PACK32,
> ISL_FORMAT_B10G10R10A2_UNORM),
> +   fmt1(VK_FORMAT_A2R10G10B10_SNORM_PACK32,
> ISL_FORMAT_B10G10R10A2_SNORM),
> +   fmt1(VK_FORMAT_A2R10G10B10_USCALED_PACK32,
> ISL_FORMAT_B10G10R10A2_USCALED),
> +   fmt1(VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
> ISL_FORMAT_B10G10R10A2_SSCALED),
> +   fmt1(VK_FORMAT_A2R10G10B10_UINT_PACK32,
>  ISL_FORMAT_B10G10R10A2_UINT),
> +   fmt1(VK_FORMAT_A2R10G10B10_SINT_PACK32,
>  ISL_FORMAT_B10G10R10A2_SINT),
> +   fmt1(VK_FORMAT_A2B10G10R10_UNORM_PACK32,
> ISL_FORMAT_R10G10B10A2_UNORM),
> +   fmt1(VK_FORMAT_A2B10G10R10_SNORM_PACK32,
> ISL_FORMAT_R10G10B10A2_SNORM),
> +   fmt1(VK_FORMAT_A2B10G10R10_USCALED_PACK32,
> ISL_FORMAT_R10G10B10A2_USCALED),
> +   fmt1(VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
> ISL_FORMAT_R10G10B10A2_SSCALED),
> +   fmt1(VK_FORMAT_A2B10G10R10_UINT_PACK32,
>  ISL_FORMAT_R10G10B10A2_UINT),
> +   fmt1(VK_FORMAT_A2B10G10R10_SINT_PACK32,
>  ISL_FORMAT_R10G10B10A2_SINT),
> +   fmt1(VK_FORMAT_R16_UNORM,
>  ISL_FORMAT_R16_UNORM),
> +   fmt1(VK_FORMAT_R16_SNORM,
>  ISL_FORMAT_R16_SNORM),
> +   fmt1(VK_FORMAT_R16_USCALED,
>  ISL_FORMAT_R16_USCALED),
> +   fmt1(VK_FORMAT_R16_SSCALED,
>  ISL_FORMAT_R16_SSCALED),
> +   fmt1(VK_FORMAT_R16_UINT,                          ISL_FORMAT_R16_UINT),
> +   fmt1(VK_FORMAT_R16_SINT,                          ISL_FORMAT_R16_SINT),
> +   fmt1(VK_FORMAT_R16_SFLOAT,
> ISL_FORMAT_R16_FLOAT),
> +   fmt1(VK_FORMAT_R16G16_UNORM,
> ISL_FORMAT_R16G16_UNORM),
> +   fmt1(VK_FORMAT_R16G16_SNORM,
> ISL_FORMAT_R16G16_SNORM),
> +   fmt1(VK_FORMAT_R16G16_USCALED,
> ISL_FORMAT_R16G16_USCALED),
> +   fmt1(VK_FORMAT_R16G16_SSCALED,
> ISL_FORMAT_R16G16_SSCALED),
> +   fmt1(VK_FORMAT_R16G16_UINT,
>  ISL_FORMAT_R16G16_UINT),
> +   fmt1(VK_FORMAT_R16G16_SINT,
>  ISL_FORMAT_R16G16_SINT),
> +   fmt1(VK_FORMAT_R16G16_SFLOAT,
>  ISL_FORMAT_R16G16_FLOAT),
> +   fmt1(VK_FORMAT_R16G16B16_UNORM,
>  ISL_FORMAT_R16G16B16_UNORM),
> +   fmt1(VK_FORMAT_R16G16B16_SNORM,
>  ISL_FORMAT_R16G16B16_SNORM),
> +   fmt1(VK_FORMAT_R16G16B16_USCALED,
>  ISL_FORMAT_R16G16B16_USCALED),
> +   fmt1(VK_FORMAT_R16G16B16_SSCALED,
>  ISL_FORMAT_R16G16B16_SSCALED),
> +   fmt1(VK_FORMAT_R16G16B16_UINT,
> ISL_FORMAT_R16G16B16_UINT),
> +   fmt1(VK_FORMAT_R16G16B16_SINT,
> ISL_FORMAT_R16G16B16_SINT),
> +   fmt1(VK_FORMAT_R16G16B16_SFLOAT,
> ISL_FORMAT_R16G16B16_FLOAT),
> +   fmt1(VK_FORMAT_R16G16B16A16_UNORM,
> ISL_FORMAT_R16G16B16A16_UNORM),
> +   fmt1(VK_FORMAT_R16G16B16A16_SNORM,
> ISL_FORMAT_R16G16B16A16_SNORM),
> +   fmt1(VK_FORMAT_R16G16B16A16_USCALED,
> ISL_FORMAT_R16G16B16A16_USCALED),
> +   fmt1(VK_FORMAT_R16G16B16A16_SSCALED,
> ISL_FORMAT_R16G16B16A16_SSCALED),
> +   fmt1(VK_FORMAT_R16G16B16A16_UINT,
>  ISL_FORMAT_R16G16B16A16_UINT),
> +   fmt1(VK_FORMAT_R16G16B16A16_SINT,
>  ISL_FORMAT_R16G16B16A16_SINT),
> +   fmt1(VK_FORMAT_R16G16B16A16_SFLOAT,
>  ISL_FORMAT_R16G16B16A16_FLOAT),
> +   fmt1(VK_FORMAT_R32_UINT,                          ISL_FORMAT_R32_UINT),
> +   fmt1(VK_FORMAT_R32_SINT,                          ISL_FORMAT_R32_SINT),
> +   fmt1(VK_FORMAT_R32_SFLOAT,
> ISL_FORMAT_R32_FLOAT),
> +   fmt1(VK_FORMAT_R32G32_UINT,
>  ISL_FORMAT_R32G32_UINT),
> +   fmt1(VK_FORMAT_R32G32_SINT,
>  ISL_FORMAT_R32G32_SINT),
> +   fmt1(VK_FORMAT_R32G32_SFLOAT,
>  ISL_FORMAT_R32G32_FLOAT),
> +   fmt1(VK_FORMAT_R32G32B32_UINT,
> ISL_FORMAT_R32G32B32_UINT),
> +   fmt1(VK_FORMAT_R32G32B32_SINT,
> ISL_FORMAT_R32G32B32_SINT),
> +   fmt1(VK_FORMAT_R32G32B32_SFLOAT,
> ISL_FORMAT_R32G32B32_FLOAT),
> +   fmt1(VK_FORMAT_R32G32B32A32_UINT,
>  ISL_FORMAT_R32G32B32A32_UINT),
> +   fmt1(VK_FORMAT_R32G32B32A32_SINT,
>  ISL_FORMAT_R32G32B32A32_SINT),
> +   fmt1(VK_FORMAT_R32G32B32A32_SFLOAT,
>  ISL_FORMAT_R32G32B32A32_FLOAT),
> +   fmt1(VK_FORMAT_R64_UINT,
> ISL_FORMAT_R64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64_SINT,
> ISL_FORMAT_R64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64_SFLOAT,
> ISL_FORMAT_R64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64G64_UINT,
>  ISL_FORMAT_R64G64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64G64_SINT,
>  ISL_FORMAT_R64G64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64G64_SFLOAT,
>  ISL_FORMAT_R64G64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64G64B64_UINT,
> ISL_FORMAT_R64G64B64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64G64B64_SINT,
> ISL_FORMAT_R64G64B64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64G64B64_SFLOAT,
> ISL_FORMAT_R64G64B64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64G64B64A64_UINT,
>  ISL_FORMAT_R64G64B64A64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64G64B64A64_SINT,
>  ISL_FORMAT_R64G64B64A64_PASSTHRU),
> +   fmt1(VK_FORMAT_R64G64B64A64_SFLOAT,
>  ISL_FORMAT_R64G64B64A64_PASSTHRU),
> +   fmt1(VK_FORMAT_B10G11R11_UFLOAT_PACK32,
>  ISL_FORMAT_R11G11B10_FLOAT),
> +   fmt1(VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
> ISL_FORMAT_R9G9B9E5_SHAREDEXP),
> +
> +   fmt1(VK_FORMAT_D16_UNORM,
>  ISL_FORMAT_R16_UNORM),
> +   fmt1(VK_FORMAT_X8_D24_UNORM_PACK32,
>  ISL_FORMAT_R24_UNORM_X8_TYPELESS),
> +   fmt1(VK_FORMAT_D32_SFLOAT,
> ISL_FORMAT_R32_FLOAT),
> +   fmt1(VK_FORMAT_S8_UINT,                           ISL_FORMAT_R8_UINT),
> +   fmt_unsupported(VK_FORMAT_D16_UNORM_S8_UINT),
> +   fmt2(VK_FORMAT_D24_UNORM_S8_UINT,
>  ISL_FORMAT_R24_UNORM_X8_TYPELESS, ISL_FORMAT_R8_UINT),
> +   fmt2(VK_FORMAT_D32_SFLOAT_S8_UINT,
> ISL_FORMAT_R32_FLOAT, ISL_FORMAT_R8_UINT),
> +
> +   swiz_fmt1(VK_FORMAT_BC1_RGB_UNORM_BLOCK,
> ISL_FORMAT_BC1_UNORM, RGB1),
> +   swiz_fmt1(VK_FORMAT_BC1_RGB_SRGB_BLOCK,
>  ISL_FORMAT_BC1_UNORM_SRGB, RGB1),
> +   fmt1(VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
> ISL_FORMAT_BC1_UNORM),
> +   fmt1(VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
>  ISL_FORMAT_BC1_UNORM_SRGB),
> +   fmt1(VK_FORMAT_BC2_UNORM_BLOCK,
>  ISL_FORMAT_BC2_UNORM),
> +   fmt1(VK_FORMAT_BC2_SRGB_BLOCK,
> ISL_FORMAT_BC2_UNORM_SRGB),
> +   fmt1(VK_FORMAT_BC3_UNORM_BLOCK,
>  ISL_FORMAT_BC3_UNORM),
> +   fmt1(VK_FORMAT_BC3_SRGB_BLOCK,
> ISL_FORMAT_BC3_UNORM_SRGB),
> +   fmt1(VK_FORMAT_BC4_UNORM_BLOCK,
>  ISL_FORMAT_BC4_UNORM),
> +   fmt1(VK_FORMAT_BC4_SNORM_BLOCK,
>  ISL_FORMAT_BC4_SNORM),
> +   fmt1(VK_FORMAT_BC5_UNORM_BLOCK,
>  ISL_FORMAT_BC5_UNORM),
> +   fmt1(VK_FORMAT_BC5_SNORM_BLOCK,
>  ISL_FORMAT_BC5_SNORM),
> +   fmt1(VK_FORMAT_BC6H_UFLOAT_BLOCK,
>  ISL_FORMAT_BC6H_UF16),
> +   fmt1(VK_FORMAT_BC6H_SFLOAT_BLOCK,
>  ISL_FORMAT_BC6H_SF16),
> +   fmt1(VK_FORMAT_BC7_UNORM_BLOCK,
>  ISL_FORMAT_BC7_UNORM),
> +   fmt1(VK_FORMAT_BC7_SRGB_BLOCK,
> ISL_FORMAT_BC7_UNORM_SRGB),
> +   fmt1(VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
>  ISL_FORMAT_ETC2_RGB8),
> +   fmt1(VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
> ISL_FORMAT_ETC2_SRGB8),
> +   fmt1(VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
>  ISL_FORMAT_ETC2_RGB8_PTA),
> +   fmt1(VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
> ISL_FORMAT_ETC2_SRGB8_PTA),
> +   fmt1(VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
>  ISL_FORMAT_ETC2_EAC_RGBA8),
> +   fmt1(VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
> ISL_FORMAT_ETC2_EAC_SRGB8_A8),
> +   fmt1(VK_FORMAT_EAC_R11_UNORM_BLOCK,               ISL_FORMAT_EAC_R11),
> +   fmt1(VK_FORMAT_EAC_R11_SNORM_BLOCK,
>  ISL_FORMAT_EAC_SIGNED_R11),
> +   fmt1(VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
> ISL_FORMAT_EAC_RG11),
> +   fmt1(VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
> ISL_FORMAT_EAC_SIGNED_RG11),
> +   fmt1(VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_4X4_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_5X4_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_5X5_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_6X5_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_6X6_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_8X5_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_8X6_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_8X8_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_10X5_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_10X6_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_10X8_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_10X10_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_12X10_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_12X12_U8SRGB),
> +   fmt1(VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_4X4_FLT16),
> +   fmt1(VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_5X4_FLT16),
> +   fmt1(VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_5X5_FLT16),
> +   fmt1(VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_6X5_FLT16),
> +   fmt1(VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_6X6_FLT16),
> +   fmt1(VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_8X5_FLT16),
> +   fmt1(VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_8X6_FLT16),
> +   fmt1(VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_8X8_FLT16),
> +   fmt1(VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_10X5_FLT16),
> +   fmt1(VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_10X6_FLT16),
> +   fmt1(VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
>  ISL_FORMAT_ASTC_LDR_2D_10X8_FLT16),
> +   fmt1(VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_10X10_FLT16),
> +   fmt1(VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_12X10_FLT16),
> +   fmt1(VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
> ISL_FORMAT_ASTC_LDR_2D_12X12_FLT16),
> +   fmt_unsupported(VK_FORMAT_B8G8R8_UNORM),
> +   fmt_unsupported(VK_FORMAT_B8G8R8_SNORM),
> +   fmt_unsupported(VK_FORMAT_B8G8R8_USCALED),
> +   fmt_unsupported(VK_FORMAT_B8G8R8_SSCALED),
> +   fmt_unsupported(VK_FORMAT_B8G8R8_UINT),
> +   fmt_unsupported(VK_FORMAT_B8G8R8_SINT),
> +   fmt_unsupported(VK_FORMAT_B8G8R8_SRGB),
> +   fmt1(VK_FORMAT_B8G8R8A8_UNORM,
> ISL_FORMAT_B8G8R8A8_UNORM),
> +   fmt_unsupported(VK_FORMAT_B8G8R8A8_SNORM),
> +   fmt_unsupported(VK_FORMAT_B8G8R8A8_USCALED),
> +   fmt_unsupported(VK_FORMAT_B8G8R8A8_SSCALED),
> +   fmt_unsupported(VK_FORMAT_B8G8R8A8_UINT),
> +   fmt_unsupported(VK_FORMAT_B8G8R8A8_SINT),
> +   fmt1(VK_FORMAT_B8G8R8A8_SRGB,
>  ISL_FORMAT_B8G8R8A8_UNORM_SRGB),
>  };
>
> +#undef _fmt
> +#undef swiz_fmt1
> +#undef fmt1
>  #undef fmt
>
>  static const struct {
> @@ -258,53 +287,55 @@ static const struct {
>     [0] = { .formats = main_formats, .n_formats =
> ARRAY_SIZE(main_formats), },
>  };
>
> -static struct anv_format
> -vk_to_anv_format(VkFormat vk_format)
> +const struct anv_format *
> +anv_get_format(VkFormat vk_format)
>  {
>     uint32_t enum_offset = VK_ENUM_OFFSET(vk_format);
>     uint32_t ext_number = VK_ENUM_EXTENSION(vk_format);
>
>     if (ext_number >= ARRAY_SIZE(anv_formats) ||
>         enum_offset >= anv_formats[ext_number].n_formats)
> -      return (struct anv_format) { .isl_format = ISL_FORMAT_UNSUPPORTED };
> +      return NULL;
>
> -   return anv_formats[ext_number].formats[enum_offset];
> -}
> +   const struct anv_format *format =
> +      &anv_formats[ext_number].formats[enum_offset];
> +   if (format->planes[0].isl_format == ISL_FORMAT_UNSUPPORTED)
> +      return NULL;
>
> -static bool
> -format_supported(VkFormat vk_format)
> -{
> -   return vk_to_anv_format(vk_format).isl_format !=
> ISL_FORMAT_UNSUPPORTED;
> +   return format;
>  }
>
>  /**
>   * Exactly one bit must be set in \a aspect.
>   */
> -struct anv_format
> -anv_get_format(const struct gen_device_info *devinfo, VkFormat vk_format,
> -               VkImageAspectFlags aspect, VkImageTiling tiling)
> +struct anv_format_plane
> +anv_get_format_plane(const struct gen_device_info *devinfo, VkFormat
> vk_format,
> +                     VkImageAspectFlags aspect, VkImageTiling tiling)
>  {
> -   struct anv_format format = vk_to_anv_format(vk_format);
> +   const struct anv_format *format = anv_get_format(vk_format);
> +   struct anv_format_plane plane_format = {
> +      .isl_format = ISL_FORMAT_UNSUPPORTED,
> +   };
>
> -   if (format.isl_format == ISL_FORMAT_UNSUPPORTED)
> -      return format;
> +   if (format == NULL)
> +      return plane_format;
>
> -   if (aspect == VK_IMAGE_ASPECT_STENCIL_BIT) {
> -      assert(vk_format_aspects(vk_format) & VK_IMAGE_ASPECT_STENCIL_BIT);
> -      format.isl_format = ISL_FORMAT_R8_UINT;
> -      return format;
> -   }
> +   uint32_t plane = anv_image_aspect_to_plane(vk_format_aspects(vk_format),
> aspect);
> +   plane_format = format->planes[plane];
> +   if (plane_format.isl_format == ISL_FORMAT_UNSUPPORTED)
> +      return plane_format;
>
> -   if (aspect & VK_IMAGE_ASPECT_DEPTH_BIT) {
> -      assert(vk_format_aspects(vk_format) & VK_IMAGE_ASPECT_DEPTH_BIT);
> -      return format;
> +   if (aspect & (VK_IMAGE_ASPECT_DEPTH_BIT |
> VK_IMAGE_ASPECT_STENCIL_BIT)) {
> +      assert(vk_format_aspects(vk_format) &
> +             (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT));
> +      return plane_format;
>     }
>
>     assert(aspect == VK_IMAGE_ASPECT_COLOR_BIT);
>     assert(vk_format_aspects(vk_format) == VK_IMAGE_ASPECT_COLOR_BIT);
>
>     const struct isl_format_layout *isl_layout =
> -      isl_format_get_layout(format.isl_format);
> +      isl_format_get_layout(plane_format.isl_format);
>
>     if (tiling == VK_IMAGE_TILING_OPTIMAL &&
>         !util_is_power_of_two(isl_layout->bpb)) {
> @@ -313,13 +344,14 @@ anv_get_format(const struct gen_device_info
> *devinfo, VkFormat vk_format,
>         * this by switching them over to RGBX or RGBA formats under the
>         * hood.
>         */
> -      enum isl_format rgbx = isl_format_rgb_to_rgbx(format.isl_format);
> +      enum isl_format rgbx = isl_format_rgb_to_rgbx(plane_
> format.isl_format);
>        if (rgbx != ISL_FORMAT_UNSUPPORTED &&
>            isl_format_supports_rendering(devinfo, rgbx)) {
> -         format.isl_format = rgbx;
> +         plane_format.isl_format = rgbx;
>        } else {
> -         format.isl_format = isl_format_rgb_to_rgba(format.isl_format);
> -         format.swizzle = ISL_SWIZZLE(RED, GREEN, BLUE, ONE);
> +         plane_format.isl_format =
> +            isl_format_rgb_to_rgba(plane_format.isl_format);
> +         plane_format.swizzle = ISL_SWIZZLE(RED, GREEN, BLUE, ONE);
>        }
>     }
>
> @@ -327,20 +359,18 @@ anv_get_format(const struct gen_device_info
> *devinfo, VkFormat vk_format,
>      * back to a format with a more complex swizzle.
>      */
>     if (vk_format == VK_FORMAT_B4G4R4A4_UNORM_PACK16 && devinfo->gen < 8)
> {
> -      return (struct anv_format) {
> -         .isl_format = ISL_FORMAT_B4G4R4A4_UNORM,
> -         .swizzle = ISL_SWIZZLE(GREEN, RED, ALPHA, BLUE),
> -      };
> +      plane_format.isl_format = ISL_FORMAT_B4G4R4A4_UNORM;
> +      plane_format.swizzle = ISL_SWIZZLE(GREEN, RED, ALPHA, BLUE);
>     }
>
> -   return format;
> +   return plane_format;
>  }
>
>  // Format capabilities
>
>  static VkFormatFeatureFlags
>  get_image_format_properties(const struct gen_device_info *devinfo,
> -                            enum isl_format base, struct anv_format
> format)
> +                            enum isl_format base, struct anv_format_plane
> format)
>  {
>     if (format.isl_format == ISL_FORMAT_UNSUPPORTED)
>        return 0;
> @@ -410,19 +440,20 @@ get_buffer_format_properties(const struct
> gen_device_info *devinfo,
>
>  static void
>  anv_physical_device_get_format_properties(struct anv_physical_device
> *physical_device,
> -                                          VkFormat format,
> +                                          VkFormat vk_format,
>                                            VkFormatProperties
> *out_properties)
>  {
>     int gen = physical_device->info.gen * 10;
>     if (physical_device->info.is_haswell)
>        gen += 5;
>
> +   const struct anv_format *format = anv_get_format(vk_format);
>     VkFormatFeatureFlags linear = 0, tiled = 0, buffer = 0;
> -   if (!format_supported(format)) {
> +   if (format == NULL) {
>        /* Nothing to do here */
> -   } else if (vk_format_is_depth_or_stencil(format)) {
> +   } else if (vk_format_is_depth_or_stencil(vk_format)) {
>        tiled |= VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
> -      if (vk_format_aspects(format) == VK_IMAGE_ASPECT_DEPTH_BIT ||
> +      if (vk_format_aspects(vk_format) == VK_IMAGE_ASPECT_DEPTH_BIT ||
>            physical_device->info.gen >= 8)
>           tiled |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
>
> @@ -431,13 +462,13 @@ anv_physical_device_get_format_properties(struct
> anv_physical_device *physical_d
>                 VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR |
>                 VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR;
>     } else {
> -      struct anv_format linear_fmt, tiled_fmt;
> -      linear_fmt = anv_get_format(&physical_device->info, format,
> -                                  VK_IMAGE_ASPECT_COLOR_BIT,
> -                                  VK_IMAGE_TILING_LINEAR);
> -      tiled_fmt = anv_get_format(&physical_device->info, format,
> -                                 VK_IMAGE_ASPECT_COLOR_BIT,
> -                                 VK_IMAGE_TILING_OPTIMAL);
> +      struct anv_format_plane linear_fmt, tiled_fmt;
> +      linear_fmt = anv_get_format_plane(&physical_device->info,
> vk_format,
> +                                        VK_IMAGE_ASPECT_COLOR_BIT,
> +                                        VK_IMAGE_TILING_LINEAR);
> +      tiled_fmt = anv_get_format_plane(&physical_device->info, vk_format,
> +                                       VK_IMAGE_ASPECT_COLOR_BIT,
> +                                       VK_IMAGE_TILING_OPTIMAL);
>
>        linear = get_image_format_properties(&physical_device->info,
>                                             linear_fmt.isl_format,
> linear_fmt);
> @@ -515,8 +546,9 @@ anv_get_image_format_properties(
>     uint32_t maxMipLevels;
>     uint32_t maxArraySize;
>     VkSampleCountFlags sampleCounts = VK_SAMPLE_COUNT_1_BIT;
> +   const struct anv_format *format = anv_get_format(info->format);
>
> -   if (!format_supported(info->format))
> +   if (format == NULL)
>        goto unsupported;
>
>     anv_physical_device_get_format_properties(physical_device,
> info->format,
> @@ -570,7 +602,7 @@ anv_get_image_format_properties(
>      *    * This field cannot be ASTC format if the Surface Type is
> SURFTYPE_1D.
>      */
>     if (info->type == VK_IMAGE_TYPE_1D &&
> -       isl_format_is_compressed(vk_to_anv_format(info->format).isl_format))
> {
> +       isl_format_is_compressed(format->planes[0].isl_format)) {
>         goto unsupported;
>     }
>
> diff --git a/src/intel/vulkan/anv_image.c b/src/intel/vulkan/anv_image.c
> index 7561b9b52b4..cec3768a380 100644
> --- a/src/intel/vulkan/anv_image.c
> +++ b/src/intel/vulkan/anv_image.c
> @@ -264,8 +264,8 @@ make_surface(const struct anv_device *dev,
>     image->extent = anv_sanitize_image_extent(vk_info->imageType,
>                                               vk_info->extent);
>
> -   enum isl_format format = anv_get_isl_format(&dev->info,
> vk_info->format,
> -                                               aspect, vk_info->tiling);
> +   enum isl_format format =
> +      anv_get_isl_format(&dev->info, vk_info->format, aspect,
> vk_info->tiling);
>

spurious change


>     assert(format != ISL_FORMAT_UNSUPPORTED);
>
>     /* If an image is created as BLOCK_TEXEL_VIEW_COMPATIBLE, then we need
> to
> @@ -972,8 +972,12 @@ anv_CreateImageView(VkDevice _device,
>     iview->aspect_mask = pCreateInfo->subresourceRange.aspectMask;
>     iview->vk_format = pCreateInfo->format;
>
> -   struct anv_format format = anv_get_format(&device->info,
> pCreateInfo->format,
> -                                             range->aspectMask,
> image->tiling);
> +   struct anv_format_plane format =
> +      anv_get_format_plane(&device->info, pCreateInfo->format,
> +                           range->aspectMask ==
> (VK_IMAGE_ASPECT_DEPTH_BIT |
> +
>  VK_IMAGE_ASPECT_STENCIL_BIT) ?
> +                           VK_IMAGE_ASPECT_DEPTH_BIT : range->aspectMask,
> +                           image->tiling);
>
>     iview->isl = (struct isl_view) {
>        .format = format.isl_format,
> diff --git a/src/intel/vulkan/anv_private.h b/src/intel/vulkan/anv_
> private.h
> index b58c803530c..261ec9bb7c6 100644
> --- a/src/intel/vulkan/anv_private.h
> +++ b/src/intel/vulkan/anv_private.h
> @@ -2169,20 +2169,60 @@ anv_pipeline_compile_cs(struct anv_pipeline
> *pipeline,
>                          const char *entrypoint,
>                          const VkSpecializationInfo *spec_info);
>
> -struct anv_format {
> +struct anv_format_plane {
>     enum isl_format isl_format:16;
>     struct isl_swizzle swizzle;
>  };
>
> -struct anv_format
> -anv_get_format(const struct gen_device_info *devinfo, VkFormat format,
> -               VkImageAspectFlags aspect, VkImageTiling tiling);
> +
> +struct anv_format {
> +   struct anv_format_plane planes[3];
> +   uint8_t n_planes;
> +};
> +
> +static inline uint32_t
> +anv_image_aspect_to_plane(VkImageAspectFlags image_aspects,
> +                          VkImageAspectFlags aspect_mask)
> +{
> +   switch (aspect_mask) {
> +   case VK_IMAGE_ASPECT_COLOR_BIT:
> +   case VK_IMAGE_ASPECT_DEPTH_BIT:
> +   case VK_IMAGE_ASPECT_PLANE_0_BIT_KHR:
> +      return 0;
> +   case VK_IMAGE_ASPECT_STENCIL_BIT:
> +      if ((image_aspects & VK_IMAGE_ASPECT_DEPTH_BIT) == 0)
> +         return 0;
> +      /* Fall-through */
> +   case VK_IMAGE_ASPECT_PLANE_1_BIT_KHR:
> +      return 1;
> +   case VK_IMAGE_ASPECT_PLANE_2_BIT_KHR:
> +      return 2;
> +   default:
> +      unreachable("invalid image aspect");
> +   }
> +}
> +
> +const struct anv_format *
> +anv_get_format(VkFormat format);
> +
> +static inline uint32_t
> +anv_get_format_planes(VkFormat vk_format)
> +{
> +   const struct anv_format *format = anv_get_format(vk_format);
> +
> +   return format != NULL ? format->n_planes : 0;
> +}
> +
> +struct anv_format_plane
> +anv_get_format_plane(const struct gen_device_info *devinfo, VkFormat
> vk_format,
> +                     VkImageAspectFlags aspect, VkImageTiling tiling);
>
>  static inline enum isl_format
> -anv_get_isl_format(const struct gen_device_info *devinfo, VkFormat
> vk_format,
> -                   VkImageAspectFlags aspect, VkImageTiling tiling)
> +anv_get_isl_format(const struct gen_device_info *devinfo,
> +                   VkFormat vk_format, VkImageAspectFlags aspect,
> +                   VkImageTiling tiling)
>

Now a spurious change


>  {
> -   return anv_get_format(devinfo, vk_format, aspect, tiling).isl_format;
> +   return anv_get_format_plane(devinfo, vk_format, aspect,
> tiling).isl_format;
>  }
>
>  static inline struct isl_swizzle
> diff --git a/src/intel/vulkan/genX_pipeline.c b/src/intel/vulkan/genX_
> pipeline.c
> index c2fa9c0ff7f..62c522e1ea6 100644
> --- a/src/intel/vulkan/genX_pipeline.c
> +++ b/src/intel/vulkan/genX_pipeline.c
> @@ -120,10 +120,9 @@ emit_vertex_input(struct anv_pipeline *pipeline,
>     for (uint32_t i = 0; i < info->vertexAttributeDescriptionCount; i++) {
>        const VkVertexInputAttributeDescription *desc =
>           &info->pVertexAttributeDescriptions[i];
> -      enum isl_format format = anv_get_isl_format(&pipeline->
> device->info,
> -                                                  desc->format,
> -
> VK_IMAGE_ASPECT_COLOR_BIT,
> -                                                  VK_IMAGE_TILING_LINEAR);
> +      enum isl_format format =
> +         anv_get_isl_format(&pipeline->device->info, desc->format,
> +                            VK_IMAGE_ASPECT_COLOR_BIT,
> VK_IMAGE_TILING_LINEAR);
>

Now a spurious change


>
>        assert(desc->binding < MAX_VBS);
>
> --
> 2.14.2
>
> _______________________________________________
> mesa-dev mailing list
> mesa-dev at lists.freedesktop.org
> https://lists.freedesktop.org/mailman/listinfo/mesa-dev
>
-------------- next part --------------
An HTML attachment was scrubbed...
URL: <https://lists.freedesktop.org/archives/mesa-dev/attachments/20171004/69bc7fc0/attachment-0001.html>


More information about the mesa-dev mailing list