i965: Add support for 16-bit unorm L, A, and I textures.
While not required by any particular spec version, mplayer was asking for L16 and hoping for actual L16 without checking. The 8 bits allocated led to 10-bit planar video data being stored in the lower 10 bits, giving only 2 bits of precision in video. While it was an amusing effect, give them what they actually wanted instead. Bugzilla: https://bugs.freedesktop.org/show_bug.cgi?id=41461 Reviewed-by: Kenneth Graunke <kenneth@whitecape.org>
This commit is contained in:
parent
b5444a6ebd
commit
e56aecf249
|
@ -80,6 +80,9 @@ brw_format_for_mesa_format(gl_format mesa_format)
|
|||
[MESA_FORMAT_I8] = BRW_SURFACEFORMAT_I8_UNORM,
|
||||
[MESA_FORMAT_A8] = BRW_SURFACEFORMAT_A8_UNORM,
|
||||
[MESA_FORMAT_AL88] = BRW_SURFACEFORMAT_L8A8_UNORM,
|
||||
[MESA_FORMAT_L16] = BRW_SURFACEFORMAT_L16_UNORM,
|
||||
[MESA_FORMAT_A16] = BRW_SURFACEFORMAT_A16_UNORM,
|
||||
[MESA_FORMAT_I16] = BRW_SURFACEFORMAT_I16_UNORM,
|
||||
[MESA_FORMAT_AL1616] = BRW_SURFACEFORMAT_L16A16_UNORM,
|
||||
[MESA_FORMAT_R8] = BRW_SURFACEFORMAT_R8_UNORM,
|
||||
[MESA_FORMAT_R16] = BRW_SURFACEFORMAT_R16_UNORM,
|
||||
|
|
|
@ -640,8 +640,12 @@ intelInitContext(struct intel_context *intel,
|
|||
ctx->TextureFormatSupported[MESA_FORMAT_A8] = true;
|
||||
ctx->TextureFormatSupported[MESA_FORMAT_I8] = true;
|
||||
ctx->TextureFormatSupported[MESA_FORMAT_AL88] = true;
|
||||
if (intel->gen >= 4)
|
||||
if (intel->gen >= 4) {
|
||||
ctx->TextureFormatSupported[MESA_FORMAT_L16] = true;
|
||||
ctx->TextureFormatSupported[MESA_FORMAT_A16] = true;
|
||||
ctx->TextureFormatSupported[MESA_FORMAT_I16] = true;
|
||||
ctx->TextureFormatSupported[MESA_FORMAT_AL1616] = true;
|
||||
}
|
||||
|
||||
/* Depth and stencil */
|
||||
ctx->TextureFormatSupported[MESA_FORMAT_S8_Z24] = true;
|
||||
|
|
Loading…
Reference in New Issue