r300g: fix texture border for 16-bits-per-channel formats

This is kinda hacky, but it's hard to come up with a generic solution for
all formats when only a few are used in practice (I mostly get B8G8R8*8).
This commit is contained in:
Marek Olšák 2010-10-05 02:52:03 +02:00
parent 6e61853590
commit 8449a4772a
1 changed file with 9 additions and 4 deletions

View File

@@ -620,14 +620,19 @@ static uint32_t r300_get_border_color(enum pipe_format format,
}
break;
default:
/* I think the fat formats (16, 32) are specified
* as the 8-bit ones. I am not sure how compressed formats
* work here. */
case 8:
r = ((float_to_ubyte(border_swizzled[0]) & 0xff) << 0) |
((float_to_ubyte(border_swizzled[1]) & 0xff) << 8) |
((float_to_ubyte(border_swizzled[2]) & 0xff) << 16) |
((float_to_ubyte(border_swizzled[3]) & 0xff) << 24);
break;
case 16:
r = ((float_to_ubyte(border_swizzled[2]) & 0xff) << 0) |
((float_to_ubyte(border_swizzled[1]) & 0xff) << 8) |
((float_to_ubyte(border_swizzled[0]) & 0xff) << 16) |
((float_to_ubyte(border_swizzled[3]) & 0xff) << 24);
break;
}
return r;