glx: stop using hardcoded array sizes for bitfields
Replicate the solution used for OpenGL extensions by introducing __GLX_EXT_BYTES.

Suggested-by: Adam Jackson <ajax@redhat.com>
Reviewed-by: Adam Jackson <ajax@redhat.com>
Reviewed-by: Ian Romanick <ian.d.romanick@intel.com>
Signed-off-by: Martin Peres <martin.peres@mupuf.org>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/7212>
This commit is contained in:
parent
af25f47bdc
commit
1331b86299
|
@ -544,7 +544,7 @@ struct glx_screen
|
|||
* libGL.
|
||||
*/
|
||||
/*@{ */
|
||||
unsigned char direct_support[8];
|
||||
unsigned char direct_support[__GLX_EXT_BYTES];
|
||||
GLboolean ext_list_first_time;
|
||||
/*@} */
|
||||
|
||||
|
|
|
@ -324,9 +324,9 @@ static const struct extension_info known_gl_extensions[] = {
|
|||
|
||||
|
||||
/* global bit-fields of available extensions and their characteristics */
|
||||
static unsigned char client_glx_support[8];
|
||||
static unsigned char client_glx_only[8];
|
||||
static unsigned char direct_glx_only[8];
|
||||
static unsigned char client_glx_support[__GLX_EXT_BYTES];
|
||||
static unsigned char client_glx_only[__GLX_EXT_BYTES];
|
||||
static unsigned char direct_glx_only[__GLX_EXT_BYTES];
|
||||
static unsigned char client_gl_support[__GL_EXT_BYTES];
|
||||
static unsigned char client_gl_only[__GL_EXT_BYTES];
|
||||
|
||||
|
@ -334,7 +334,7 @@ static unsigned char client_gl_only[__GL_EXT_BYTES];
|
|||
* Bits representing the set of extensions that are enabled by default in all
|
||||
* direct rendering drivers.
|
||||
*/
|
||||
static unsigned char direct_glx_support[8];
|
||||
static unsigned char direct_glx_support[__GLX_EXT_BYTES];
|
||||
|
||||
/**
|
||||
* Highest core GL version that can be supported for indirect rendering.
|
||||
|
@ -648,8 +648,8 @@ __glXCalculateUsableExtensions(struct glx_screen * psc,
|
|||
GLboolean display_is_direct_capable,
|
||||
int minor_version)
|
||||
{
|
||||
unsigned char server_support[8];
|
||||
unsigned char usable[8];
|
||||
unsigned char server_support[__GLX_EXT_BYTES];
|
||||
unsigned char usable[__GLX_EXT_BYTES];
|
||||
unsigned i;
|
||||
|
||||
__glXExtensionsCtr();
|
||||
|
@ -694,7 +694,7 @@ __glXCalculateUsableExtensions(struct glx_screen * psc,
|
|||
*/
|
||||
|
||||
if (display_is_direct_capable) {
|
||||
for (i = 0; i < 8; i++) {
|
||||
for (i = 0; i < __GLX_EXT_BYTES; i++) {
|
||||
usable[i] = (client_glx_support[i] & client_glx_only[i])
|
||||
| (client_glx_support[i] & psc->direct_support[i] &
|
||||
server_support[i])
|
||||
|
@ -703,7 +703,7 @@ __glXCalculateUsableExtensions(struct glx_screen * psc,
|
|||
}
|
||||
}
|
||||
else {
|
||||
for (i = 0; i < 8; i++) {
|
||||
for (i = 0; i < __GLX_EXT_BYTES; i++) {
|
||||
usable[i] = (client_glx_support[i] & client_glx_only[i])
|
||||
| (client_glx_support[i] & server_support[i]);
|
||||
}
|
||||
|
|
|
@ -74,8 +74,12 @@ enum
|
|||
SGI_make_current_read_bit,
|
||||
SGI_swap_control_bit,
|
||||
SGI_video_sync_bit,
|
||||
|
||||
__NUM_GLX_EXTS,
|
||||
};
|
||||
|
||||
#define __GLX_EXT_BYTES ((__NUM_GLX_EXTS + 7) / 8)
|
||||
|
||||
/* From the GLX perspective, the ARB and EXT extensions are identical. Use a
|
||||
* single bit for both.
|
||||
*/
|
||||
|
|
Loading…
Reference in New Issue