From 273725e86697b051fbb14546cee70f9c84774810 Mon Sep 17 00:00:00 2001
From: Cameron Cawley
Date: Wed, 8 Jan 2025 18:45:43 +0000
Subject: [PATCH] Use the correct pixel formats for OpenGL on big endian

---
 src/render/opengl/SDL_render_gl.c | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/src/render/opengl/SDL_render_gl.c b/src/render/opengl/SDL_render_gl.c
index 81c930237c..da1ccbfb6e 100644
--- a/src/render/opengl/SDL_render_gl.c
+++ b/src/render/opengl/SDL_render_gl.c
@@ -409,14 +409,14 @@ static bool GL_SupportsBlendMode(SDL_Renderer *renderer, SDL_BlendMode blendMode
 static bool convert_format(Uint32 pixel_format, GLint *internalFormat, GLenum *format, GLenum *type)
 {
     switch (pixel_format) {
-    case SDL_PIXELFORMAT_ARGB8888:
-    case SDL_PIXELFORMAT_XRGB8888:
+    case SDL_PIXELFORMAT_BGRA32:
+    case SDL_PIXELFORMAT_BGRX32:
         *internalFormat = GL_RGBA8;
         *format = GL_BGRA;
         *type = GL_UNSIGNED_BYTE; // previously GL_UNSIGNED_INT_8_8_8_8_REV, seeing if this is better in modern times.
         break;
-    case SDL_PIXELFORMAT_ABGR8888:
-    case SDL_PIXELFORMAT_XBGR8888:
+    case SDL_PIXELFORMAT_RGBA32:
+    case SDL_PIXELFORMAT_RGBX32:
         *internalFormat = GL_RGBA8;
         *format = GL_RGBA;
         *type = GL_UNSIGNED_BYTE; // previously GL_UNSIGNED_INT_8_8_8_8_REV, seeing if this is better in modern times.
@@ -625,7 +625,7 @@ static bool GL_CreateTexture(SDL_Renderer *renderer, SDL_Texture *texture, SDL_P
     }
 #endif
 
-    if (texture->format == SDL_PIXELFORMAT_ABGR8888 || texture->format == SDL_PIXELFORMAT_ARGB8888) {
+    if (texture->format == SDL_PIXELFORMAT_RGBA32 || texture->format == SDL_PIXELFORMAT_BGRA32) {
         data->shader = SHADER_RGBA;
     } else {
         data->shader = SHADER_RGB;
@@ -1500,7 +1500,7 @@ static bool GL_RunCommandQueue(SDL_Renderer *renderer, SDL_RenderCommand *cmd, v
 static SDL_Surface *GL_RenderReadPixels(SDL_Renderer *renderer, const SDL_Rect *rect)
 {
     GL_RenderData *data = (GL_RenderData *)renderer->internal;
-    SDL_PixelFormat format = renderer->target ? renderer->target->format : SDL_PIXELFORMAT_ARGB8888;
+    SDL_PixelFormat format = renderer->target ? renderer->target->format : SDL_PIXELFORMAT_RGBA32;
     GLint internalFormat;
     GLenum targetFormat, type;
     SDL_Surface *surface;
@@ -1712,10 +1712,9 @@ static bool GL_CreateRenderer(SDL_Renderer *renderer, SDL_Window *window, SDL_Pr
     renderer->window = window;
     renderer->name = GL_RenderDriver.name;
 
-    SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_ARGB8888);
-    SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_ABGR8888);
-    SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_XRGB8888);
-    SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_XBGR8888);
+    SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_RGBA32);
+    /* TODO: Check for required extensions on OpenGL 1.1 systems? */
+    SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_BGRA32);
 
     data->context = SDL_GL_CreateContext(window);
     if (!data->context) {
@@ -1803,6 +1802,10 @@ static bool GL_CreateRenderer(SDL_Renderer *renderer, SDL_Window *window, SDL_Pr
     data->shaders = GL_CreateShaderContext();
     SDL_LogInfo(SDL_LOG_CATEGORY_RENDER, "OpenGL shaders: %s",
                 data->shaders ? "ENABLED" : "DISABLED");
+    if (data->shaders) {
+        SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_RGBX32);
+        SDL_AddSupportedTextureFormat(renderer, SDL_PIXELFORMAT_BGRX32);
+    }
 #ifdef SDL_HAVE_YUV
     // We support YV12 textures using 3 textures and a shader
     if (data->shaders && data->num_texture_units >= 3) {