OpenGL2: Use RGBA16F format for HDR.

RGB16F is not 4-byte aligned and not supported by certain hardware.
SmileTheory 2013-11-20 00:48:18 -08:00
parent e686010d73
commit d63d7ba6bf
2 changed files with 3 additions and 3 deletions
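
For context on the alignment note in the commit message: a GL_RGB16F texel is three 16-bit channels, 6 bytes, so row pitches are often not multiples of 4 and can violate OpenGL's default 4-byte unpack alignment, while a GL_RGBA16F texel is 8 bytes and always 4-byte aligned. A minimal sketch of the arithmetic (the width value is illustrative):

#include <stdio.h>

/* Bytes per texel for 16-bit float formats. */
#define RGB16F_TEXEL_BYTES  (3 * 2)  /* 6 bytes: not a multiple of 4 */
#define RGBA16F_TEXEL_BYTES (4 * 2)  /* 8 bytes: always 4-byte aligned */

int main(void)
{
    int width = 85; /* illustrative odd width */

    /* With the default GL_UNPACK_ALIGNMENT of 4, 6-byte texels
       produce row pitches that generally need padding; 8-byte
       texels never do. */
    printf("RGB16F  row bytes: %d (mod 4 = %d)\n",
           width * RGB16F_TEXEL_BYTES, (width * RGB16F_TEXEL_BYTES) % 4);
    printf("RGBA16F row bytes: %d (mod 4 = %d)\n",
           width * RGBA16F_TEXEL_BYTES, (width * RGBA16F_TEXEL_BYTES) % 4);
    return 0;
}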

@@ -386,7 +386,7 @@ void FBO_Init(void)
 	hdrFormat = GL_RGBA8;
 	if (r_hdr->integer && glRefConfig.framebufferObject && glRefConfig.textureFloat)
 	{
-		hdrFormat = GL_RGB16F_ARB;
+		hdrFormat = GL_RGBA16F_ARB;
 	}
 	qglGetIntegerv(GL_MAX_SAMPLES_EXT, &multisample);
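
The hunk above selects the HDR color buffer format up front and falls back to GL_RGBA8 when FBOs or float textures are unavailable. The sketch below restates that selection as a standalone helper; chooseHdrFormat and its parameters are illustrative names, not part of the commit:

#include <GL/gl.h>

#ifndef GL_RGBA8
#define GL_RGBA8 0x8058
#endif
#ifndef GL_RGBA16F_ARB
#define GL_RGBA16F_ARB 0x881A
#endif

/* Hypothetical helper mirroring the commit's logic: prefer RGBA16F
   when HDR is requested and both FBOs and float textures are
   supported, otherwise fall back to the universally supported
   RGBA8. */
static GLenum chooseHdrFormat(int wantHdr, int haveFbo, int haveTexFloat)
{
    if (wantHdr && haveFbo && haveTexFloat)
        return GL_RGBA16F_ARB; /* 8-byte texels, 4-byte aligned */
    return GL_RGBA8;           /* 8-bit-per-channel fallback */
}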

@@ -2922,7 +2922,7 @@ void R_CreateBuiltinImages( void ) {
 	hdrFormat = GL_RGBA8;
 	if (r_hdr->integer && glRefConfig.framebufferObject && glRefConfig.textureFloat)
-		hdrFormat = GL_RGB16F_ARB;
+		hdrFormat = GL_RGBA16F_ARB;
 
 	rgbFormat = GL_RGBA8;
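
For illustration, allocating texture storage with the chosen internal format could look roughly like the sketch below; createHdrTexture and its parameters are hypothetical, and the engine's real upload path differs:

#include <GL/gl.h>

#ifndef GL_RGBA16F_ARB
#define GL_RGBA16F_ARB 0x881A
#endif
#ifndef GL_HALF_FLOAT_ARB
#define GL_HALF_FLOAT_ARB 0x140B
#endif

/* Illustrative only: allocate an empty RGBA16F texture. The
   internal format is the 16-bit float RGBA from ARB_texture_float;
   the client-side type for half-float data comes from
   ARB_half_float_pixel. */
static GLuint createHdrTexture(GLsizei width, GLsizei height)
{
    GLuint tex;
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA16F_ARB, width, height,
                 0, GL_RGBA, GL_HALF_FLOAT_ARB, NULL);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    return tex;
}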
@@ -2941,7 +2941,7 @@ void R_CreateBuiltinImages( void ) {
 	unsigned short sdata[4];
 	void *p;
 
-	if (hdrFormat == GL_RGB16F_ARB)
+	if (hdrFormat == GL_RGBA16F_ARB)
 	{
 		sdata[0] = FloatToHalf(0.0f);
 		sdata[1] = FloatToHalf(0.45f);
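
FloatToHalf above packs a 32-bit float into an IEEE 754 half-precision value for use as RGBA16F texel data. The engine ships its own implementation; the version below is a simplified sketch (truncating rounding, no NaN or denormal handling) showing the bit manipulation involved:

#include <stdint.h>
#include <string.h>

/* Simplified float -> half conversion: handles normal values,
   clamps overflow to infinity, flushes tiny values to zero.
   Truncates instead of rounding; the engine's real FloatToHalf
   may differ in these details. */
static uint16_t FloatToHalfSketch(float f)
{
    uint32_t bits;
    memcpy(&bits, &f, sizeof(bits));

    uint16_t sign = (uint16_t)((bits >> 16) & 0x8000u);
    int32_t  exp  = (int32_t)((bits >> 23) & 0xFFu) - 127 + 15;
    uint32_t mant = bits & 0x7FFFFFu;

    if (exp >= 31)  /* overflow (or inf/NaN) -> infinity */
        return (uint16_t)(sign | 0x7C00u);
    if (exp <= 0)   /* below half's normal range -> flush to zero */
        return sign;

    /* Reassemble: 1 sign bit, 5 exponent bits, top 10 mantissa bits. */
    return (uint16_t)(sign | ((uint32_t)exp << 10) | (mant >> 13));
}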