gzdoom/src/gl/system/gl_interface.h
Christoph Oelckers 7d3beb665b - rewrote vertex buffer code to require GL_ARB_BUFFER_STORAGE extension.
This means it will no longer work on anything that doesn't support OpenGL 4.0, but I don't think this is a problem: on older NVidia cards no performance gains could be seen, and on older AMD cards using the vertex buffer was even worse as long as it was mixed with immediate-mode rendering.
2014-05-10 21:47:07 +02:00

48 lines
772 B
C

#ifndef R_RENDER
#define R_RENDER
#include "basictypes.h"
enum RenderFlags
{
// [BB] Added texture compression flags.
RFL_TEXTURE_COMPRESSION=8,
RFL_TEXTURE_COMPRESSION_S3TC=16,
RFL_FRAMEBUFFER = 32,
RFL_BUFFER_STORAGE = 64,
};
enum TexMode
{
TMF_MASKBIT = 1,
TMF_OPAQUEBIT = 2,
TMF_INVERTBIT = 4,
TM_MODULATE = 0,
TM_MASK = TMF_MASKBIT,
TM_OPAQUE = TMF_OPAQUEBIT,
TM_INVERT = TMF_INVERTBIT,
//TM_INVERTMASK = TMF_MASKBIT | TMF_INVERTBIT
TM_INVERTOPAQUE = TMF_INVERTBIT | TMF_OPAQUEBIT,
};
struct RenderContext
{
unsigned int flags;
unsigned int shadermodel;
unsigned int maxuniforms;
float version;
int max_texturesize;
char * vendorstring;
int MaxLights() const
{
return maxuniforms>=2048? 128:64;
}
};
extern RenderContext gl;
#endif