/*
    gl_rsurf.c

    surface-related refresh code

    Copyright (C) 1996-1997  Id Software, Inc.
    Copyright (C) 2000  Joseph Carter <knghtbrd@debian.org>

    This program is free software; you can redistribute it and/or
    modify it under the terms of the GNU General Public License
    as published by the Free Software Foundation; either version 2
    of the License, or (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.

    See the GNU General Public License for more details.

    You should have received a copy of the GNU General Public License
    along with this program; if not, write to:

        Free Software Foundation, Inc.
        59 Temple Place - Suite 330
        Boston, MA  02111-1307, USA

    $Id$
*/

#ifdef HAVE_CONFIG_H
# include "config.h"
#endif

#include <stdio.h>
#include <string.h>
#include <math.h>

#include "bothdefs.h"   // needed by: common.h, net.h, client.h
#include "qargs.h"
#include "bspfile.h"    // needed by: glquake.h
#include "vid.h"
#include "sys.h"
#include "zone.h"       // needed by: client.h, gl_model.h
#include "mathlib.h"    // needed by: protocol.h, render.h, client.h,
                        //            modelgen.h, glmodel.h
#include "wad.h"
#include "draw.h"
#include "cvar.h"
#include "net.h"        // needed by: client.h
#include "protocol.h"   // needed by: client.h
#include "cmd.h"
#include "sbar.h"
#include "render.h"     // needed by: client.h, gl_model.h, glquake.h
#include "client.h"     // need cls in this file
#include "model.h"      // needed by: glquake.h
#include "console.h"
#include "glquake.h"

extern double realtime;

int         skytexturenum;

#ifndef GL_RGBA4
#define GL_RGBA4    0
#endif

int         lightmap_bytes;             // 1 or 4
int         lightmap_textures;

unsigned    blocklights[18*18*3];

cvar_t      *gl_colorlights;

#define BLOCK_WIDTH     128
#define BLOCK_HEIGHT    128
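
// Layout note: blocklights[] accumulates a single surface's lighting as up
// to 18x18 samples with three channels each (R, G, B); BLOCK_WIDTH x
// BLOCK_HEIGHT is the size of one lightmap atlas texture, and MAX_LIGHTMAPS
// (below) caps how many of those atlas pages can exist.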

#define MAX_LIGHTMAPS   256
int         active_lightmaps;

typedef struct glRect_s {
    unsigned char   l, t, w, h;
} glRect_t;

glpoly_t    *lightmap_polys[MAX_LIGHTMAPS];
qboolean    lightmap_modified[MAX_LIGHTMAPS];
glRect_t    lightmap_rectchange[MAX_LIGHTMAPS];
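
// For each lightmap atlas page, allocated[][] records how far down every
// texel column has been filled; presumably consulted by the lightmap
// packer elsewhere in this file when fitting surfaces into the 128x128
// pages.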
int         allocated[MAX_LIGHTMAPS][BLOCK_WIDTH];

// the lightmap texture data needs to be kept in
// main memory so texsubimage can update properly
byte        lightmaps[4*MAX_LIGHTMAPS*BLOCK_WIDTH*BLOCK_HEIGHT];

// For gl_texsort 0
msurface_t  *skychain = NULL;
msurface_t  *waterchain = NULL;

void R_RenderDynamicLightmaps (msurface_t *fa);

/*
    R_AddDynamicLights

    LordHavoc's redesigned this function completely
*/
void
R_AddDynamicLights (msurface_t *surf)
{
    int         lnum;
    int         sd, td;
    float       dist, rada, radb;
    float       red, green, blue, brightness;
    vec3_t      impact, local;
    int         s, t;
    int         i;
    int         smax, tmax;
    mtexinfo_t  *tex;
    unsigned    *bl;

    smax = (surf->extents[0]>>4)+1;
    tmax = (surf->extents[1]>>4)+1;
    tex = surf->texinfo;
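
    // Accumulate every dynamic light touching this surface into
    // blocklights.  surf->dlightbits has one bit per dlight; lights whose
    // bit is clear are skipped without any further math.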
    for (lnum=0 ; lnum<MAX_DLIGHTS ; lnum++)
    {
        if ( !(surf->dlightbits & (1<<lnum) ) )
            continue;       // not lit by this light

        dist = DotProduct (cl_dlights[lnum].origin, surf->plane->normal)
            - surf->plane->dist;

        for (i=0 ; i<3 ; i++)
            impact[i] = cl_dlights[lnum].origin[i] -
                surf->plane->normal[i]*dist;

        local[0] = DotProduct (impact, tex->vecs[0]) + tex->vecs[0][3]
            - surf->texturemins[0];
        local[1] = DotProduct (impact, tex->vecs[1]) + tex->vecs[1][3]
            - surf->texturemins[1];
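
        // Clamp the light's impact point to the lightmap's extents; sd and
        // td then measure how far the light falls outside the surface, so
        // the radius tests below can work entirely in squared distances.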
        s = bound (0, local[0]+0.5, (smax-1)*16);
        t = bound (0, local[1]+0.5, (tmax-1)*16);

        sd = local[0] - s;
        td = local[1] - t;
        dist *= dist;

        // get brightest color's value
        red = cl_dlights[lnum].color[0];
        green = cl_dlights[lnum].color[1];
        blue = cl_dlights[lnum].color[2];
        brightness = max(red, max(green, blue));
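
        // rada is a squared-radius cutoff used to reject texels (and whole
        // rows or surfaces) this light cannot reach; radb feeds the
        // per-texel inverse-square falloff below.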
        rada = (cl_dlights[lnum].radius * cl_dlights[lnum].radius * 8)
            / (4.0/16.0);   // comparison to min acceptable light
        if (rada*brightness >= (sd*sd + td*td + dist)) {
            radb = cl_dlights[lnum].radius
                * cl_dlights[lnum].radius * (256.0*4.0);
            bl = blocklights;
            for (t = 0; t < tmax; t++)
            {
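                // td becomes the squared distance from the light's impact
                // point to this row of samples (16 texture units apart),
                // plus the squared distance to the surface plane.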
                td = local[1] - t*16;
                td = td*td + dist;
                if (rada >= td)     // any visible this line?
                {
                    for (s = 0; s < smax; s++)
                    {
                        sd = local[0] - s*16;
                        if (rada >= (sd*sd+td))     // minimum light
                        {
                            brightness = radb / (sd*sd+td);
                            *bl++ += brightness * red;
                            *bl++ += brightness * green;
                            *bl++ += brightness * blue;
                        } else
                            bl += 3;        // skip pixel
                    }
                } else
                    bl += smax*3;   // skip line
            }
        }
    }
}

/*
    R_BuildLightMap

    Combine and scale multiple lightmaps.
    After talking it over with LordHavoc, I've decided to switch to using
    GL_RGB for colored lights and averaging them out for plain white
    lighting if needed.  Much cleaner that way.  --KB
*/
void
R_BuildLightMap (msurface_t *surf, byte *dest, int stride)
{
    int         smax, tmax;
    int         t;
    int         i, j, size;
    byte        *lightmap;
    unsigned    scale;
    int         maps;
    float       t2;
    unsigned    *bl;
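
    // Remember whether this surface is hit by dynamic light this frame,
    // presumably so later frames know to rebuild the lightmap once the
    // dynamic light moves away.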
    surf->cached_dlight = (surf->dlightframe == r_framecount);

    smax = (surf->extents[0]>>4)+1;
    tmax = (surf->extents[1]>>4)+1;
    size = smax*tmax;
    lightmap = surf->samples;
// set to full bright if no light data
|
|
|
|
if (/*r_fullbright->value ||*/ !cl.worldmodel->lightdata)
|
2000-05-10 11:29:38 +00:00
|
|
|
{
|
2000-06-03 19:56:09 +00:00
|
|
|
bl = blocklights;
|
2000-05-10 11:29:38 +00:00
|
|
|
for (i=0 ; i<size ; i++)
|
2000-06-03 19:56:09 +00:00
|
|
|
{
|
|
|
|
*bl++ = 255*256;
|
|
|
|
*bl++ = 255*256;
|
|
|
|
*bl++ = 255*256;
|
|
|
|
}
|
2000-05-10 11:29:38 +00:00
|
|
|
goto store;
|
|
|
|
}
|
|
|
|
|
2000-06-03 19:56:09 +00:00
|
|
|
// clear to no light
|
|
|
|
bl = blocklights;
|
2000-05-10 11:29:38 +00:00
|
|
|
for (i=0 ; i<size ; i++)
|
2000-06-03 19:56:09 +00:00
|
|
|
{
|
|
|
|
*bl++ = 0;
|
|
|
|
*bl++ = 0;
|
|
|
|
*bl++ = 0;
|
|
|
|
}
|
|
|
|
bl = blocklights;
|
2000-05-10 11:29:38 +00:00
|
|
|
|
2000-06-03 19:56:09 +00:00
|
|
|
// add all the lightmaps
|
2000-05-10 11:29:38 +00:00
|
|
|
if (lightmap)
|
2000-06-03 19:56:09 +00:00
|
|
|
for (maps = 0;
|
|
|
|
maps < MAXLIGHTMAPS && surf->styles[maps] != 255;
|
|
|
|
maps++)
|
2000-05-10 11:29:38 +00:00
|
|
|
{
|
|
|
|
scale = d_lightstylevalue[surf->styles[maps]];
|
|
|
|
surf->cached_light[maps] = scale; // 8.8 fraction
|
2000-06-03 19:56:09 +00:00
|
|
|
bl = blocklights;
|
2000-05-10 11:29:38 +00:00
|
|
|
for (i=0 ; i<size ; i++)
|
2000-05-25 04:16:41 +00:00
|
|
|
{
|
2000-06-03 19:56:09 +00:00
|
|
|
*bl++ += *lightmap++ * scale;
|
|
|
|
*bl++ += *lightmap++ * scale;
|
|
|
|
*bl++ += *lightmap++ * scale;
|
2000-05-25 04:16:41 +00:00
|
|
|
}
|
2000-05-10 11:29:38 +00:00
|
|
|
}
|
|
|
|
|
2000-06-03 19:56:09 +00:00
|
|
|
// add all the dynamic lights
|
2000-05-10 11:29:38 +00:00
|
|
|
if (surf->dlightframe == r_framecount)
|
|
|
|
R_AddDynamicLights (surf);
|
|
|
|
|
|
|
|
store:
|
2000-06-03 19:56:09 +00:00
|
|
|
// bound and shift
|
2000-05-25 04:16:41 +00:00
|
|
|
if (gl_colorlights->value)
|
2000-05-10 11:29:38 +00:00
|
|
|
{
|
2000-06-03 19:56:09 +00:00
|
|
|
stride -= smax * 3;
|
|
|
|
bl = blocklights;
|
|
|
|
for (i = 0; i < tmax; i++, dest += stride)
|
2000-05-10 11:29:38 +00:00
|
|
|
for (j=0 ; j<smax ; j++)
|
|
|
|
{
|
2000-06-03 19:56:09 +00:00
|
|
|
t = (int) *bl++ >> 8;
|
|
|
|
*dest++ = bound(0, t, 255);
|
|
|
|
t = (int) *bl++ >> 8;
|
|
|
|
*dest++ = bound(0, t, 255);
|
|
|
|
t = (int) *bl++ >> 8;
|
|
|
|
*dest++ = bound(0, t, 255);
|
2000-05-10 11:29:38 +00:00
|
|
|
}
|
2000-05-25 04:16:41 +00:00
|
|
|
} else {
|
2000-06-03 19:56:09 +00:00
|
|
|
stride -= smax;
|
|
|
|
bl = blocklights;
|
|
|
|
for (i = 0; i < tmax; i++, dest += stride)
|
2000-05-10 11:29:38 +00:00
|
|
|
for (j=0 ; j<smax ; j++)
|
|
|
|
{
|
2000-06-03 19:56:09 +00:00
|
|
|
t = (int) *bl++ >> 8;
|
|
|
|
t2 = bound(0, t, 255);
|
|
|
|
t = (int) *bl++ >> 8;
|
|
|
|
t2 += bound(0, t, 255);
|
|
|
|
t = (int) *bl++ >> 8;
|
|
|
|
t2 += bound(0, t, 255);
|
|
|
|
t2 *= (1.0/3.0);
|
|
|
|
*dest++ = t2;
|
2000-05-10 11:29:38 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
|
|
|
|
===============
|
|
|
|
R_TextureAnimation
|
|
|
|
|
|
|
|
Returns the proper texture for a given time and base texture
|
|
|
|
===============
|
|
|
|
*/
|
|
|
|
texture_t *R_TextureAnimation (texture_t *base)
|
|
|
|
{
|
|
|
|
int relative;
|
|
|
|
int count;
|
|
|
|
|
|
|
|
if (currententity->frame)
|
|
|
|
{
|
|
|
|
if (base->alternate_anims)
|
|
|
|
base = base->alternate_anims;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!base->anim_total)
|
|
|
|
return base;
|
|
|
|
|
|
|
|
relative = (int)(cl.time*10) % base->anim_total;
|
|
|
|
|
|
|
|
count = 0;
|
|
|
|
while (base->anim_min > relative || base->anim_max <= relative)
|
|
|
|
{
|
|
|
|
base = base->anim_next;
|
|
|
|
if (!base)
|
|
|
|
Sys_Error ("R_TextureAnimation: broken cycle");
|
|
|
|
if (++count > 100)
|
|
|
|
Sys_Error ("R_TextureAnimation: infinite cycle");
|
|
|
|
}
|
|
|
|
|
|
|
|
return base;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/*
|
|
|
|
=============================================================
|
|
|
|
|
|
|
|
BRUSH MODELS
|
|
|
|
|
|
|
|
=============================================================
|
|
|
|
*/
|
|
|
|
|
|
|
|
|
|
|
|
extern int solidskytexture;
|
|
|
|
extern int alphaskytexture;
|
|
|
|
extern float speedscale; // for top sky and bottom sky
|
|
|
|
|
|
|
|
void DrawGLWaterPoly (glpoly_t *p);
|
|
|
|
void DrawGLWaterPolyLightmap (glpoly_t *p);
|
|
|
|
|
2000-06-09 21:34:21 +00:00
|
|
|
lpMTexFUNC qglMTexCoord2f = NULL;
|
|
|
|
lpSelTexFUNC qglSelectTexture = NULL;
|
2000-05-10 11:29:38 +00:00
|
|
|
|
|
|
|
qboolean mtexenabled = false;
|
|
|
|
|
|
|
|
void GL_SelectTexture (GLenum target);
|
|
|
|
|
|
|
|
void GL_DisableMultitexture(void)
|
|
|
|
{
|
|
|
|
if (mtexenabled) {
|
|
|
|
glDisable(GL_TEXTURE_2D);
|
2000-06-09 21:34:21 +00:00
|
|
|
GL_SelectTexture(0);
|
2000-05-10 11:29:38 +00:00
|
|
|
mtexenabled = false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void GL_EnableMultitexture(void)
|
|
|
|
{
|
|
|
|
if (gl_mtexable) {
|
2000-06-09 21:34:21 +00:00
|
|
|
GL_SelectTexture(1);
|
2000-05-10 11:29:38 +00:00
|
|
|
glEnable(GL_TEXTURE_2D);
|
|
|
|
mtexenabled = true;
|
|
|
|
}
|
|
|
|
}
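Since the commit message plans a move from SGIS_multitexture to
ARB_multitexture, the qglMTexCoord2f / qglSelectTexture pointers declared above
would be resolved from that extension at startup. Below is a hedged sketch of
how such a lookup might look on a GLX system; it assumes the lpMTexFUNC /
lpSelTexFUNC typedefs and the gl_mtexable flag come from glquake.h and are
compatible with the ARB entry points, and it is not the loader QuakeForge
actually uses.

	#include <string.h>
	#include <GL/glx.h>

	void GL_CheckARBMultitexture (void)
	{
		const char *ext = (const char *) glGetString (GL_EXTENSIONS);

		if (ext && strstr (ext, "GL_ARB_multitexture")) {
			/* assumed: lpMTexFUNC / lpSelTexFUNC match the
			   ARB_multitexture entry point signatures */
			qglMTexCoord2f = (lpMTexFUNC)
				glXGetProcAddressARB ((const GLubyte *) "glMultiTexCoord2fARB");
			qglSelectTexture = (lpSelTexFUNC)
				glXGetProcAddressARB ((const GLubyte *) "glActiveTextureARB");
			gl_mtexable = (qglMTexCoord2f && qglSelectTexture);
		}
	}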
|
|
|
|
|
|
|
|
/*
|
|
|
|
================
|
|
|
|
R_DrawSequentialPoly
|
|
|
|
|
|
|
|
Systems that have fast state and texture changes can
|
|
|
|
just do everything as it passes with no need to sort
|
|
|
|
================
|
|
|
|
*/
|
|
|
|
void R_DrawSequentialPoly (msurface_t *s)
|
|
|
|
{
|
|
|
|
glpoly_t *p;
|
|
|
|
float *v;
|
|
|
|
int i;
|
|
|
|
texture_t *t;
|
2000-06-03 22:32:53 +00:00
|
|
|
vec3_t nv;
|
|
|
|
glRect_t *theRect;
|
2000-05-10 11:29:38 +00:00
|
|
|
|
|
|
|
//
|
|
|
|
// normal lightmapped poly
|
|
|
|
//
|
|
|
|
|
|
|
|
if (! (s->flags & (SURF_DRAWSKY|SURF_DRAWTURB|SURF_UNDERWATER) ) )
|
|
|
|
{
|
|
|
|
R_RenderDynamicLightmaps (s);
|
2000-06-03 22:32:53 +00:00
|
|
|
if (gl_mtexable)
|
2000-06-03 19:56:09 +00:00
|
|
|
{
|
2000-06-03 22:32:53 +00:00
|
|
|
p = s->polys;
|
2000-05-10 11:29:38 +00:00
|
|
|
|
|
|
|
t = R_TextureAnimation (s->texinfo->texture);
|
|
|
|
// Binds world to texture env 0
|
2000-06-09 21:34:21 +00:00
|
|
|
GL_SelectTexture(0);
|
2000-05-10 11:29:38 +00:00
|
|
|
GL_Bind (t->gl_texturenum);
|
2000-06-03 19:56:09 +00:00
|
|
|
// glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE);
|
2000-05-10 11:29:38 +00:00
|
|
|
// Binds lightmap to texenv 1
|
|
|
|
GL_EnableMultitexture(); // Same as SelectTexture (TEXTURE1)
|
|
|
|
GL_Bind (lightmap_textures + s->lightmaptexturenum);
|
|
|
|
i = s->lightmaptexturenum;
|
|
|
|
if (lightmap_modified[i])
|
|
|
|
{
|
|
|
|
lightmap_modified[i] = false;
|
|
|
|
theRect = &lightmap_rectchange[i];
|
|
|
|
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, theRect->t,
|
|
|
|
BLOCK_WIDTH, theRect->h, gl_lightmap_format, GL_UNSIGNED_BYTE,
|
|
|
|
lightmaps+(i* BLOCK_HEIGHT + theRect->t) *BLOCK_WIDTH*lightmap_bytes);
|
|
|
|
theRect->l = BLOCK_WIDTH;
|
|
|
|
theRect->t = BLOCK_HEIGHT;
|
|
|
|
theRect->h = 0;
|
|
|
|
theRect->w = 0;
|
|
|
|
}
|
2000-06-06 11:34:50 +00:00
|
|
|
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
|
2000-05-10 11:29:38 +00:00
|
|
|
glBegin(GL_POLYGON);
|
|
|
|
v = p->verts[0];
|
|
|
|
for (i=0 ; i<p->numverts ; i++, v+= VERTEXSIZE)
|
|
|
|
{
|
2000-06-09 21:34:21 +00:00
|
|
|
qglMTexCoord2f (gl_mtex_enum + 0, v[3], v[4]);
|
|
|
|
qglMTexCoord2f (gl_mtex_enum + 1, v[5], v[6]);
|
2000-05-10 11:29:38 +00:00
|
|
|
glVertex3fv (v);
|
|
|
|
}
|
|
|
|
glEnd ();
|
2000-06-03 22:32:53 +00:00
|
|
|
return;
|
2000-05-10 11:29:38 +00:00
|
|
|
} else {
|
|
|
|
p = s->polys;
|
|
|
|
|
|
|
|
t = R_TextureAnimation (s->texinfo->texture);
|
|
|
|
GL_Bind (t->gl_texturenum);
|
|
|
|
glBegin (GL_POLYGON);
|
|
|
|
v = p->verts[0];
|
|
|
|
for (i=0 ; i<p->numverts ; i++, v+= VERTEXSIZE)
|
|
|
|
{
|
|
|
|
glTexCoord2f (v[3], v[4]);
|
|
|
|
glVertex3fv (v);
|
|
|
|
}
|
|
|
|
glEnd ();
|
|
|
|
|
|
|
|
GL_Bind (lightmap_textures + s->lightmaptexturenum);
|
|
|
|
glBegin (GL_POLYGON);
|
|
|
|
v = p->verts[0];
|
|
|
|
for (i=0 ; i<p->numverts ; i++, v+= VERTEXSIZE)
|
|
|
|
{
|
|
|
|
glTexCoord2f (v[5], v[6]);
|
|
|
|
glVertex3fv (v);
|
|
|
|
}
|
|
|
|
glEnd ();
|
|
|
|
}
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
//
|
|
|
|
// subdivided water surface warp
|
|
|
|
//
|
|
|
|
|
|
|
|
if (s->flags & SURF_DRAWTURB)
|
|
|
|
{
|
|
|
|
GL_DisableMultitexture();
|
|
|
|
GL_Bind (s->texinfo->texture->gl_texturenum);
|
|
|
|
EmitWaterPolys (s);
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
//
|
|
|
|
// subdivided sky warp
|
|
|
|
//
|
|
|
|
if (s->flags & SURF_DRAWSKY)
|
|
|
|
{
|
|
|
|
GL_DisableMultitexture();
|
|
|
|
GL_Bind (solidskytexture);
|
|
|
|
speedscale = realtime*8;
|
|
|
|
speedscale -= (int)speedscale & ~127;
|
|
|
|
|
|
|
|
EmitSkyPolys (s);
|
|
|
|
|
|
|
|
GL_Bind (alphaskytexture);
|
|
|
|
speedscale = realtime*16;
|
|
|
|
speedscale -= (int)speedscale & ~127;
|
|
|
|
EmitSkyPolys (s);
|
|
|
|
|
|
|
|
return;
|
|
|
|
}
|
|
|
|
|
|
|
|
//
|
|
|
|
// underwater warped with lightmap
|
|
|
|
//
|
|
|
|
R_RenderDynamicLightmaps (s);
|
2000-06-03 22:32:53 +00:00
|
|
|
if (gl_mtexable)
|
2000-06-03 19:56:09 +00:00
	{
		p = s->polys;

		t = R_TextureAnimation (s->texinfo->texture);
		GL_SelectTexture(0);
		GL_Bind (t->gl_texturenum);
		GL_EnableMultitexture();
		GL_Bind (lightmap_textures + s->lightmaptexturenum);
		i = s->lightmaptexturenum;
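		// if this lightmap block was touched by dynamic lights, re-upload
		// only the modified rows before drawing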
		if (lightmap_modified[i])
		{
			lightmap_modified[i] = false;
			theRect = &lightmap_rectchange[i];
			glTexSubImage2D(GL_TEXTURE_2D, 0, 0, theRect->t,
				BLOCK_WIDTH, theRect->h, gl_lightmap_format, GL_UNSIGNED_BYTE,
				lightmaps+(i* BLOCK_HEIGHT + theRect->t) *BLOCK_WIDTH*lightmap_bytes);
			theRect->l = BLOCK_WIDTH;
			theRect->t = BLOCK_HEIGHT;
			theRect->h = 0;
			theRect->w = 0;
		}
		glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
		glBegin (GL_TRIANGLE_FAN);
		v = p->verts[0];
		for (i=0 ; i<p->numverts ; i++, v+= VERTEXSIZE)
		{
			qglMTexCoord2f (gl_mtex_enum + 0, v[3], v[4]);
			qglMTexCoord2f (gl_mtex_enum + 1, v[5], v[6]);

			nv[0] = v[0] + 8*sin(v[1]*0.05+realtime)*sin(v[2]*0.05+realtime);
			nv[1] = v[1] + 8*sin(v[0]*0.05+realtime)*sin(v[2]*0.05+realtime);
			nv[2] = v[2];

			glVertex3fv (nv);
		}
		glEnd ();
	} else {
		p = s->polys;

		t = R_TextureAnimation (s->texinfo->texture);
		GL_Bind (t->gl_texturenum);
		DrawGLWaterPoly (p);

		GL_Bind (lightmap_textures + s->lightmaptexturenum);
		DrawGLWaterPolyLightmap (p);
	}
}

/*
================
DrawGLWaterPoly

Warp the vertex coordinates
================
*/
void DrawGLWaterPoly (glpoly_t *p)
{
	int		i;
	float	*v;
	vec3_t	nv;

	GL_DisableMultitexture();

	glBegin (GL_TRIANGLE_FAN);
	v = p->verts[0];
	for (i=0 ; i<p->numverts ; i++, v+= VERTEXSIZE)
	{
		glTexCoord2f (v[3], v[4]);

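		// displace x and y by the product of two slow sine waves of the
		// other coordinates and time, so the surface appears to ripple;
		// z is left untouched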
		nv[0] = v[0] + 8*sin(v[1]*0.05+realtime)*sin(v[2]*0.05+realtime);
		nv[1] = v[1] + 8*sin(v[0]*0.05+realtime)*sin(v[2]*0.05+realtime);
		nv[2] = v[2];

		glVertex3fv (nv);
	}
	glEnd ();
}

void DrawGLWaterPolyLightmap (glpoly_t *p)
{
	int		i;
	float	*v;
	vec3_t	nv;

	GL_DisableMultitexture();

	glBegin (GL_TRIANGLE_FAN);
	v = p->verts[0];
	for (i=0 ; i<p->numverts ; i++, v+= VERTEXSIZE)
	{
		glTexCoord2f (v[5], v[6]);

		nv[0] = v[0] + 8*sin(v[1]*0.05+realtime)*sin(v[2]*0.05+realtime);
		nv[1] = v[1] + 8*sin(v[0]*0.05+realtime)*sin(v[2]*0.05+realtime);
		nv[2] = v[2];

		glVertex3fv (nv);
	}
	glEnd ();
}

/*
================
DrawGLPoly
================
*/
void DrawGLPoly (glpoly_t *p)
{
	int		i;
	float	*v;

	glBegin (GL_POLYGON);
	v = p->verts[0];
	for (i=0 ; i<p->numverts ; i++, v+= VERTEXSIZE)
	{
		glTexCoord2f (v[3], v[4]);
		glVertex3fv (v);
	}
	glEnd ();
}


/*
================
R_BlendLightmaps
================
*/
void R_BlendLightmaps (void)
{
	int			i, j;
	glpoly_t	*p;
	float		*v;
	glRect_t	*theRect;

	if (!gl_texsort->value)
		return;

	glDepthMask (0);	// don't bother writing Z

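	// modulate the lightmap into what has already been drawn:
	// dest = dest * lightmap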
	glBlendFunc (GL_ZERO, GL_SRC_COLOR);

	for (i=0 ; i<MAX_LIGHTMAPS ; i++)
	{
		p = lightmap_polys[i];
		if (!p)
			continue;
		GL_Bind(lightmap_textures+i);
		if (lightmap_modified[i])
		{
			lightmap_modified[i] = false;
			theRect = &lightmap_rectchange[i];
//			theRect->l = 0;
//			theRect->t = 0;
//			theRect->w = BLOCK_WIDTH;
//			theRect->h = BLOCK_HEIGHT;
//			glTexImage2D (GL_TEXTURE_2D, 0, lightmap_bytes
//				, BLOCK_WIDTH, BLOCK_HEIGHT, 0,
//				gl_lightmap_format, GL_UNSIGNED_BYTE, lightmaps+i*BLOCK_WIDTH*BLOCK_HEIGHT*lightmap_bytes);
//			glTexImage2D (GL_TEXTURE_2D, 0, lightmap_bytes
//				, BLOCK_WIDTH, theRect->h, 0,
//				gl_lightmap_format, GL_UNSIGNED_BYTE, lightmaps+(i*BLOCK_HEIGHT+theRect->t)*BLOCK_WIDTH*lightmap_bytes);
			glTexSubImage2D(GL_TEXTURE_2D, 0, 0, theRect->t,
				BLOCK_WIDTH, theRect->h, gl_lightmap_format, GL_UNSIGNED_BYTE,
				lightmaps+(i* BLOCK_HEIGHT + theRect->t) *BLOCK_WIDTH*lightmap_bytes);
			theRect->l = BLOCK_WIDTH;
			theRect->t = BLOCK_HEIGHT;
			theRect->h = 0;
			theRect->w = 0;
		}
		for ( ; p ; p=p->chain)
		{
//			if (p->flags & SURF_UNDERWATER)
//				DrawGLWaterPolyLightmap (p);
			if (((r_viewleaf->contents==CONTENTS_EMPTY && (p->flags & SURF_UNDERWATER)) ||
				(r_viewleaf->contents!=CONTENTS_EMPTY && !(p->flags & SURF_UNDERWATER)))
				&& !(p->flags & SURF_DONTWARP))
				DrawGLWaterPolyLightmap (p);
			else
			{
				glBegin (GL_POLYGON);
				v = p->verts[0];
				for (j=0 ; j<p->numverts ; j++, v+= VERTEXSIZE)
				{
					glTexCoord2f (v[5], v[6]);
					glVertex3fv (v);
				}
				glEnd ();
			}
		}
	}

	// Return to normal blending --KB
	glBlendFunc (GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);

	glDepthMask (1);	// back to normal Z buffering
}

/*
================
R_RenderBrushPoly
================
*/
void R_RenderBrushPoly (msurface_t *fa)
{
	texture_t	*t;
	byte		*base;
	int			maps;
	glRect_t	*theRect;
	int			smax, tmax;

	c_brush_polys++;

	if (fa->flags & SURF_DRAWSKY)
	{	// warp texture, no lightmaps
		EmitBothSkyLayers (fa);
		return;
	}

	t = R_TextureAnimation (fa->texinfo->texture);
	GL_Bind (t->gl_texturenum);

	if (fa->flags & SURF_DRAWTURB)
	{	// warp texture, no lightmaps
		EmitWaterPolys (fa);
		return;
	}

	if (((r_viewleaf->contents==CONTENTS_EMPTY && (fa->flags & SURF_UNDERWATER)) ||
		(r_viewleaf->contents!=CONTENTS_EMPTY && !(fa->flags & SURF_UNDERWATER)))
		&& !(fa->flags & SURF_DONTWARP))
		DrawGLWaterPoly (fa->polys);
	else
		DrawGLPoly (fa->polys);

	// add the poly to the proper lightmap chain

	fa->polys->chain = lightmap_polys[fa->lightmaptexturenum];
	lightmap_polys[fa->lightmaptexturenum] = fa->polys;

	// check for lightmap modification
	for (maps = 0 ; maps < MAXLIGHTMAPS && fa->styles[maps] != 255 ;
		 maps++)
		if (d_lightstylevalue[fa->styles[maps]] != fa->cached_light[maps])
			goto dynamic;

	if (fa->dlightframe == r_framecount	// dynamic this frame
		|| fa->cached_dlight)			// dynamic previously
	{
dynamic:
		if (r_dynamic->value)
		{
			lightmap_modified[fa->lightmaptexturenum] = true;
			theRect = &lightmap_rectchange[fa->lightmaptexturenum];
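			// grow the block's changed rectangle so it covers this
			// surface's portion of the lightmap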
			if (fa->light_t < theRect->t) {
				if (theRect->h)
					theRect->h += theRect->t - fa->light_t;
				theRect->t = fa->light_t;
			}
			if (fa->light_s < theRect->l) {
				if (theRect->w)
					theRect->w += theRect->l - fa->light_s;
				theRect->l = fa->light_s;
			}
			smax = (fa->extents[0]>>4)+1;
			tmax = (fa->extents[1]>>4)+1;
			if ((theRect->w + theRect->l) < (fa->light_s + smax))
				theRect->w = (fa->light_s-theRect->l)+smax;
			if ((theRect->h + theRect->t) < (fa->light_t + tmax))
				theRect->h = (fa->light_t-theRect->t)+tmax;
			base = lightmaps + fa->lightmaptexturenum*lightmap_bytes*BLOCK_WIDTH*BLOCK_HEIGHT;
			base += fa->light_t * BLOCK_WIDTH * lightmap_bytes + fa->light_s * lightmap_bytes;
			R_BuildLightMap (fa, base, BLOCK_WIDTH*lightmap_bytes);
		}
	}
}

/*
================
R_RenderDynamicLightmaps
Multitexture
================
*/
void R_RenderDynamicLightmaps (msurface_t *fa)
{
	byte		*base;
	int			maps;
	glRect_t	*theRect;
	int			smax, tmax;

	c_brush_polys++;

	if (fa->flags & ( SURF_DRAWSKY | SURF_DRAWTURB) )
		return;

	fa->polys->chain = lightmap_polys[fa->lightmaptexturenum];
	lightmap_polys[fa->lightmaptexturenum] = fa->polys;

	// check for lightmap modification
	for (maps = 0 ; maps < MAXLIGHTMAPS && fa->styles[maps] != 255 ;
		 maps++)
		if (d_lightstylevalue[fa->styles[maps]] != fa->cached_light[maps])
			goto dynamic;

	if (fa->dlightframe == r_framecount	// dynamic this frame
		|| fa->cached_dlight)			// dynamic previously
	{
dynamic:
		if (r_dynamic->value)
		{
			lightmap_modified[fa->lightmaptexturenum] = true;
			theRect = &lightmap_rectchange[fa->lightmaptexturenum];
			if (fa->light_t < theRect->t) {
				if (theRect->h)
					theRect->h += theRect->t - fa->light_t;
				theRect->t = fa->light_t;
			}
			if (fa->light_s < theRect->l) {
				if (theRect->w)
					theRect->w += theRect->l - fa->light_s;
				theRect->l = fa->light_s;
			}
			smax = (fa->extents[0]>>4)+1;
			tmax = (fa->extents[1]>>4)+1;
			if ((theRect->w + theRect->l) < (fa->light_s + smax))
				theRect->w = (fa->light_s-theRect->l)+smax;
			if ((theRect->h + theRect->t) < (fa->light_t + tmax))
				theRect->h = (fa->light_t-theRect->t)+tmax;
			base = lightmaps + fa->lightmaptexturenum*lightmap_bytes*BLOCK_WIDTH*BLOCK_HEIGHT;
			base += fa->light_t * BLOCK_WIDTH * lightmap_bytes + fa->light_s * lightmap_bytes;
			R_BuildLightMap (fa, base, BLOCK_WIDTH*lightmap_bytes);
		}
	}
}


/*
================
R_MirrorChain
================
*/
void R_MirrorChain (msurface_t *s)
{
	if (mirror)
		return;
	mirror = true;
	mirror_plane = s->plane;
}


/*
================
R_DrawWaterSurfaces
================
*/
void R_DrawWaterSurfaces (void)
{
	int			i;
	msurface_t	*s;
	texture_t	*t;

	if (r_wateralpha->value == 1.0 && gl_texsort->value)
		return;

	//
	// go back to the world matrix
	//
	glLoadMatrixf (r_world_matrix);

	if (r_wateralpha->value < 1.0) {
		glColor4f (0.5, 0.5, 0.5, r_wateralpha->value);
		glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
	} else
		glColor3f (0.5, 0.5, 0.5);

	if (!gl_texsort->value) {
		if (!waterchain)
			return;

		for ( s = waterchain ; s ; s=s->texturechain) {
			GL_Bind (s->texinfo->texture->gl_texturenum);
			EmitWaterPolys (s);
		}

		waterchain = NULL;
	} else {

		for (i=0 ; i<cl.worldmodel->numtextures ; i++)
		{
			t = cl.worldmodel->textures[i];
			if (!t)
				continue;
			s = t->texturechain;
			if (!s)
				continue;
			if ( !(s->flags & SURF_DRAWTURB ) )
				continue;

			// set modulate mode explicitly

			GL_Bind (t->gl_texturenum);

			for ( ; s ; s=s->texturechain)
				EmitWaterPolys (s);

			t->texturechain = NULL;
		}

	}

	if (r_wateralpha->value < 1.0)
		glColor3f (0.5, 0.5, 0.5);
}


/*
================
DrawTextureChains
================
*/
void DrawTextureChains (void)
{
	int			i;
	msurface_t	*s;
	texture_t	*t;

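	// when gl_texsort is off, world surfaces are drawn sequentially as
	// the BSP is walked; only the accumulated sky chain is handled here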
	if (!gl_texsort->value) {
		GL_DisableMultitexture();

		if (skychain)
		{
			R_DrawSkyChain(skychain);
			skychain = NULL;
		}

		return;
	}

	for (i=0 ; i<cl.worldmodel->numtextures ; i++)
	{
		t = cl.worldmodel->textures[i];
		if (!t)
			continue;
		s = t->texturechain;
		if (!s)
			continue;
		if (i == skytexturenum)
			R_DrawSkyChain (s);
		else if (i == mirrortexturenum && r_mirroralpha->value != 1.0)
		{
			R_MirrorChain (s);
			continue;
		}
		else
		{
			if ((s->flags & SURF_DRAWTURB) && r_wateralpha->value != 1.0)
				continue;	// draw translucent water later
			for ( ; s ; s=s->texturechain)
				R_RenderBrushPoly (s);
		}

		t->texturechain = NULL;
	}
}

/*
=================
R_DrawBrushModel
=================
*/
void R_DrawBrushModel (entity_t *e)
{
	int			i;
	int			k;
	vec3_t		mins, maxs;
	msurface_t	*psurf;
	float		dot;
	mplane_t	*pplane;
	model_t		*clmodel;
	qboolean	rotated;

	currententity = e;
	currenttexture = -1;

	clmodel = e->model;

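	// a rotated bmodel can't use its axis-aligned bounds directly, so
	// cull against a box built from the model's bounding radius instead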
	if (e->angles[0] || e->angles[1] || e->angles[2])
	{
		rotated = true;
		for (i=0 ; i<3 ; i++)
		{
			mins[i] = e->origin[i] - clmodel->radius;
			maxs[i] = e->origin[i] + clmodel->radius;
		}
	}
	else
	{
		rotated = false;
		VectorAdd (e->origin, clmodel->mins, mins);
		VectorAdd (e->origin, clmodel->maxs, maxs);
	}

	if (R_CullBox (mins, maxs))
		return;

	glColor3f (1.0, 1.0, 1.0);
	memset (lightmap_polys, 0, sizeof(lightmap_polys));

	VectorSubtract (r_refdef.vieworg, e->origin, modelorg);
	if (rotated)
	{
		vec3_t	temp;
		vec3_t	forward, right, up;

		VectorCopy (modelorg, temp);
		AngleVectors (e->angles, forward, right, up);
		modelorg[0] = DotProduct (temp, forward);
		modelorg[1] = -DotProduct (temp, right);
		modelorg[2] = DotProduct (temp, up);
	}

	psurf = &clmodel->surfaces[clmodel->firstmodelsurface];

	// calculate dynamic lighting for bmodel if it's not an
	// instanced model
	if (clmodel->firstmodelsurface != 0 && !gl_flashblend->value)
	{
		for (k=0 ; k<MAX_DLIGHTS ; k++)
		{
			if ((cl_dlights[k].die < cl.time) ||
				(!cl_dlights[k].radius))
				continue;

			R_MarkLights (&cl_dlights[k], 1<<k,
				clmodel->nodes + clmodel->hulls[0].firstclipnode);
		}
	}

	glPushMatrix ();
	e->angles[0] = -e->angles[0];	// stupid quake bug
	R_RotateForEntity (e);
	e->angles[0] = -e->angles[0];	// stupid quake bug

	//
	// draw texture
	//
	for (i=0 ; i<clmodel->nummodelsurfaces ; i++, psurf++)
	{
	// find which side of the node we are on
		pplane = psurf->plane;

		dot = DotProduct (modelorg, pplane->normal) - pplane->dist;

	// draw the polygon
		if (((psurf->flags & SURF_PLANEBACK) && (dot < -BACKFACE_EPSILON)) ||
			(!(psurf->flags & SURF_PLANEBACK) && (dot > BACKFACE_EPSILON)))
		{
			if (gl_texsort->value)
				R_RenderBrushPoly (psurf);
			else
				R_DrawSequentialPoly (psurf);
		}
	}

	R_BlendLightmaps ();

	glPopMatrix ();
}


/*
=============================================================

	WORLD MODEL

=============================================================
*/

/*
================
R_RecursiveWorldNode
================
*/
void R_RecursiveWorldNode (mnode_t *node)
{
	int			c, side;
	mplane_t	*plane;
	msurface_t	*surf, **mark;
	mleaf_t		*pleaf;
	double		dot;

	if (node->contents == CONTENTS_SOLID)
		return;		// solid

	if (node->visframe != r_visframecount)
		return;
	if (R_CullBox (node->minmaxs, node->minmaxs+3))
		return;

// if a leaf node, draw stuff
	if (node->contents < 0)
	{
		pleaf = (mleaf_t *)node;

		mark = pleaf->firstmarksurface;
		c = pleaf->nummarksurfaces;

		if (c)
		{
			do
			{
				(*mark)->visframe = r_framecount;
				mark++;
			} while (--c);
		}

		// deal with model fragments in this leaf
		if (pleaf->efrags)
			R_StoreEfrags (&pleaf->efrags);

		return;
	}

// node is just a decision point, so go down the appropriate sides

// find which side of the node we are on
	plane = node->plane;

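	// axial planes can take the distance straight from the matching
	// coordinate; arbitrary planes need the full dot product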
	switch (plane->type)
	{
	case PLANE_X:
		dot = modelorg[0] - plane->dist;
		break;
	case PLANE_Y:
		dot = modelorg[1] - plane->dist;
		break;
	case PLANE_Z:
		dot = modelorg[2] - plane->dist;
		break;
	default:
		dot = DotProduct (modelorg, plane->normal) - plane->dist;
		break;
	}

	if (dot >= 0)
		side = 0;
	else
		side = 1;

|
|
|
    // recurse down the children, front side first
    R_RecursiveWorldNode (node->children[side]);
    // draw stuff
    c = node->numsurfaces;

    if (c)
    {
        surf = cl.worldmodel->surfaces + node->firstsurface;

        if (dot < 0 - BACKFACE_EPSILON)
            side = SURF_PLANEBACK;
        else if (dot > BACKFACE_EPSILON)
            side = 0;
        {
            for ( ; c ; c--, surf++)
            {
                if (surf->visframe != r_framecount)
                    continue;

                // don't backface underwater surfaces, because they warp
//              if ( !(surf->flags & SURF_UNDERWATER) && ( (dot < 0) ^ !!(surf->flags & SURF_PLANEBACK)) )
//                  continue;       // wrong side
                if ( !(((r_viewleaf->contents == CONTENTS_EMPTY && (surf->flags & SURF_UNDERWATER)) ||
                        (r_viewleaf->contents != CONTENTS_EMPTY && !(surf->flags & SURF_UNDERWATER)))
                       && !(surf->flags & SURF_DONTWARP)) && ( (dot < 0) ^ !!(surf->flags & SURF_PLANEBACK)) )
                    continue;       // wrong side

                // if sorting by texture, just store it out
                if (gl_texsort->value)
                {
                    if (!mirror
                        || surf->texinfo->texture != cl.worldmodel->textures[mirrortexturenum])
                    {
                        surf->texturechain = surf->texinfo->texture->texturechain;
                        surf->texinfo->texture->texturechain = surf;
                    }
                } else if (surf->flags & SURF_DRAWSKY) {
                    surf->texturechain = skychain;
                    skychain = surf;
                } else if (surf->flags & SURF_DRAWTURB) {
                    surf->texturechain = waterchain;
                    waterchain = surf;
                } else
                    R_DrawSequentialPoly (surf);
            }
        }
    }
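    // Note: with gl_texsort on, surfaces are only queued here on per-texture
    // linked lists (texturechain); DrawTextureChains () walks them later so
    // each texture is bound once per frame.  Sky and water surfaces always go
    // on their own chains, the mirror texture is kept off the normal chains,
    // and with gl_texsort off everything else is drawn immediately through
    // R_DrawSequentialPoly ().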
    // recurse down the back side
    R_RecursiveWorldNode (node->children[!side]);
}


/*
=============
R_DrawWorld
=============
*/
void R_DrawWorld (void)
{
    entity_t    ent;

    memset (&ent, 0, sizeof(ent));
    ent.model = cl.worldmodel;

    VectorCopy (r_refdef.vieworg, modelorg);

    currententity = &ent;
    currenttexture = -1;
    glColor3f (1.0, 1.0, 1.0);
    memset (lightmap_polys, 0, sizeof(lightmap_polys));

    // Be sure to clear the skybox --KB
    R_ClearSkyBox ();

    R_RecursiveWorldNode (cl.worldmodel->nodes);
    DrawTextureChains ();

    R_BlendLightmaps ();

    // Adjust the depth range and draw the skybox, ensuring it's behind
    // everything else.  This fixes the problem where some things are
    // drawn as sky when something else should be drawn.  --KB
    glColor3f (0.5, 0.5, 0.5);
    glDepthRange (gldepthmax, gldepthmax);
    R_DrawSkyBox ();
    glDepthRange (gldepthmin, gldepthmax);
}

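/*
   R_MarkLeaves below depends on the PVS: Mod_LeafPVS returns a decompressed
   bit vector with one bit per world leaf.  Each visible leaf has its visframe
   stamped, and the stamp is propagated up the parent pointers so that
   R_RecursiveWorldNode can reject whole subtrees with a single visframe
   comparison.  With r_novis set, a solid 0xff vector marks everything.
*/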

/*
===============
R_MarkLeaves
===============
*/
void R_MarkLeaves (void)
{
    byte    *vis;
    mnode_t *node;
    int     i;
    byte    solid[4096];

    if (r_oldviewleaf == r_viewleaf && !r_novis->value)
        return;

    if (mirror)
        return;

    r_visframecount++;
    r_oldviewleaf = r_viewleaf;

    if (r_novis->value)
    {
        vis = solid;
        memset (solid, 0xff, (cl.worldmodel->numleafs + 7) >> 3);
    }
    else
        vis = Mod_LeafPVS (r_viewleaf, cl.worldmodel);

    for (i = 0 ; i < cl.worldmodel->numleafs ; i++)
    {
        if (vis[i >> 3] & (1 << (i & 7)))
        {
            node = (mnode_t *) &cl.worldmodel->leafs[i + 1];
            do
            {
                if (node->visframe == r_visframecount)
                    break;
                node->visframe = r_visframecount;
                node = node->parent;
            } while (node);
        }
    }
}

/*
=============================================================================

  LIGHTMAP ALLOCATION

=============================================================================
*/

// returns a texture number and the position inside it
int AllocBlock (int w, int h, int *x, int *y)
{
    int     i, j;
    int     best, best2;
    int     texnum;

    for (texnum = 0 ; texnum < MAX_LIGHTMAPS ; texnum++)
    {
        best = BLOCK_HEIGHT;

        for (i = 0 ; i < BLOCK_WIDTH - w ; i++)
        {
            best2 = 0;

            for (j = 0 ; j < w ; j++)
            {
                if (allocated[texnum][i + j] >= best)
                    break;
                if (allocated[texnum][i + j] > best2)
                    best2 = allocated[texnum][i + j];
            }
            if (j == w)
            {
                // this is a valid spot
                *x = i;
                *y = best = best2;
            }
        }

        if (best + h > BLOCK_HEIGHT)
            continue;

        for (i = 0 ; i < w ; i++)
            allocated[texnum][*x + i] = best + h;

        return texnum;
    }

    Sys_Error ("AllocBlock: full");
    return 0;
}

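/*
   AllocBlock above is a greedy "skyline" packer: allocated[texnum][col]
   holds the current fill height of every column in a BLOCK_WIDTH x
   BLOCK_HEIGHT lightmap page.  For each candidate x it takes the tallest
   column under the block (best2), keeps the lowest such skyline (best), and
   raises those columns by h once a page with room is found.  The sketch
   below is illustrative only and never compiled; GL_CreateSurfaceLightmap
   further down is the real caller.
*/
#if 0
static void
AllocBlock_example (void)
{
    int     s, t;
    int     page;

    // reserve an 18x18 luxel block; s/t receive its top-left corner
    page = AllocBlock (18, 18, &s, &t);

    // its texels start at
    //   lightmaps + (page * BLOCK_HEIGHT + t) * BLOCK_WIDTH * lightmap_bytes
    //             + s * lightmap_bytes
}
#endif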
mvertex_t   *r_pcurrentvertbase;
model_t     *currentmodel;

int     nColinElim;

/*
================
BuildSurfaceDisplayList
================
*/
void BuildSurfaceDisplayList (msurface_t *fa)
{
    int         i, lindex, lnumverts;
    medge_t     *pedges, *r_pedge;
    int         vertpage;
    float       *vec;
    float       s, t;
    glpoly_t    *poly;

    // reconstruct the polygon
    pedges = currentmodel->edges;
    lnumverts = fa->numedges;
    vertpage = 0;

    //
    // draw texture
    //
    poly = Hunk_Alloc (sizeof(glpoly_t) + (lnumverts - 4) * VERTEXSIZE * sizeof(float));
    poly->next = fa->polys;
    poly->flags = fa->flags;
    fa->polys = poly;
    poly->numverts = lnumverts;

    for (i = 0 ; i < lnumverts ; i++)
    {
        lindex = currentmodel->surfedges[fa->firstedge + i];

        if (lindex > 0)
        {
            r_pedge = &pedges[lindex];
            vec = r_pcurrentvertbase[r_pedge->v[0]].position;
        }
        else
        {
            r_pedge = &pedges[-lindex];
            vec = r_pcurrentvertbase[r_pedge->v[1]].position;
        }

        s = DotProduct (vec, fa->texinfo->vecs[0]) + fa->texinfo->vecs[0][3];
        s /= fa->texinfo->texture->width;

        t = DotProduct (vec, fa->texinfo->vecs[1]) + fa->texinfo->vecs[1][3];
        t /= fa->texinfo->texture->height;

        VectorCopy (vec, poly->verts[i]);
        poly->verts[i][3] = s;
        poly->verts[i][4] = t;
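        /*
           Lightmap texcoords, worked through: after subtracting texturemins,
           s/t are world units across the face and one lightmap luxel covers
           16 world units.  Adding light_s*16 / light_t*16 shifts into this
           surface's block inside the shared page (light_s/light_t come from
           AllocBlock), the +8 samples at the luxel centre, and dividing by
           BLOCK_WIDTH*16 / BLOCK_HEIGHT*16 maps the page onto 0..1.  For
           example, 40 units into a face placed at light_s = 10 gives
           (40 + 10*16 + 8) / (BLOCK_WIDTH*16), i.e. luxel 13 of the page.
        */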
        //
        // lightmap texture coordinates
        //
        s = DotProduct (vec, fa->texinfo->vecs[0]) + fa->texinfo->vecs[0][3];
        s -= fa->texturemins[0];
        s += fa->light_s * 16;
        s += 8;
        s /= BLOCK_WIDTH * 16;      //fa->texinfo->texture->width;

        t = DotProduct (vec, fa->texinfo->vecs[1]) + fa->texinfo->vecs[1][3];
        t -= fa->texturemins[1];
        t += fa->light_t * 16;
        t += 8;
        t /= BLOCK_HEIGHT * 16;     //fa->texinfo->texture->height;

        poly->verts[i][5] = s;
        poly->verts[i][6] = t;
    }

    //
    // remove co-linear points - Ed
    //
    if (!gl_keeptjunctions->value && !(fa->flags & SURF_UNDERWATER))
    {
        for (i = 0 ; i < lnumverts ; ++i)
        {
            vec3_t  v1, v2;
            float   *prev, *this, *next;

            prev = poly->verts[(i + lnumverts - 1) % lnumverts];
            this = poly->verts[i];
            next = poly->verts[(i + 1) % lnumverts];

            VectorSubtract (this, prev, v1);
            VectorNormalize (v1);
            VectorSubtract (next, prev, v2);
            VectorNormalize (v2);

            // skip co-linear points
#define COLINEAR_EPSILON 0.001
            if ((fabs (v1[0] - v2[0]) <= COLINEAR_EPSILON) &&
                (fabs (v1[1] - v2[1]) <= COLINEAR_EPSILON) &&
                (fabs (v1[2] - v2[2]) <= COLINEAR_EPSILON))
            {
                int j;

                for (j = i + 1; j < lnumverts; ++j)
                {
                    int k;

                    for (k = 0; k < VERTEXSIZE; ++k)
                        poly->verts[j - 1][k] = poly->verts[j][k];
                }
                --lnumverts;
                ++nColinElim;
                // retry next vertex next time, which is now current vertex
                --i;
            }
        }
    }
    poly->numverts = lnumverts;
}

/*
========================
GL_CreateSurfaceLightmap
========================
*/
void GL_CreateSurfaceLightmap (msurface_t *surf)
{
    int     smax, tmax;
    byte    *base;

    if (surf->flags & (SURF_DRAWSKY | SURF_DRAWTURB))
        return;

    smax = (surf->extents[0] >> 4) + 1;
    tmax = (surf->extents[1] >> 4) + 1;

    surf->lightmaptexturenum = AllocBlock (smax, tmax, &surf->light_s, &surf->light_t);
    base = lightmaps + surf->lightmaptexturenum * lightmap_bytes * BLOCK_WIDTH * BLOCK_HEIGHT;
    base += (surf->light_t * BLOCK_WIDTH + surf->light_s) * lightmap_bytes;
    R_BuildLightMap (surf, base, BLOCK_WIDTH * lightmap_bytes);
}

/*
==================
GL_BuildLightmaps

Builds the lightmap texture
with all the surfaces from all brush models
==================
*/
void GL_BuildLightmaps (void)
{
    int     i, j;
    model_t *m;

    memset (allocated, 0, sizeof(allocated));

    r_framecount = 1;       // no dlightcache

    if (!lightmap_textures)
    {
        lightmap_textures = texture_extension_number;
        texture_extension_number += MAX_LIGHTMAPS;
    }

    gl_colorlights = Cvar_Get ("gl_colorlights", "1", CVAR_ROM,
                               "Whether to use RGB lightmaps or not");

    if (gl_colorlights->value)
    {
        gl_lightmap_format = GL_RGB;
        lightmap_bytes = 3;
    } else {
        gl_lightmap_format = GL_LUMINANCE;
        lightmap_bytes = 1;
    }
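    // lightmap_bytes is the CPU-side bytes per luxel and has to stay in step
    // with gl_lightmap_format: 3 for the colored GL_RGB path, 1 for plain
    // GL_LUMINANCE.  Both the offsets into the lightmaps[] staging buffer and
    // the glTexImage2D uploads at the end of this function depend on it.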
    for (j = 1 ; j < MAX_MODELS ; j++)
    {
        m = cl.model_precache[j];
        if (!m)
            break;
        if (m->name[0] == '*')
            continue;
        r_pcurrentvertbase = m->vertexes;
        currentmodel = m;
        for (i = 0 ; i < m->numsurfaces ; i++)
        {
            GL_CreateSurfaceLightmap (m->surfaces + i);
            if (m->surfaces[i].flags & SURF_DRAWTURB)
                continue;
            if (m->surfaces[i].flags & SURF_DRAWSKY)
                continue;
            BuildSurfaceDisplayList (m->surfaces + i);
        }
    }

    if (!gl_texsort->value)
        GL_SelectTexture(1);
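    // When texsort is off, the sequential-poly path renders lightmaps through
    // the second texture unit, so unit 1 stays selected while the lightmap
    // pages are created and uploaded below; unit 0 is restored at the end of
    // the function.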
    //
    // upload all lightmaps that were filled
    //
    for (i = 0 ; i < MAX_LIGHTMAPS ; i++)
    {
        if (!allocated[i][0])
            break;      // no more used
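        // reset the dirty-rectangle bookkeeping; the dynamic-light path marks
        // lightmap_modified and grows lightmap_rectchange so that only the
        // touched part of a page needs to be re-uploaded each frame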
        lightmap_modified[i] = false;
        lightmap_rectchange[i].l = BLOCK_WIDTH;
        lightmap_rectchange[i].t = BLOCK_HEIGHT;
        lightmap_rectchange[i].w = 0;
        lightmap_rectchange[i].h = 0;
        GL_Bind(lightmap_textures + i);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        glTexImage2D (GL_TEXTURE_2D, 0, lightmap_bytes, BLOCK_WIDTH,
                      BLOCK_HEIGHT, 0, gl_lightmap_format,
                      GL_UNSIGNED_BYTE, lightmaps +
                      i * BLOCK_WIDTH * BLOCK_HEIGHT * lightmap_bytes);
    }

    if (!gl_texsort->value)
        GL_SelectTexture(0);
}