/*
gl_ngraph.c
(description)
Copyright (C) 1996-1997 Id Software, Inc.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to:
Free Software Foundation, Inc.
59 Temple Place - Suite 330
Boston, MA 02111-1307, USA
$Id$
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <string.h>
#include <stdio.h>
#include "bothdefs.h" // needed by: common.h, net.h, client.h
#include "bspfile.h" // needed by: glquake.h
#include "vid.h"
#include "sys.h"
#include "mathlib.h" // needed by: protocol.h, render.h, client.h,
// modelgen.h, glmodel.h
#include "wad.h"
#include "draw.h"
#include "cvar.h"
#include "menu.h"
#include "net.h" // needed by: client.h
#include "protocol.h" // needed by: client.h
#include "cmd.h"
#include "sbar.h"
#include "render.h" // needed by: client.h, gl_model.h, glquake.h
#include "client.h" // need cls in this file
#include "model.h" // needed by: glquake.h
#include "console.h"
#include "glquake.h"
extern byte *draw_chars; // 8*8 graphic characters (the conchars font sheet, 128 px wide)
int netgraphtexture; // GL texture object handle used for the netgraph upload
// height of the graph in texels; width is NET_TIMINGS (from client.h)
#define NET_GRAPHHEIGHT 32
// 8-bit palette-index buffer the graph is rasterized into before being
// expanded to RGBA and uploaded to GL (palette index 0xff = transparent)
static byte ngraph_texels[NET_GRAPHHEIGHT][NET_TIMINGS];
static void R_LineGraph (int x, int h)
{
int i;
int s;
int color;
s = NET_GRAPHHEIGHT;
if (h == 10000)
color = 0x6f; // yellow
else if (h == 9999)
color = 0x4f; // red
else if (h == 9998)
color = 0xd0; // blue
else
color = 0xfe; // white
if (h>s)
h = s;
for (i=0 ; i<h ; i++)
if (i & 1)
ngraph_texels[NET_GRAPHHEIGHT - i - 1][x] = 0xff;
else
ngraph_texels[NET_GRAPHHEIGHT - i - 1][x] = (byte)color;
for ( ; i<s ; i++)
ngraph_texels[NET_GRAPHHEIGHT - i - 1][x] = (byte)0xff;
}
/*
	Draw_CharToNetGraph

	Stamp one 8x8 glyph from the conchars sheet into the netgraph texel
	buffer at (x, y).  Font texels equal to 255 are transparent and left
	untouched; all others are remapped into the 0x60 color ramp.
*/
void Draw_CharToNetGraph (int x, int y, int num)
{
	int			line, col;
	const byte *src;

	// conchars is a 128-texel-wide sheet of 16x16 glyphs, 8x8 each:
	// row = num>>4 (1024 texels per glyph row), column = num&15
	src = draw_chars + ((num >> 4) << 10) + ((num & 15) << 3);

	for (line = 0; line < 8; line++, y++, src += 128) {
		for (col = 0; col < 8; col++) {
			if (src[col] != 255)
				ngraph_texels[y][col + x] = 0x60 + src[col];
		}
	}
}
/*
==============
R_NetGraph
==============
*/
void R_NetGraph (void)
{
int a, x, i, y;
int lost;
char st[80];
unsigned ngraph_pixels[NET_GRAPHHEIGHT][NET_TIMINGS];
x = 0;
lost = CL_CalcNet();
for (a=0 ; a<NET_TIMINGS ; a++)
{
i = (cls.netchan.outgoing_sequence-a) & NET_TIMINGSMASK;
R_LineGraph (NET_TIMINGS-1-a, packet_latency[i]);
}
// now load the netgraph texture into gl and draw it
for (y = 0; y < NET_GRAPHHEIGHT; y++)
for (x = 0; x < NET_TIMINGS; x++)
ngraph_pixels[y][x] = d_8to24table[ngraph_texels[y][x]];
x = -((vid.width - 320)>>1);
y = vid.height - sb_lines - 24 - NET_GRAPHHEIGHT - 1;
M_DrawTextBox (x, y, NET_TIMINGS/8, NET_GRAPHHEIGHT/8 + 1);
y += 8;
sprintf(st, "%3i%% packet loss", lost);
Draw_String8 (8, y, st);
2000-05-10 11:29:38 +00:00
y += 8;
glBindTexture (GL_TEXTURE_2D, netgraphtexture);
2000-05-10 11:29:38 +00:00
glTexImage2D (GL_TEXTURE_2D, 0, gl_alpha_format,
NET_TIMINGS, NET_GRAPHHEIGHT, 0, GL_RGBA,
GL_UNSIGNED_BYTE, ngraph_pixels);
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
x = 8;
This is a NON-TRIVIAL update which took LordHavoc and I about 3 days to make work properly: Win32 thing.. If you don't free textures explicitly, you can cause a problem with nVidia drivers. Colored lighting is now RGB instead of RGBA. The alpha is kinda pointless on a lightmap and the effect's not all that great. Plus people stuck with 16 bit OpenGL (any other 3dfx people out there?) will be quite pleased with the improvement in image quality. This does include LordHavoc's dynamic light optimization code which takes most of the pain out of having gl_flashblend off. All glColor*'s are now half of what they used to be, except where they aren't. If that doesn't make sense, don't worry. If you see one that's only half what you'd expect, don't worry---it probably is meant to be like that.. (More below) glDisable (GL_BLEND) is now a thing of the GL_PAST. As is GL_REPLACE. Instead, we _always_ use GL_MODULATE and leave GL_BLEND turned on. This seems at first like it might be a performance hit, but I swear it's much more expensive to change blending modes and texture functions 20-30 times every screen frame! Win32 issue.. Even though we check for multitexture, we currently don't use it. Reason is that I am planning to replace SGIS_multitexture with the preferred ARB_multitexture extension which is supported in most GL 1.1 implementations and is a requirement for GL 1.2 anyway. I also wanted to get rid of some duplicated code. Since Linux doesn't support multitexture yet, I just commented out the code keeping me from compiling to get it to work. Win32 should work without it until it's fixed, which shouldn't be long since the differences between SGIS and ARB multitextures as far as Quake is concerned is minimal AT BEST. LordHavoc and I have been working tirelessly (well not quite, we both did manage to sleep sometime during this ordeal) to fix the lighting in the GL renderers! It looks DAMNED CLOSE to software's lighting now, including the ability to overbright a color. 
You've gotta see this to know what I'm talking about. That's why the glColor*'s are halved in most places. The gamma table code and the general way it works is LordHavoc's design, but over the course of re-implementing it in QF we did come up with a few more small optimizations. A lot of people have noticed that QF's fps count has gone to shit lately. No promises that this undid whatever the problem was. That means there could be a huge optimization lurking somewhere in the shadows, waiting for us to fix it for a massive FPS boost. Even if there's not, the code in this commit DOUBLED MY FPS COUNT. Granted I was getting pathetic FPS as it was (around 30, which is pathetic even for a Voodoo3 in Linux) but still---60 is a big improvement over 30! Please be sure to "test" this code thuroughly.
2000-06-03 19:56:09 +00:00
glColor3f (0.5, 0.5, 0.5);
2000-05-10 11:29:38 +00:00
glBegin (GL_QUADS);
glTexCoord2f (0, 0);
glVertex2f (x, y);
glTexCoord2f (1, 0);
glVertex2f (x+NET_TIMINGS, y);
glTexCoord2f (1, 1);
glVertex2f (x+NET_TIMINGS, y+NET_GRAPHHEIGHT);
glTexCoord2f (0, 1);
glVertex2f (x, y+NET_GRAPHHEIGHT);
glEnd ();
}