mirror of
https://github.com/DarkPlacesEngine/gmqcc.git
synced 2025-01-18 14:21:36 +00:00
Add quotes to the known control sequences... darn. Add stringification via #
This commit is contained in:
parent
0330b082a2
commit
a014480987
2 changed files with 72 additions and 5 deletions
63
ftepp.c
63
ftepp.c
|
@ -438,6 +438,47 @@ static bool macro_params_find(ppmacro *macro, const char *name, size_t *idx)
|
|||
return false;
|
||||
}
|
||||
|
||||
/* Emit one token as part of a stringified ('#') macro parameter.
 * String constants get their backslashes and double quotes re-escaped so
 * the output remains a valid string literal; whitespace tokens collapse
 * to a single space; an end-of-line becomes the literal escape "\n".
 * Everything else is written out verbatim via its token value.
 */
static void ftepp_stringify_token(ftepp_t *ftepp, pptoken *token)
{
    char        single[2]; /* one-character scratch string for plain chars */
    const char *p;

    single[1] = 0;
    switch (token->token) {
        case TOKEN_STRINGCONST:
            for (p = token->value; *p; ++p) {
                if (*p == '\\')
                    ftepp_out(ftepp, "\\\\", false);
                else if (*p == '"')
                    ftepp_out(ftepp, "\\\"", false);
                else {
                    single[0] = *p;
                    ftepp_out(ftepp, single, false);
                }
            }
            break;
        case TOKEN_WHITE:
            ftepp_out(ftepp, " ", false);
            break;
        case TOKEN_EOL:
            ftepp_out(ftepp, "\\n", false);
            break;
        default:
            ftepp_out(ftepp, token->value, false);
            break;
    }
}
|
||||
|
||||
/* Stringify an entire macro parameter: wrap its token list in double
 * quotes, delegating per-token escaping to ftepp_stringify_token().
 */
static void ftepp_stringify(ftepp_t *ftepp, macroparam *param)
{
    size_t t;

    ftepp_out(ftepp, "\"", false);
    for (t = 0; t != vec_size(param->tokens); ++t)
        ftepp_stringify_token(ftepp, param->tokens[t]);
    ftepp_out(ftepp, "\"", false);
}
|
||||
|
||||
static bool ftepp_preprocess(ftepp_t *ftepp);
|
||||
static bool ftepp_macro_expand(ftepp_t *ftepp, ppmacro *macro, macroparam *params)
|
||||
{
|
||||
|
@ -448,6 +489,8 @@ static bool ftepp_macro_expand(ftepp_t *ftepp, ppmacro *macro, macroparam *param
|
|||
size_t o, pi, pv;
|
||||
lex_file *inlex;
|
||||
|
||||
int nextok;
|
||||
|
||||
/* really ... */
|
||||
if (!vec_size(macro->output))
|
||||
return true;
|
||||
|
@ -473,10 +516,22 @@ static bool ftepp_macro_expand(ftepp_t *ftepp, ppmacro *macro, macroparam *param
|
|||
}
|
||||
break;
|
||||
case '#':
|
||||
if (o + 1 < vec_size(macro->output) && macro->output[o+1]->token == '#') {
|
||||
/* raw concatenation */
|
||||
++o;
|
||||
break;
|
||||
if (o + 1 < vec_size(macro->output)) {
|
||||
nextok = macro->output[o+1]->token;
|
||||
if (nextok == '#') {
|
||||
/* raw concatenation */
|
||||
++o;
|
||||
break;
|
||||
}
|
||||
if ( (nextok == TOKEN_IDENT ||
|
||||
nextok == TOKEN_KEYWORD ||
|
||||
nextok == TOKEN_TYPENAME) &&
|
||||
macro_params_find(macro, macro->output[o+1]->value, &pi))
|
||||
{
|
||||
++o;
|
||||
ftepp_stringify(ftepp, ¶ms[pi]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
ftepp_out(ftepp, "#", false);
|
||||
break;
|
||||
|
|
14
lexer.c
14
lexer.c
|
@ -573,7 +573,17 @@ static int GMQCC_WARN lex_finish_string(lex_file *lex, int quote)
|
|||
if (ch == quote)
|
||||
return TOKEN_STRINGCONST;
|
||||
|
||||
if (!lex->flags.preprocessing && ch == '\\') {
|
||||
if (lex->flags.preprocessing && ch == '\\') {
|
||||
lex_tokench(lex, ch);
|
||||
ch = lex_getch(lex);
|
||||
if (ch == EOF) {
|
||||
lexerror(lex, "unexpected end of file");
|
||||
lex_ungetch(lex, EOF); /* next token to be TOKEN_EOF */
|
||||
return (lex->tok.ttype = TOKEN_ERROR);
|
||||
}
|
||||
lex_tokench(lex, ch);
|
||||
}
|
||||
else if (ch == '\\') {
|
||||
ch = lex_getch(lex);
|
||||
if (ch == EOF) {
|
||||
lexerror(lex, "unexpected end of file");
|
||||
|
@ -583,6 +593,8 @@ static int GMQCC_WARN lex_finish_string(lex_file *lex, int quote)
|
|||
|
||||
switch (ch) {
|
||||
case '\\': break;
|
||||
case '\'': break;
|
||||
case '"': break;
|
||||
case 'a': ch = '\a'; break;
|
||||
case 'b': ch = '\b'; break;
|
||||
case 'r': ch = '\r'; break;
|
||||
|
|
Loading…
Reference in a new issue