Mirror of https://github.com/DarkPlacesEngine/gmqcc.git
for the lexer 3 dots now become TOKEN_DOTS
commit deb7ccb830
parent 84149c371d
2 changed files with 34 additions and 1 deletion
lexer.c | 32 (31 additions, 1 deletion)
@@ -774,7 +774,6 @@ int lex_do(lex_file *lex)
             case '^':
             case '~':
             case ',':
-            case '.':
             case '!':
                 if (!lex_tokench(lex, ch) ||
                     !lex_endtoken(lex))
@@ -785,6 +784,37 @@ int lex_do(lex_file *lex)
             default:
                 break;
         }
+
+        if (ch == '.')
+        {
+            if (!lex_tokench(lex, ch))
+                return (lex->tok->ttype = TOKEN_FATAL);
+            /* peek ahead once */
+            nextch = lex_getch(lex);
+            if (nextch != '.') {
+                lex_ungetch(lex, nextch);
+                if (!lex_endtoken(lex))
+                    return (lex->tok->ttype = TOKEN_FATAL);
+                return (lex->tok->ttype = ch);
+            }
+            /* peek ahead again */
+            nextch = lex_getch(lex);
+            if (nextch != '.') {
+                lex_ungetch(lex, nextch);
+                lex_ungetch(lex, '.'); /* also restore the second '.' consumed above */
+                if (!lex_endtoken(lex))
+                    return (lex->tok->ttype = TOKEN_FATAL);
+                return (lex->tok->ttype = ch);
+            }
+            /* fill the token to be "..." */
+            if (!lex_tokench(lex, ch) ||
+                !lex_tokench(lex, ch) ||
+                !lex_endtoken(lex))
+            {
+                return (lex->tok->ttype = TOKEN_FATAL);
+            }
+            return (lex->tok->ttype = TOKEN_DOTS);
+        }
     }
 
     if (ch == ',' || ch == '.') {
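The added block gives '.' three outcomes: a lone '.' stays a one-character token, ".." still yields a lone '.' with the extra characters pushed back, and "..." becomes the new TOKEN_DOTS, presumably so a `...` notation (e.g. variadic parameter lists) can be tokenized in one step. The only subtle part is the second pushback: after two dots have been consumed and the third character is something else, both that character and the second '.' must be returned to the stream in LIFO order. Below is a minimal standalone sketch of that lookahead; a string source and a two-slot pushback stack stand in for lex_getch/lex_ungetch, and all names are illustrative, not gmqcc's API.

/* Toy version of the dots scan above. Illustrative only, not gmqcc code. */
#include <stdio.h>

static const char *src;
static int pushback[2];
static int npushed;

static int getch(void) {
    if (npushed > 0)
        return pushback[--npushed];
    return *src ? *src++ : -1; /* -1 for end of input */
}

static void ungetch(int ch) {
    pushback[npushed++] = ch;
}

/* Called after an initial '.' was read; returns the token length: 1 for a
 * lone '.' (any lookahead is pushed back), 3 for "..." (TOKEN_DOTS). */
static int scan_dots(void) {
    int nextch = getch();
    if (nextch != '.') {
        ungetch(nextch);       /* plain '.' */
        return 1;
    }
    nextch = getch();
    if (nextch != '.') {       /* ".." only: restore both consumed chars */
        ungetch(nextch);
        ungetch('.');          /* LIFO: next reads yield '.', then nextch */
        return 1;
    }
    return 3;                  /* "..." */
}

int main(void) {
    src = "..x";
    if (getch() == '.') {
        int ndots = scan_dots();
        int a = getch(), b = getch();
        printf("dots in token: %d\n", ndots);  /* 1: only two dots present */
        printf("stream resumes: %c%c\n", a, b); /* ".x", in the right order */
    }
    return 0;
}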
lexer.h | 3 (3 additions)
@@ -45,6 +45,8 @@ enum {
 
     TOKEN_KEYWORD, /* loop */
 
+    TOKEN_DOTS, /* 3 dots, ... */
+
     TOKEN_STRINGCONST, /* not the typename but an actual "string" */
     TOKEN_CHARCONST,
     TOKEN_VECTORCONST,
@@ -66,6 +68,7 @@ static const char *_tokennames[] = {
     "TOKEN_TYPENAME",
     "TOKEN_OPERATOR",
     "TOKEN_KEYWORD",
+    "TOKEN_DOTS",
     "TOKEN_STRINGCONST",
     "TOKEN_CHARCONST",
     "TOKEN_VECTORCONST",
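Note that TOKEN_DOTS is added in two parallel places: the token enum and the _tokennames[] string table, which must stay index-aligned. The self-contained toy below shows one way to guard that invariant at compile time; the TOKEN_COUNT sentinel and the size check are additions for illustration, not gmqcc code, and gmqcc's real enum need not start at zero.

/* Toy illustration: enum and name table kept index-aligned, with a
 * compile-time size check. TOKEN_COUNT and the check are not gmqcc code. */
#include <stdio.h>

enum {
    TOKEN_KEYWORD,
    TOKEN_DOTS,        /* the new token: 3 dots, ... */
    TOKEN_STRINGCONST,
    TOKEN_COUNT        /* sentinel: number of tokens above */
};

static const char *_tokennames[] = {
    "TOKEN_KEYWORD",
    "TOKEN_DOTS",
    "TOKEN_STRINGCONST",
};

/* Fails to compile (negative array size) if an enum entry lacks a name. */
typedef char tokennames_match_enum[
    (sizeof(_tokennames) / sizeof(_tokennames[0]) == TOKEN_COUNT) ? 1 : -1];

int main(void) {
    printf("%s\n", _tokennames[TOKEN_DOTS]); /* prints TOKEN_DOTS */
    return 0;
}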