Mirror of https://git.do.srb2.org/STJr/SRB2.git (synced 2024-11-25 13:51:43 +00:00)
Tokenizer: pad with 2 NULL bytes
commit b8023aeef9
parent da1b579cee
1 changed file with 6 additions and 3 deletions
@@ -15,10 +15,13 @@
 tokenizer_t *Tokenizer_Open(const char *inputString, size_t len, unsigned numTokens)
 {
 	tokenizer_t *tokenizer = Z_Malloc(sizeof(tokenizer_t), PU_STATIC, NULL);
-	const size_t lenpan = len+1;
+	const size_t lenpan = 2;
 
-	tokenizer->zdup = malloc(lenpan);
-	tokenizer->zdup[len] = 0x00;
+	tokenizer->zdup = malloc(len+lenpan);
+	for (size_t i = 0; i < lenpan; i++)
+	{
+		tokenizer->zdup[len+i] = 0x00;
+	}
 
 	tokenizer->input = M_Memcpy(tokenizer->zdup, inputString, len);
 	tokenizer->startPos = 0;
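For readers outside the SRB2 tree, the sketch below shows the same padding scheme in isolation (the helper name and the main() driver are illustrative, not part of the project's API): the duplicated buffer is allocated with two extra bytes and both trailing bytes are zeroed, which lets a scanner peek one character past the end-of-string terminator without leaving the allocation.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Illustrative helper (not SRB2 code): duplicate `len` bytes of input and
 * append two NUL bytes, matching the padding the commit introduces. */
static char *dup_with_nul_padding(const char *input, size_t len)
{
	const size_t lenpan = 2;            /* same padding constant as in the diff */
	char *zdup = malloc(len + lenpan);
	if (zdup == NULL)
		return NULL;

	memcpy(zdup, input, len);           /* copy the (unterminated) input */
	for (size_t i = 0; i < lenpan; i++) /* zero both trailing bytes */
		zdup[len + i] = 0x00;

	return zdup;
}

int main(void)
{
	const char raw[] = { 'A', '=', '1' };   /* input with no terminator of its own */
	char *buf = dup_with_nul_padding(raw, sizeof raw);
	if (buf == NULL)
		return 1;

	/* buf[3] and buf[4] are both 0, so reading buf[i + 1] while i is still
	 * inside the string never goes past the end of the allocation. */
	printf("%s (bytes past end: %d %d)\n", buf, buf[3], buf[4]);
	free(buf);
	return 0;
}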