Indentation

This commit is contained in:
Dale Weiler 2012-04-14 02:45:28 -04:00
parent 25acfa1c7d
commit cc2c0ddbab

96
parse.c
View file

@@ -316,54 +316,54 @@ int parse_tree(struct lex_file *file) {
 * is a rather large macro, and is #undef after its use below.
 */
/*
 * PARSE_VEC_ELEMENT(NAME, BIT)
 *
 * Lexes one numeric element of a vector constant initializer
 * ("{ x y z }" style): skips one optional leading space, accepts an
 * optional sign and at most one decimal point, accumulates digit
 * characters into compile_data via compile_eval, then converts the
 * text with atof() into compile_calc_<BIT> and resets the scratch
 * state for the next element.
 *
 * NAME is the element letter ('x', 'y' or 'z'), used only in
 * diagnostics and to decide the expected delimiter (',' / ' ' for
 * x and y, '}' for z).  BIT selects the compile_calc_* target.
 *
 * Relies on the enclosing scope providing: token, file,
 * compile_calc_d, compile_calc_s, compile_eval, compile_data.
 *
 * Wrapped in do { } while (0) so the expansion is a single statement
 * and remains safe in unbraced if/else bodies; existing call sites
 * of the form PARSE_VEC_ELEMENT(N, B); are unaffected.
 */
#define PARSE_VEC_ELEMENT(NAME, BIT)                                           \
do {                                                                           \
    token = lex_token(file);                                                   \
    if (token == ' ') {                                                        \
        token = lex_token(file);                                               \
    }                                                                          \
    if (token == '.') {                                                        \
        compile_calc_d = 1;                                                    \
    }                                                                          \
    if (!isdigit(token) && !compile_calc_d && token != '+' && token != '-')    \
        error(ERROR_PARSE,"%s:%d Invalid constant initializer element %c for vector, must be numeric\n", file->name, file->line, NAME); \
    if (token == '+') {                                                        \
        compile_calc_s = '+';                                                  \
    }                                                                          \
    if (token == '-' && !compile_calc_s) {                                     \
        compile_calc_s = '-';                                                  \
    }                                                                          \
    while (isdigit(token) || token == '.' || token == '+' || token == '-') {   \
        *compile_eval++ = token;                                               \
        token = lex_token(file);                                               \
        /* a second '.' in the same element is malformed */                    \
        if (token == '.' && compile_calc_d) {                                  \
            error(ERROR_PARSE, "%s:%d Invalid constant initializer element %c for vector, must be numeric.\n", file->name, file->line, NAME); \
            token = lex_token(file);                                           \
        }                                                                      \
        /* a second sign in the same element is malformed */                   \
        if ((token == '-' || token == '+') && compile_calc_s) {                \
            error(ERROR_PARSE, "%s:%d Invalid constant initializer sign for vector element %c\n", file->name, file->line, NAME); \
            token = lex_token(file);                                           \
        } else if (token == '.' && !compile_calc_d) {                          \
            compile_calc_d = 1;                                                \
        } else if (token == '-' && !compile_calc_s) {                          \
            compile_calc_s = '-';                                              \
        } else if (token == '+' && !compile_calc_s) {                          \
            compile_calc_s = '+';                                              \
        }                                                                      \
    }                                                                          \
    if (token == ' ') {                                                        \
        token = lex_token(file);                                               \
    }                                                                          \
    if (NAME != 'z') {                                                         \
        if (token != ',' && token != ' ') {                                    \
            /* FIX: arguments were (NAME, file->name, file->line), which       \
             * mismatches the "%s:%d ... %c" format (int read as char*: UB).   \
             * Reordered to match the format and every other error() above. */ \
            error(ERROR_PARSE, "%s:%d invalid constant initializer element %c for vector (missing spaces, or comma delimited list?)\n", file->name, file->line, NAME); \
        }                                                                      \
    } else if (token != '}') {                                                 \
        error(ERROR_PARSE, "%s:%d Expected `}` on end of constant initialization for vector\n", file->name, file->line); \
    }                                                                          \
    compile_calc_##BIT = atof(compile_data);                                   \
    compile_calc_d = 0;                                                        \
    compile_calc_s = 0;                                                        \
    compile_eval = &compile_data[0];                                           \
    memset(compile_data, 0, sizeof(compile_data));                             \
} while (0)
/*
 * Parse all elements using the macro above.