[qfcc] Fix integer vector constants for clang
It seems clang loses track of the usage of the referenced unions by the time the code leaves the switch. Due to the misoptimization, "random" values would get into the vector constants. This commit puts the usages in the same blocks as the unions, causing clang to "get it right" (though I strongly suspect I was running into UB).
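To make the failure mode concrete, here is a minimal, self-contained sketch of the pattern, not the qfcc code itself. The names word_t and consume_words are hypothetical stand-ins for qfcc's pr_type_t storage and the new_value_expr (new_type_value (...)) call; what it mirrors from the diff below is the shape of the fix: a volatile union whose punned words are consumed in the same block, through a cast that drops the qualifier.

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

typedef uint32_t word_t;    /* hypothetical stand-in for pr_type_t storage */

/* Stand-in for new_value_expr (new_type_value (...)): uses the punned
   words while the union backing them is still live. */
static void
consume_words (const word_t *w, int n)
{
    for (int i = 0; i < n; i++) {
        printf ("word[%d] = 0x%08" PRIx32 "\n", i, w[i]);
    }
}

int
main (void)
{
    /* Block-local union punning float components to raw words, as the
       vector-constant parser does.  volatile keeps the compiler from
       deciding the storage is dead, and the punned data is consumed in
       the same block, so the union is visibly live at the point of use. */
    volatile union {
        float  f[2];
        word_t w[2];
    } bits = { .f = { 1.0f, -2.5f } };

    /* The cast discards the volatile qualifier to match the consumer's
       plain pointer, just like the (pr_type_t *) casts in the diff.
       Before the fix, the union was non-volatile and the consuming call
       sat after the switch, outside the union's block, so the pointer
       was already dangling: the suspected UB from the commit message. */
    consume_words ((const word_t *) bits.w, 2);
    return 0;
}

With the pre-fix shape, an optimizer could legitimately recycle the union's stack slot before the value was consumed, which is consistent with the "random" values described above.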
commit 59b73353dd
parent 42287f1e0e

1 changed file with 21 additions and 10 deletions
@@ -587,13 +587,16 @@ parse_int_vector (const char *token, int width)
 	}
 	t1 = tolower (t1);
 	t2 = tolower (t2);
+	expr_t *expr = 0;
 	switch (t1) {
 		case 'u':
 			if (t2 == 'l') {
 				type = &type_ulong;
+				type = vector_type (type, width);
+				expr = new_value_expr (new_type_value (type, data));
 			} else {
 				type = &type_uint;
-				union {
+				volatile union {
 					pr_uint_t u[4];
 					pr_type_t t[PR_SIZEOF (ivec4)];
 				} uint_data = {
@@ -604,15 +607,19 @@ parse_int_vector (const char *token, int width)
 						long_data.l[3],
 					}
 				};
-				data = uint_data.t;
+				data = (pr_type_t *) uint_data.t;
+				type = vector_type (type, width);
+				expr = new_value_expr (new_type_value (type, data));
 			}
 			break;
 		case 'l':
 			type = &type_long;
+			type = vector_type (type, width);
+			expr = new_value_expr (new_type_value (type, data));
 			break;
 		case 'f':
 			type = &type_float;
-			union {
+			volatile union {
 				pr_float_t f[4];
 				pr_type_t t[PR_SIZEOF (vec4)];
 			} float_data = {
@@ -623,11 +630,13 @@ parse_int_vector (const char *token, int width)
 					long_data.l[3],
 				}
 			};
-			data = float_data.t;
+			data = (pr_type_t *) float_data.t;
+			type = vector_type (type, width);
+			expr = new_value_expr (new_type_value (type, data));
 			break;
 		case 'd':
 			type = &type_double;
-			union {
+			volatile union {
 				pr_double_t d[4];
 				pr_type_t t[PR_SIZEOF (dvec4)];
 			} double_data = {
@@ -638,11 +647,13 @@ parse_int_vector (const char *token, int width)
 					long_data.l[3],
 				}
 			};
-			data = double_data.t;
+			data = (pr_type_t *) double_data.t;
+			type = vector_type (type, width);
+			expr = new_value_expr (new_type_value (type, data));
 			break;
 		case 0:
 			type = &type_int;
-			union {
+			volatile union {
 				pr_int_t i[4];
 				pr_type_t t[PR_SIZEOF (ivec4)];
 			} int_data = {
@@ -653,11 +664,11 @@ parse_int_vector (const char *token, int width)
 					long_data.l[3],
 				}
 			};
-			data = int_data.t;
+			data = (pr_type_t *) int_data.t;
+			type = vector_type (type, width);
+			expr = new_value_expr (new_type_value (type, data));
 			break;
 	}
-	type = vector_type (type, width);
-	expr_t *expr = new_value_expr (new_type_value (type, data));
 	expr->implicit = !t1;
 	return expr;
 }
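Design note on the shape of the fix: before this change, the vector_type () widening and the new_value_expr (new_type_value (type, data)) construction happened once, after the switch, by which point data pointed into a union whose enclosing block had already exited, so the storage's lifetime had formally ended. Duplicating those two lines into each case trades a little repetition for a lifetime the optimizer can see, and the volatile on each union is a second line of defence in case the storage is still judged dead.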