@@ -1452,7 +1452,7 @@ static bool fold_and(OptContext *ctx, TCGOp *op)
 
 static bool fold_andc(OptContext *ctx, TCGOp *op)
 {
-    uint64_t z_mask, s_mask;
+    uint64_t z_mask, o_mask, s_mask, a_mask;
     TempOptInfo *t1, *t2;
 
     if (fold_const2(ctx, op) ||
@@ -1464,7 +1464,6 @@ static bool fold_andc(OptContext *ctx, TCGOp *op)
 
     t1 = arg_info(op->args[1]);
     t2 = arg_info(op->args[2]);
-    z_mask = t1->z_mask;
 
     if (ti_is_const(t2)) {
         /* Fold andc r,x,i to and r,x,~i. */
@@ -1485,20 +1484,14 @@ static bool fold_andc(OptContext *ctx, TCGOp *op)
         return fold_and(ctx, op);
     }
 
-    /*
-     * Known-zeros does not imply known-ones. Therefore unless
-     * arg2 is constant, we can't infer anything from it.
-     */
-    if (ti_is_const(t2)) {
-        uint64_t v2 = ti_const_val(t2);
-        if (fold_affected_mask(ctx, op, z_mask & v2)) {
-            return true;
-        }
-        z_mask &= ~v2;
-    }
-
+    z_mask = t1->z_mask & ~t2->o_mask;
+    o_mask = t1->o_mask & ~t2->z_mask;
     s_mask = t1->s_mask & t2->s_mask;
-    return fold_masks_zs(ctx, op, z_mask, s_mask);
+
+    /* Affected bits are those not known zero, masked by those known zero. */
+    a_mask = t1->z_mask & t2->z_mask;
+
+    return fold_masks_zosa(ctx, op, z_mask, o_mask, s_mask, a_mask);
 }
 
 static bool fold_bitsel_vec(OptContext *ctx, TCGOp *op)
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/optimize.c | 23 ++++++++---------------
 1 file changed, 8 insertions(+), 15 deletions(-)
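As a sanity check on the new mask algebra (this harness is an illustration added here, not part of the patch), the following standalone program brute-forces all 4-bit known-bits pairs. It borrows the conventions of tcg/optimize.c: a clear bit in z_mask means the bit is known zero (so a set bit means it may be one), and a set bit in o_mask means the bit is known one. s_mask is left out, since sign replication is not visible at 4 bits, and the names fits/BITS/LIMIT are local to the sketch.

/*
 * Reviewer's sketch, not part of the patch: brute-force check of the
 * andc known-bits algebra over all 4-bit (z, o) pairs and values.
 */
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define BITS  4u
#define LIMIT (1u << BITS)
#define MASK  (LIMIT - 1)

/* A value v is compatible with (z, o) iff it has no bit outside z
   and contains every bit of o. */
static int fits(uint32_t v, uint32_t z, uint32_t o)
{
    return (v & ~z & MASK) == 0 && (v & o) == o;
}

int main(void)
{
    for (uint32_t z1 = 0; z1 < LIMIT; z1++)
    for (uint32_t o1 = 0; o1 < LIMIT; o1++) {
        if (o1 & ~z1) {
            continue;                  /* known-one must imply may-be-one */
        }
        for (uint32_t z2 = 0; z2 < LIMIT; z2++)
        for (uint32_t o2 = 0; o2 < LIMIT; o2++) {
            if (o2 & ~z2) {
                continue;
            }

            /* The three derived masks, as in fold_andc above. */
            uint32_t z = z1 & ~o2;     /* may-be-one bits of the result */
            uint32_t o = o1 & ~z2;     /* known-one bits of the result */
            uint32_t a = z1 & z2;      /* affected bits */

            for (uint32_t v1 = 0; v1 < LIMIT; v1++) {
                if (!fits(v1, z1, o1)) {
                    continue;
                }
                for (uint32_t v2 = 0; v2 < LIMIT; v2++) {
                    if (!fits(v2, z2, o2)) {
                        continue;
                    }

                    uint32_t r = v1 & ~v2 & MASK;

                    /* Every reachable result satisfies the masks. */
                    assert(fits(r, z, o));

                    /* a_mask == 0 means andc degenerates to a copy of
                       arg1, which is what fold_masks_zosa exploits. */
                    if (a == 0) {
                        assert(r == v1);
                    }
                }
            }
        }
    }
    printf("andc mask algebra holds for all %u-bit known-bits pairs\n",
           (unsigned)BITS);
    return 0;
}

Note that the mask computation is only reached when arg2 is non-constant: a constant arg2 is rewritten via the pointwise identity x & ~i into an and opcode, so the more precise fold_and logic handles that case.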