Avoid the use of the OptContext mask slots; look up each operand's
TempOptInfo once instead of calling arg_info repeatedly.
Remove fold_masks, as the function becomes unused.
Reviewed-by: Pierrick Bouvier <pierrick.bouvier@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/optimize.c | 18 ++++++++----------
1 file changed, 8 insertions(+), 10 deletions(-)
diff --git a/tcg/optimize.c b/tcg/optimize.c
index 48324f122a..0cdbd1e262 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -1063,11 +1063,6 @@ static bool fold_masks_s(OptContext *ctx, TCGOp *op, uint64_t s_mask)
return fold_masks_zs(ctx, op, -1, s_mask);
}
-static bool fold_masks(OptContext *ctx, TCGOp *op)
-{
- return fold_masks_zs(ctx, op, ctx->z_mask, ctx->s_mask);
-}
-
/*
* An "affected" mask bit is 0 if and only if the result is identical
* to the first input. Thus if the entire mask is 0, the operation
@@ -2752,6 +2747,9 @@ static bool fold_tcg_st_memcopy(OptContext *ctx, TCGOp *op)
static bool fold_xor(OptContext *ctx, TCGOp *op)
{
+ uint64_t z_mask, s_mask;
+ TempOptInfo *t1, *t2;
+
if (fold_const2_commutative(ctx, op) ||
fold_xx_to_i(ctx, op, 0) ||
fold_xi_to_x(ctx, op, 0) ||
@@ -2759,11 +2757,11 @@ static bool fold_xor(OptContext *ctx, TCGOp *op)
return true;
}
- ctx->z_mask = arg_info(op->args[1])->z_mask
- | arg_info(op->args[2])->z_mask;
- ctx->s_mask = arg_info(op->args[1])->s_mask
- & arg_info(op->args[2])->s_mask;
- return fold_masks(ctx, op);
+ t1 = arg_info(op->args[1]);
+ t2 = arg_info(op->args[2]);
+ z_mask = t1->z_mask | t2->z_mask;
+ s_mask = t1->s_mask & t2->s_mask;
+ return fold_masks_zs(ctx, op, z_mask, s_mask);
}
static bool fold_bitsel_vec(OptContext *ctx, TCGOp *op)
--
2.43.0