[PATCH v2 13/36] tcg: Use tcg_constant_{i32,i64} with tcg int expanders

Richard Henderson posted 36 patches 5 years, 9 months ago
Maintainers: Paolo Bonzini <pbonzini@redhat.com>, David Gibson <david@gibson.dropbear.id.au>, Richard Henderson <richard.henderson@linaro.org>, Richard Henderson <rth@twiddle.net>, David Hildenbrand <david@redhat.com>, Aurelien Jarno <aurelien@aurel32.net>, Andrzej Zaborowski <balrogg@gmail.com>, Alistair Francis <Alistair.Francis@wdc.com>, Palmer Dabbelt <palmer@dabbelt.com>, Stefan Weil <sw@weilnetz.de>, Aleksandar Rikalo <aleksandar.rikalo@rt-rk.com>, Cornelia Huck <cohuck@redhat.com>, Aleksandar Markovic <aleksandar.qemu.devel@gmail.com>
[PATCH v2 13/36] tcg: Use tcg_constant_{i32,i64} with tcg int expanders
Posted by Richard Henderson 5 years, 9 months ago
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 include/tcg/tcg-op.h |  13 +--
 tcg/tcg-op.c         | 216 ++++++++++++++++++++-----------------------
 2 files changed, 100 insertions(+), 129 deletions(-)

diff --git a/include/tcg/tcg-op.h b/include/tcg/tcg-op.h
index 230db6e022..11ed9192f7 100644
--- a/include/tcg/tcg-op.h
+++ b/include/tcg/tcg-op.h
@@ -271,6 +271,7 @@ void tcg_gen_mb(TCGBar);
 
 /* 32 bit ops */
 
+void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg);
 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2);
 void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2);
 void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2);
@@ -349,11 +350,6 @@ static inline void tcg_gen_mov_i32(TCGv_i32 ret, TCGv_i32 arg)
     }
 }
 
-static inline void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
-{
-    tcg_gen_op2i_i32(INDEX_op_movi_i32, ret, arg);
-}
-
 static inline void tcg_gen_ld8u_i32(TCGv_i32 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
 {
@@ -467,6 +463,7 @@ static inline void tcg_gen_not_i32(TCGv_i32 ret, TCGv_i32 arg)
 
 /* 64 bit ops */
 
+void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg);
 void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2);
 void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2);
 void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2);
@@ -550,11 +547,6 @@ static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
     }
 }
 
-static inline void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
-{
-    tcg_gen_op2i_i64(INDEX_op_movi_i64, ret, arg);
-}
-
 static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
                                     tcg_target_long offset)
 {
@@ -698,7 +690,6 @@ static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
 
 void tcg_gen_discard_i64(TCGv_i64 arg);
 void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg);
-void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg);
 void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset);
 void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset);
 void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset);
diff --git a/tcg/tcg-op.c b/tcg/tcg-op.c
index e2e25ebf7d..07eb661a07 100644
--- a/tcg/tcg-op.c
+++ b/tcg/tcg-op.c
@@ -104,15 +104,18 @@ void tcg_gen_mb(TCGBar mb_type)
 
 /* 32 bit ops */
 
+void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
+{
+    tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
+}
+
 void tcg_gen_addi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
     /* some cases can be optimized here */
     if (arg2 == 0) {
         tcg_gen_mov_i32(ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_add_i32(ret, arg1, t0);
-        tcg_temp_free_i32(t0);
+        tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));
     }
 }
 
@@ -122,9 +125,7 @@ void tcg_gen_subfi_i32(TCGv_i32 ret, int32_t arg1, TCGv_i32 arg2)
         /* Don't recurse with tcg_gen_neg_i32.  */
         tcg_gen_op2_i32(INDEX_op_neg_i32, ret, arg2);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(arg1);
-        tcg_gen_sub_i32(ret, t0, arg2);
-        tcg_temp_free_i32(t0);
+        tcg_gen_sub_i32(ret, tcg_constant_i32(arg1), arg2);
     }
 }
 
@@ -134,15 +135,12 @@ void tcg_gen_subi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_sub_i32(ret, arg1, t0);
-        tcg_temp_free_i32(t0);
+        tcg_gen_sub_i32(ret, arg1, tcg_constant_i32(arg2));
     }
 }
 
 void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
 {
-    TCGv_i32 t0;
     /* Some cases can be optimized here.  */
     switch (arg2) {
     case 0:
@@ -165,9 +163,8 @@ void tcg_gen_andi_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
         }
         break;
     }
-    t0 = tcg_const_i32(arg2);
-    tcg_gen_and_i32(ret, arg1, t0);
-    tcg_temp_free_i32(t0);
+
+    tcg_gen_and_i32(ret, arg1, tcg_constant_i32(arg2));
 }
 
 void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -178,9 +175,7 @@ void tcg_gen_ori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     } else if (arg2 == 0) {
         tcg_gen_mov_i32(ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_or_i32(ret, arg1, t0);
-        tcg_temp_free_i32(t0);
+        tcg_gen_or_i32(ret, arg1, tcg_constant_i32(arg2));
     }
 }
 
@@ -193,9 +188,7 @@ void tcg_gen_xori_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
         /* Don't recurse with tcg_gen_not_i32.  */
         tcg_gen_op2_i32(INDEX_op_not_i32, ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_xor_i32(ret, arg1, t0);
-        tcg_temp_free_i32(t0);
+        tcg_gen_xor_i32(ret, arg1, tcg_constant_i32(arg2));
     }
 }
 
@@ -205,9 +198,7 @@ void tcg_gen_shli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_shl_i32(ret, arg1, t0);
-        tcg_temp_free_i32(t0);
+        tcg_gen_shl_i32(ret, arg1, tcg_constant_i32(arg2));
     }
 }
 
@@ -217,9 +208,7 @@ void tcg_gen_shri_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_shr_i32(ret, arg1, t0);
-        tcg_temp_free_i32(t0);
+        tcg_gen_shr_i32(ret, arg1, tcg_constant_i32(arg2));
     }
 }
 
@@ -229,9 +218,7 @@ void tcg_gen_sari_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(ret, arg1);
     } else {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_sar_i32(ret, arg1, t0);
-        tcg_temp_free_i32(t0);
+        tcg_gen_sar_i32(ret, arg1, tcg_constant_i32(arg2));
     }
 }
 
@@ -250,9 +237,7 @@ void tcg_gen_brcondi_i32(TCGCond cond, TCGv_i32 arg1, int32_t arg2, TCGLabel *l)
     if (cond == TCG_COND_ALWAYS) {
         tcg_gen_br(l);
     } else if (cond != TCG_COND_NEVER) {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_brcond_i32(cond, arg1, t0, l);
-        tcg_temp_free_i32(t0);
+        tcg_gen_brcond_i32(cond, arg1, tcg_constant_i32(arg2), l);
     }
 }
 
@@ -271,9 +256,7 @@ void tcg_gen_setcond_i32(TCGCond cond, TCGv_i32 ret,
 void tcg_gen_setcondi_i32(TCGCond cond, TCGv_i32 ret,
                           TCGv_i32 arg1, int32_t arg2)
 {
-    TCGv_i32 t0 = tcg_const_i32(arg2);
-    tcg_gen_setcond_i32(cond, ret, arg1, t0);
-    tcg_temp_free_i32(t0);
+    tcg_gen_setcond_i32(cond, ret, arg1, tcg_constant_i32(arg2));
 }
 
 void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
@@ -283,9 +266,7 @@ void tcg_gen_muli_i32(TCGv_i32 ret, TCGv_i32 arg1, int32_t arg2)
     } else if (is_power_of_2(arg2)) {
         tcg_gen_shli_i32(ret, arg1, ctz32(arg2));
     } else {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_mul_i32(ret, arg1, t0);
-        tcg_temp_free_i32(t0);
+        tcg_gen_mul_i32(ret, arg1, tcg_constant_i32(arg2));
     }
 }
 
@@ -433,9 +414,7 @@ void tcg_gen_clz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
 
 void tcg_gen_clzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
 {
-    TCGv_i32 t = tcg_const_i32(arg2);
-    tcg_gen_clz_i32(ret, arg1, t);
-    tcg_temp_free_i32(t);
+    tcg_gen_clz_i32(ret, arg1, tcg_constant_i32(arg2));
 }
 
 void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
@@ -468,10 +447,9 @@ void tcg_gen_ctz_i32(TCGv_i32 ret, TCGv_i32 arg1, TCGv_i32 arg2)
             tcg_gen_clzi_i32(t, t, 32);
             tcg_gen_xori_i32(t, t, 31);
         }
-        z = tcg_const_i32(0);
+        z = tcg_constant_i32(0);
         tcg_gen_movcond_i32(TCG_COND_EQ, ret, arg1, z, arg2, t);
         tcg_temp_free_i32(t);
-        tcg_temp_free_i32(z);
     } else {
         gen_helper_ctz_i32(ret, arg1, arg2);
     }
@@ -487,9 +465,7 @@ void tcg_gen_ctzi_i32(TCGv_i32 ret, TCGv_i32 arg1, uint32_t arg2)
         tcg_gen_ctpop_i32(ret, t);
         tcg_temp_free_i32(t);
     } else {
-        TCGv_i32 t = tcg_const_i32(arg2);
-        tcg_gen_ctz_i32(ret, arg1, t);
-        tcg_temp_free_i32(t);
+        tcg_gen_ctz_i32(ret, arg1, tcg_constant_i32(arg2));
     }
 }
 
@@ -547,9 +523,7 @@ void tcg_gen_rotli_i32(TCGv_i32 ret, TCGv_i32 arg1, unsigned arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i32(ret, arg1);
     } else if (TCG_TARGET_HAS_rot_i32) {
-        TCGv_i32 t0 = tcg_const_i32(arg2);
-        tcg_gen_rotl_i32(ret, arg1, t0);
-        tcg_temp_free_i32(t0);
+        tcg_gen_rotl_i32(ret, arg1, tcg_constant_i32(arg2));
     } else {
         TCGv_i32 t0, t1;
         t0 = tcg_temp_new_i32();
@@ -653,9 +627,8 @@ void tcg_gen_deposit_z_i32(TCGv_i32 ret, TCGv_i32 arg,
         tcg_gen_andi_i32(ret, arg, (1u << len) - 1);
     } else if (TCG_TARGET_HAS_deposit_i32
                && TCG_TARGET_deposit_i32_valid(ofs, len)) {
-        TCGv_i32 zero = tcg_const_i32(0);
+        TCGv_i32 zero = tcg_constant_i32(0);
         tcg_gen_op5ii_i32(INDEX_op_deposit_i32, ret, zero, arg, ofs, len);
-        tcg_temp_free_i32(zero);
     } else {
         /* To help two-operand hosts we prefer to zero-extend first,
            which allows ARG to stay live.  */
@@ -1052,7 +1025,7 @@ void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
     } else {
         TCGv_i32 t0 = tcg_temp_new_i32();
         TCGv_i32 t1 = tcg_temp_new_i32();
-        TCGv_i32 t2 = tcg_const_i32(0x00ff00ff);
+        TCGv_i32 t2 = tcg_constant_i32(0x00ff00ff);
 
                                         /* arg = abcd */
         tcg_gen_shri_i32(t0, arg, 8);   /*  t0 = .abc */
@@ -1067,7 +1040,6 @@ void tcg_gen_bswap32_i32(TCGv_i32 ret, TCGv_i32 arg)
 
         tcg_temp_free_i32(t0);
         tcg_temp_free_i32(t1);
-        tcg_temp_free_i32(t2);
     }
 }
 
@@ -1237,6 +1209,14 @@ void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
     tcg_temp_free_i64(t0);
     tcg_temp_free_i32(t1);
 }
+
+#else
+
+void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
+{
+    tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
+}
+
 #endif /* TCG_TARGET_REG_SIZE == 32 */
 
 void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
@@ -1244,10 +1224,12 @@ void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     /* some cases can be optimized here */
     if (arg2 == 0) {
         tcg_gen_mov_i64(ret, arg1);
+    } else if (TCG_TARGET_REG_BITS == 64) {
+        tcg_gen_add_i64(ret, arg1, tcg_constant_i64(arg2));
     } else {
-        TCGv_i64 t0 = tcg_const_i64(arg2);
-        tcg_gen_add_i64(ret, arg1, t0);
-        tcg_temp_free_i64(t0);
+        tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
+                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
+                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
     }
 }
 
@@ -1256,10 +1238,12 @@ void tcg_gen_subfi_i64(TCGv_i64 ret, int64_t arg1, TCGv_i64 arg2)
     if (arg1 == 0 && TCG_TARGET_HAS_neg_i64) {
         /* Don't recurse with tcg_gen_neg_i64.  */
         tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg2);
+    } else if (TCG_TARGET_REG_BITS == 64) {
+        tcg_gen_sub_i64(ret, tcg_constant_i64(arg1), arg2);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(arg1);
-        tcg_gen_sub_i64(ret, t0, arg2);
-        tcg_temp_free_i64(t0);
+        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
+                         tcg_constant_i32(arg1), tcg_constant_i32(arg1 >> 32),
+                         TCGV_LOW(arg2), TCGV_HIGH(arg2));
     }
 }
 
@@ -1268,17 +1252,17 @@ void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     /* some cases can be optimized here */
     if (arg2 == 0) {
         tcg_gen_mov_i64(ret, arg1);
+    } else if (TCG_TARGET_REG_BITS == 64) {
+        tcg_gen_sub_i64(ret, arg1, tcg_constant_i64(arg2));
     } else {
-        TCGv_i64 t0 = tcg_const_i64(arg2);
-        tcg_gen_sub_i64(ret, arg1, t0);
-        tcg_temp_free_i64(t0);
+        tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret),
+                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
+                         tcg_constant_i32(arg2), tcg_constant_i32(arg2 >> 32));
     }
 }
 
 void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
 {
-    TCGv_i64 t0;
-
     if (TCG_TARGET_REG_BITS == 32) {
         tcg_gen_andi_i32(TCGV_LOW(ret), TCGV_LOW(arg1), arg2);
         tcg_gen_andi_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), arg2 >> 32);
@@ -1313,9 +1297,8 @@ void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
         }
         break;
     }
-    t0 = tcg_const_i64(arg2);
-    tcg_gen_and_i64(ret, arg1, t0);
-    tcg_temp_free_i64(t0);
+
+    tcg_gen_and_i64(ret, arg1, tcg_constant_i64(arg2));
 }
 
 void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
@@ -1331,9 +1314,7 @@ void tcg_gen_ori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     } else if (arg2 == 0) {
         tcg_gen_mov_i64(ret, arg1);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(arg2);
-        tcg_gen_or_i64(ret, arg1, t0);
-        tcg_temp_free_i64(t0);
+        tcg_gen_or_i64(ret, arg1, tcg_constant_i64(arg2));
     }
 }
 
@@ -1351,9 +1332,7 @@ void tcg_gen_xori_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
         /* Don't recurse with tcg_gen_not_i64.  */
         tcg_gen_op2_i64(INDEX_op_not_i64, ret, arg1);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(arg2);
-        tcg_gen_xor_i64(ret, arg1, t0);
-        tcg_temp_free_i64(t0);
+        tcg_gen_xor_i64(ret, arg1, tcg_constant_i64(arg2));
     }
 }
 
@@ -1415,9 +1394,7 @@ void tcg_gen_shli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     } else if (arg2 == 0) {
         tcg_gen_mov_i64(ret, arg1);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(arg2);
-        tcg_gen_shl_i64(ret, arg1, t0);
-        tcg_temp_free_i64(t0);
+        tcg_gen_shl_i64(ret, arg1, tcg_constant_i64(arg2));
     }
 }
 
@@ -1429,9 +1406,7 @@ void tcg_gen_shri_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     } else if (arg2 == 0) {
         tcg_gen_mov_i64(ret, arg1);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(arg2);
-        tcg_gen_shr_i64(ret, arg1, t0);
-        tcg_temp_free_i64(t0);
+        tcg_gen_shr_i64(ret, arg1, tcg_constant_i64(arg2));
     }
 }
 
@@ -1443,9 +1418,7 @@ void tcg_gen_sari_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
     } else if (arg2 == 0) {
         tcg_gen_mov_i64(ret, arg1);
     } else {
-        TCGv_i64 t0 = tcg_const_i64(arg2);
-        tcg_gen_sar_i64(ret, arg1, t0);
-        tcg_temp_free_i64(t0);
+        tcg_gen_sar_i64(ret, arg1, tcg_constant_i64(arg2));
     }
 }
 
@@ -1468,12 +1441,17 @@ void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
 
 void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
 {
-    if (cond == TCG_COND_ALWAYS) {
+    if (TCG_TARGET_REG_BITS == 64) {
+        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
+    } else if (cond == TCG_COND_ALWAYS) {
         tcg_gen_br(l);
     } else if (cond != TCG_COND_NEVER) {
-        TCGv_i64 t0 = tcg_const_i64(arg2);
-        tcg_gen_brcond_i64(cond, arg1, t0, l);
-        tcg_temp_free_i64(t0);
+        l->refs++;
+        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
+                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
+                          tcg_constant_i32(arg2),
+                          tcg_constant_i32(arg2 >> 32),
+                          cond, label_arg(l));
     }
 }
 
@@ -1499,9 +1477,19 @@ void tcg_gen_setcond_i64(TCGCond cond, TCGv_i64 ret,
 void tcg_gen_setcondi_i64(TCGCond cond, TCGv_i64 ret,
                           TCGv_i64 arg1, int64_t arg2)
 {
-    TCGv_i64 t0 = tcg_const_i64(arg2);
-    tcg_gen_setcond_i64(cond, ret, arg1, t0);
-    tcg_temp_free_i64(t0);
+    if (TCG_TARGET_REG_BITS == 64) {
+        tcg_gen_setcond_i64(cond, ret, arg1, tcg_constant_i64(arg2));
+    } else if (cond == TCG_COND_ALWAYS) {
+        tcg_gen_movi_i64(ret, 1);
+    } else if (cond == TCG_COND_NEVER) {
+        tcg_gen_movi_i64(ret, 0);
+    } else {
+        tcg_gen_op6i_i32(INDEX_op_setcond2_i32, TCGV_LOW(ret),
+                         TCGV_LOW(arg1), TCGV_HIGH(arg1),
+                         tcg_constant_i32(arg2),
+                         tcg_constant_i32(arg2 >> 32), cond);
+        tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+    }
 }
 
 void tcg_gen_muli_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
@@ -1690,7 +1678,7 @@ void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
     } else {
         TCGv_i64 t0 = tcg_temp_new_i64();
         TCGv_i64 t1 = tcg_temp_new_i64();
-        TCGv_i64 t2 = tcg_const_i64(0x00ff00ff);
+        TCGv_i64 t2 = tcg_constant_i64(0x00ff00ff);
 
                                         /* arg = ....abcd */
         tcg_gen_shri_i64(t0, arg, 8);   /*  t0 = .....abc */
@@ -1706,7 +1694,6 @@ void tcg_gen_bswap32_i64(TCGv_i64 ret, TCGv_i64 arg)
 
         tcg_temp_free_i64(t0);
         tcg_temp_free_i64(t1);
-        tcg_temp_free_i64(t2);
     }
 }
 
@@ -1850,16 +1837,16 @@ void tcg_gen_clzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
     if (TCG_TARGET_REG_BITS == 32
         && TCG_TARGET_HAS_clz_i32
         && arg2 <= 0xffffffffu) {
-        TCGv_i32 t = tcg_const_i32((uint32_t)arg2 - 32);
-        tcg_gen_clz_i32(t, TCGV_LOW(arg1), t);
+        TCGv_i32 t = tcg_temp_new_i32();
+        tcg_gen_clzi_i32(t, TCGV_LOW(arg1), arg2 - 32);
         tcg_gen_addi_i32(t, t, 32);
         tcg_gen_clz_i32(TCGV_LOW(ret), TCGV_HIGH(arg1), t);
         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
         tcg_temp_free_i32(t);
     } else {
-        TCGv_i64 t = tcg_const_i64(arg2);
-        tcg_gen_clz_i64(ret, arg1, t);
-        tcg_temp_free_i64(t);
+        TCGv_i64 t0 = tcg_const_i64(arg2);
+        tcg_gen_clz_i64(ret, arg1, t0);
+        tcg_temp_free_i64(t0);
     }
 }
 
@@ -1881,7 +1868,7 @@ void tcg_gen_ctz_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
             tcg_gen_clzi_i64(t, t, 64);
             tcg_gen_xori_i64(t, t, 63);
         }
-        z = tcg_const_i64(0);
+        z = tcg_constant_i64(0);
         tcg_gen_movcond_i64(TCG_COND_EQ, ret, arg1, z, arg2, t);
         tcg_temp_free_i64(t);
         tcg_temp_free_i64(z);
@@ -1895,8 +1882,8 @@ void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
     if (TCG_TARGET_REG_BITS == 32
         && TCG_TARGET_HAS_ctz_i32
         && arg2 <= 0xffffffffu) {
-        TCGv_i32 t32 = tcg_const_i32((uint32_t)arg2 - 32);
-        tcg_gen_ctz_i32(t32, TCGV_HIGH(arg1), t32);
+        TCGv_i32 t32 = tcg_temp_new_i32();
+        tcg_gen_ctzi_i32(t32, TCGV_HIGH(arg1), arg2 - 32);
         tcg_gen_addi_i32(t32, t32, 32);
         tcg_gen_ctz_i32(TCGV_LOW(ret), TCGV_LOW(arg1), t32);
         tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
@@ -1911,9 +1898,9 @@ void tcg_gen_ctzi_i64(TCGv_i64 ret, TCGv_i64 arg1, uint64_t arg2)
         tcg_gen_ctpop_i64(ret, t);
         tcg_temp_free_i64(t);
     } else {
-        TCGv_i64 t64 = tcg_const_i64(arg2);
-        tcg_gen_ctz_i64(ret, arg1, t64);
-        tcg_temp_free_i64(t64);
+        TCGv_i64 t0 = tcg_const_i64(arg2);
+        tcg_gen_ctz_i64(ret, arg1, t0);
+        tcg_temp_free_i64(t0);
     }
 }
 
@@ -1969,9 +1956,7 @@ void tcg_gen_rotli_i64(TCGv_i64 ret, TCGv_i64 arg1, unsigned arg2)
     if (arg2 == 0) {
         tcg_gen_mov_i64(ret, arg1);
     } else if (TCG_TARGET_HAS_rot_i64) {
-        TCGv_i64 t0 = tcg_const_i64(arg2);
-        tcg_gen_rotl_i64(ret, arg1, t0);
-        tcg_temp_free_i64(t0);
+        tcg_gen_rotl_i64(ret, arg1, tcg_constant_i64(arg2));
     } else {
         TCGv_i64 t0, t1;
         t0 = tcg_temp_new_i64();
@@ -2089,9 +2074,8 @@ void tcg_gen_deposit_z_i64(TCGv_i64 ret, TCGv_i64 arg,
         tcg_gen_andi_i64(ret, arg, (1ull << len) - 1);
     } else if (TCG_TARGET_HAS_deposit_i64
                && TCG_TARGET_deposit_i64_valid(ofs, len)) {
-        TCGv_i64 zero = tcg_const_i64(0);
+        TCGv_i64 zero = tcg_constant_i64(0);
         tcg_gen_op5ii_i64(INDEX_op_deposit_i64, ret, zero, arg, ofs, len);
-        tcg_temp_free_i64(zero);
     } else {
         if (TCG_TARGET_REG_BITS == 32) {
             if (ofs >= 32) {
@@ -3102,9 +3086,8 @@ void tcg_gen_atomic_cmpxchg_i32(TCGv_i32 retv, TCGv addr, TCGv_i32 cmpv,
 
 #ifdef CONFIG_SOFTMMU
         {
-            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
-            gen(retv, cpu_env, addr, cmpv, newv, oi);
-            tcg_temp_free_i32(oi);
+            TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
+            gen(retv, cpu_env, addr, cmpv, newv, tcg_constant_i32(oi));
         }
 #else
         gen(retv, cpu_env, addr, cmpv, newv);
@@ -3147,9 +3130,8 @@ void tcg_gen_atomic_cmpxchg_i64(TCGv_i64 retv, TCGv addr, TCGv_i64 cmpv,
 
 #ifdef CONFIG_SOFTMMU
         {
-            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop, idx));
-            gen(retv, cpu_env, addr, cmpv, newv, oi);
-            tcg_temp_free_i32(oi);
+            TCGMemOpIdx oi = make_memop_idx(memop, idx);
+            gen(retv, cpu_env, addr, cmpv, newv, tcg_constant_i32(oi));
         }
 #else
         gen(retv, cpu_env, addr, cmpv, newv);
@@ -3210,9 +3192,8 @@ static void do_atomic_op_i32(TCGv_i32 ret, TCGv addr, TCGv_i32 val,
 
 #ifdef CONFIG_SOFTMMU
     {
-        TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
-        gen(ret, cpu_env, addr, val, oi);
-        tcg_temp_free_i32(oi);
+        TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
+        gen(ret, cpu_env, addr, val, tcg_constant_i32(oi));
     }
 #else
     gen(ret, cpu_env, addr, val);
@@ -3255,9 +3236,8 @@ static void do_atomic_op_i64(TCGv_i64 ret, TCGv addr, TCGv_i64 val,
 
 #ifdef CONFIG_SOFTMMU
         {
-            TCGv_i32 oi = tcg_const_i32(make_memop_idx(memop & ~MO_SIGN, idx));
-            gen(ret, cpu_env, addr, val, oi);
-            tcg_temp_free_i32(oi);
+            TCGMemOpIdx oi = make_memop_idx(memop & ~MO_SIGN, idx);
+            gen(ret, cpu_env, addr, val, tcg_constant_i32(oi));
         }
 #else
         gen(ret, cpu_env, addr, val);
-- 
2.20.1


Re: [PATCH v2 13/36] tcg: Use tcg_constant_{i32,i64} with tcg int expanders
Posted by Alex Bennée 5 years, 9 months ago
Richard Henderson <richard.henderson@linaro.org> writes:

> Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
> ---
>  include/tcg/tcg-op.h |  13 +--
>  tcg/tcg-op.c         | 216 ++++++++++++++++++++-----------------------
>  2 files changed, 100 insertions(+), 129 deletions(-)
>
> diff --git a/include/tcg/tcg-op.h b/include/tcg/tcg-op.h
> index 230db6e022..11ed9192f7 100644
> --- a/include/tcg/tcg-op.h
> +++ b/include/tcg/tcg-op.h
<snip>
> @@ -1468,12 +1441,17 @@ void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2, TCGLabel *l)
>  
>  void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
>  {
> -    if (cond == TCG_COND_ALWAYS) {
> +    if (TCG_TARGET_REG_BITS == 64) {
> +        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
> +    } else if (cond == TCG_COND_ALWAYS) {
>          tcg_gen_br(l);
>      } else if (cond != TCG_COND_NEVER) {
> -        TCGv_i64 t0 = tcg_const_i64(arg2);
> -        tcg_gen_brcond_i64(cond, arg1, t0, l);
> -        tcg_temp_free_i64(t0);
> +        l->refs++;

Hmm is this a separate fix?

> +        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
> +                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
> +                          tcg_constant_i32(arg2),
> +                          tcg_constant_i32(arg2 >> 32),
> +                          cond, label_arg(l));
>      }
>  }
<snip>

otherwise lgtm:

Reviewed-by: Alex Bennée <alex.bennee@linaro.org>

-- 
Alex Bennée

Re: [PATCH v2 13/36] tcg: Use tcg_constant_{i32,i64} with tcg int expanders
Posted by Richard Henderson 5 years, 9 months ago
On 4/22/20 9:18 AM, Alex Bennée wrote:
>>  void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
>>  {
>> -    if (cond == TCG_COND_ALWAYS) {
>> +    if (TCG_TARGET_REG_BITS == 64) {
>> +        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
>> +    } else if (cond == TCG_COND_ALWAYS) {
>>          tcg_gen_br(l);
>>      } else if (cond != TCG_COND_NEVER) {
>> -        TCGv_i64 t0 = tcg_const_i64(arg2);
>> -        tcg_gen_brcond_i64(cond, arg1, t0, l);
>> -        tcg_temp_free_i64(t0);
>> +        l->refs++;
> 
> Hmm is this a separate fix?

No, it's expanding what tcg_gen_brcond_i64 would do for TCG_TARGET_REG_BITS == 32.

>> +        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
>> +                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
>> +                          tcg_constant_i32(arg2),
>> +                          tcg_constant_i32(arg2 >> 32),
>> +                          cond, label_arg(l));

Because we have two separate TCGv_i32 values from tcg_constant_i32(), which
cannot be packaged up into a single TCGv_i64 via TCGV_HIGH/TCGV_LOW.
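
For reference, the pre-existing tcg_gen_brcond_i64 expansion looks
roughly like this (a sketch from memory, not verbatim tcg-op.c); on a
32-bit host it takes the l->refs++ / brcond2_i32 path and pulls the
halves of each operand out with TCGV_LOW/TCGV_HIGH, which is why
brcondi_i64 above has to open-code the same emission when all it has
is two separate i32 constants:

  void tcg_gen_brcond_i64(TCGCond cond, TCGv_i64 arg1, TCGv_i64 arg2,
                          TCGLabel *l)
  {
      if (cond == TCG_COND_ALWAYS) {
          tcg_gen_br(l);
      } else if (cond != TCG_COND_NEVER) {
          /* Any real conditional branch bumps the label's refcount.  */
          l->refs++;
          if (TCG_TARGET_REG_BITS == 32) {
              /* 32-bit host: compare both halves with brcond2.  */
              tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
                                TCGV_LOW(arg1), TCGV_HIGH(arg1),
                                TCGV_LOW(arg2), TCGV_HIGH(arg2),
                                cond, label_arg(l));
          } else {
              tcg_gen_op4ii_i64(INDEX_op_brcond_i64, arg1, arg2,
                                cond, label_arg(l));
          }
      }
  }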


r~

Re: [PATCH v2 13/36] tcg: Use tcg_constant_{i32,i64} with tcg int expanders
Posted by Alex Bennée 5 years, 9 months ago
Richard Henderson <richard.henderson@linaro.org> writes:

> On 4/22/20 9:18 AM, Alex Bennée wrote:
>>>  void tcg_gen_brcondi_i64(TCGCond cond, TCGv_i64 arg1, int64_t arg2, TCGLabel *l)
>>>  {
>>> -    if (cond == TCG_COND_ALWAYS) {
>>> +    if (TCG_TARGET_REG_BITS == 64) {
>>> +        tcg_gen_brcond_i64(cond, arg1, tcg_constant_i64(arg2), l);
>>> +    } else if (cond == TCG_COND_ALWAYS) {
>>>          tcg_gen_br(l);
>>>      } else if (cond != TCG_COND_NEVER) {
>>> -        TCGv_i64 t0 = tcg_const_i64(arg2);
>>> -        tcg_gen_brcond_i64(cond, arg1, t0, l);
>>> -        tcg_temp_free_i64(t0);
>>> +        l->refs++;
>> 
>> Hmm is this a separate fix?
>
> No, it's expanding what tcg_gen_brcond_i64 would do for TCG_TARGET_REG_BITS == 32.
>
>>> +        tcg_gen_op6ii_i32(INDEX_op_brcond2_i32,
>>> +                          TCGV_LOW(arg1), TCGV_HIGH(arg1),
>>> +                          tcg_constant_i32(arg2),
>>> +                          tcg_constant_i32(arg2 >> 32),
>>> +                          cond, label_arg(l));
>
> Because we have two separate TCGv_i32, from tcg_constant_i32(), which cannot be
> packaged up with TCGV_HIGH/LOW.
>
>
> r~

OK I see that now - the r-b stands ;-)

-- 
Alex Bennée

Re: [PATCH v2 13/36] tcg: Use tcg_constant_{i32,i64} with tcg int expanders
Posted by Alex Bennée 5 years, 9 months ago
Richard Henderson <richard.henderson@linaro.org> writes:

> Signed-off-by: Richard Henderson <richard.henderson@linaro.org>

We have a regression. Setting up a build dir with:

  ../../configure --disable-tools --disable-docs --target-list=sparc-softmmu,sparc64-softmmu
  make -j30 && make check-acceptance

And then running a bisect between HEAD and master:

  git bisect run /bin/sh -c "cd builds/bisect && make -j30 && ./tests/venv/bin/avocado run ./tests/acceptance/boot_linux_console.py:BootLinuxConsole.test_sparc_ss20"

Fingers:

  a4d42b76dd29818e4f393c4c3eb59601b0015b2f is the first bad commit
  commit a4d42b76dd29818e4f393c4c3eb59601b0015b2f
  Author: Richard Henderson <richard.henderson@linaro.org>
  Date:   Tue Apr 21 18:16:59 2020 -0700

      tcg: Use tcg_constant_{i32,i64} with tcg int expanders

      Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
      Message-Id: <20200422011722.13287-14-richard.henderson@linaro.org>

  :040000 040000 45283ae0961f2794f5f15e09c29f160372fb5fae 92939e91645a5cf4fc36d475ff5dddd0839a7314 M      include
  :040000 040000 1083f94f8f045924fbf1e1f9c116f05827c25345 31a5dfc97636fcd0a114b910095b11cb767a22db M      tcg
  bisect run success

<snip>


-- 
Alex Bennée

Re: [PATCH v2 13/36] tcg: Use tcg_constant_{i32,i64} with tcg int expanders
Posted by Richard Henderson 5 years, 9 months ago
On 4/22/20 1:04 PM, Alex Bennée wrote:
> 
> Richard Henderson <richard.henderson@linaro.org> writes:
> 
>> Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
> 
> We have a regression. Setting up a build dir with:
> 
>   ../../configure --disable-tools --disable-docs --target-list=sparc-softmmu,sparc64-softmmu
>   make -j30 && make check-acceptance
> 
> And then running a bisect between HEAD and master:
> 
>   git bisect run /bin/sh -c "cd builds/bisect && make -j30 && ./tests/venv/bin/avocado run ./tests/acceptance/boot_linux_console.py:BootLinuxConsole.test_sparc_ss20"
> 
> Fingers:
> 
>   a4d42b76dd29818e4f393c4c3eb59601b0015b2f is the first bad commit
>   commit a4d42b76dd29818e4f393c4c3eb59601b0015b2f
>   Author: Richard Henderson <richard.henderson@linaro.org>
>   Date:   Tue Apr 21 18:16:59 2020 -0700
> 
>       tcg: Use tcg_constant_{i32,i64} with tcg int expanders
> 
>       Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
>       Message-Id: <20200422011722.13287-14-richard.henderson@linaro.org>

Ho hum.  I can reproduce this, but after a day of debugging I'm no closer to
figuring out what's wrong than when I started.

I'm going to put this whole section of TEMP_CONST to the side for now.


r~

Re: [PATCH v2 13/36] tcg: Use tcg_constant_{i32,i64} with tcg int expanders
Posted by Alex Bennée 5 years, 9 months ago
Richard Henderson <richard.henderson@linaro.org> writes:

> On 4/22/20 1:04 PM, Alex Bennée wrote:
>> 
>> Richard Henderson <richard.henderson@linaro.org> writes:
>> 
>>> Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
>> 
>> We have a regression. Setting up a build dir with:
>> 
>>   ../../configure --disable-tools --disable-docs --target-list=sparc-softmmu,sparc64-softmmu
>>   make -j30 && make check-acceptance
>> 
>> And then running a bisect between HEAD and master:
>> 
>>   git bisect run /bin/sh -c "cd builds/bisect && make -j30 && ./tests/venv/bin/avocado run ./tests/acceptance/boot_linux_console.py:BootLinuxConsole.test_sparc_ss20"
>> 
>> Fingers:
>> 
>>   a4d42b76dd29818e4f393c4c3eb59601b0015b2f is the first bad commit
>>   commit a4d42b76dd29818e4f393c4c3eb59601b0015b2f
>>   Author: Richard Henderson <richard.henderson@linaro.org>
>>   Date:   Tue Apr 21 18:16:59 2020 -0700
>> 
>>       tcg: Use tcg_constant_{i32,i64} with tcg int expanders
>> 
>>       Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
>>       Message-Id: <20200422011722.13287-14-richard.henderson@linaro.org>
>
> Ho hum.  I can reproduce this, but after a day of debugging I'm no closer to
> figuring out what's wrong than when I started.
>
> I'm going to put this whole section of TEMP_CONST to the side for now.

From my own poking around I can say the hang occurs when you first
introduce just:

  void tcg_gen_movi_i32(TCGv_i32 ret, int32_t arg)
  {
      tcg_gen_mov_i32(ret, tcg_constant_i32(arg));
  }

and nothing else, which indicates the problem has to be in the core
plumbing itself. This is odd because all the other architectures are
fine. I wonder if there is something special about sparc's constant
generation?
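
For reference, the change being bisected boils down to replacing the
old allocate/init/free pattern with a shared constant temp.  The
addi_i32 hunk from the patch is representative (the comments here are
illustrative, not from the patch):

  /* Before: a fresh temporary plus a movi, freed by the caller.  */
  TCGv_i32 t0 = tcg_const_i32(arg2);
  tcg_gen_add_i32(ret, arg1, t0);
  tcg_temp_free_i32(t0);

  /* After: an interned, read-only TEMP_CONST temporary, shared by
   * every use of the same value in the TB and never explicitly
   * freed.  */
  tcg_gen_add_i32(ret, arg1, tcg_constant_i32(arg2));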

Eyeballing the numbers it does seem like sparc generates more negative
numbers than ARM does - although ARM does generate some. I thought I'd
just have a check to see what happens so I looked at the first
occurrence in the sparc test:

  0x00006224:  sethi  %hi(0xffdcf000), %g6
  0x00006228:  mov  %g6, %g6      ! 0xffdcf000
  0x0000622c:  sethi  %hi(0xffd00000), %g4
  0x00006230:  mov  %g4, %g4      ! 0xffd00000
  0x00006234:  sub  %g6, %g4, %g6
  0x00006238:  sub  %g1, %g6, %g3
  0x0000623c:  sethi  %hi(0x1000), %g5
  0x00006240:  sub  %g3, %g5, %g3
  0x00006244:  sub  %g3, %g5, %g3

Which seems to be translated into ops ok:

   ---- 00006224 00006228
   mov_i32 g6,$0xffdcf000

   ---- 00006228 0000622c

   ---- 0000622c 00006230
   mov_i32 g4,$0xffd00000

   ---- 00006230 00006234

   ---- 00006234 00006238
   sub_i32 tmp0,g6,g4
   mov_i32 g6,tmp0

   ---- 00006238 0000623c
   sub_i32 tmp0,g1,g6
   mov_i32 g3,tmp0

   ---- 0000623c 00006240
   mov_i32 g5,$0x1000

   ---- 00006240 00006244
   sub_i32 tmp0,g3,g5
   mov_i32 g3,tmp0

   ---- 00006244 00006248
   sub_i32 tmp0,g3,g5
   mov_i32 g3,tmp0

and it looks like it's doing the expected constant folding here:

   ---- 00006224 00006228

   ---- 00006228 0000622c

   ---- 0000622c 00006230

   ---- 00006230 00006234

   ---- 00006234 00006238
   movi_i32 tmp0,$0xcf000                   pref=0xffff
   mov_i32 g6,tmp0                          dead: 1  pref=0xffff

   ---- 00006238 0000623c
   sub_i32 tmp0,g1,g6                       dead: 1 2  pref=0xffff
   mov_i32 g3,tmp0                          dead: 1  pref=0xffff

   ---- 0000623c 00006240
   mov_i32 g5,$0x1000                       sync: 0  dead: 0  pref=0xffff

   ---- 00006240 00006244
   sub_i32 tmp0,g3,$0x1000                  dead: 1  pref=0xffff
   mov_i32 g3,tmp0                          dead: 1  pref=0xffff

   ---- 00006244 00006248
   sub_i32 tmp0,g3,$0x1000                  dead: 1  pref=0xffff
   mov_i32 g3,tmp0                          sync: 0  dead: 1  pref=0xf038


One other data point is that it is certainly in the optimisation phase
that things go wrong, because:

  //#define USE_TCG_OPTIMIZATIONS

means the test passes.
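
If I remember right, that define sits near the top of tcg/tcg.c and
simply gates the call to the optimiser from tcg_gen_code(), roughly:

  /* tcg/tcg.c, tcg_gen_code(): commenting out USE_TCG_OPTIMIZATIONS
   * above skips this whole pass.  */
  #ifdef USE_TCG_OPTIMIZATIONS
      tcg_optimize(s);
  #endif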


>
>
> r~


-- 
Alex Bennée