This one is more complicated, combining the 32-bit and 64-bit
expansions with a C if instead of a preprocessor #if.
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
Message-Id: <20231029210848.78234-6-richard.henderson@linaro.org>
---
include/tcg/tcg-op-common.h | 144 +---------------------
tcg/tcg-op.c | 231 +++++++++++++++++++++++++-----------
2 files changed, 169 insertions(+), 206 deletions(-)
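
For readers skimming the diff: below is a minimal standalone sketch of the
pattern this patch applies, loosely modeled on what tcg_gen_add_i64 becomes.
It is not QEMU code; REG_BITS and emit_add64 are hypothetical stand-ins for
TCG_TARGET_REG_BITS and the real expander. Both arms are written as ordinary
C, and the compiler folds away the dead one because the condition is a
compile-time constant, so no preprocessor #if is needed.

  /* Sketch only -- names are hypothetical, not part of the QEMU tree. */
  #include <stdint.h>
  #include <stdio.h>

  #define REG_BITS 64   /* stand-in for TCG_TARGET_REG_BITS */

  static void emit_add64(uint64_t *ret, uint64_t a, uint64_t b)
  {
      if (REG_BITS == 64) {
          /* single native 64-bit operation */
          *ret = a + b;
      } else {
          /* expand into two 32-bit halves, propagating the carry */
          uint32_t lo = (uint32_t)a + (uint32_t)b;
          uint32_t hi = (uint32_t)(a >> 32) + (uint32_t)(b >> 32) +
                        (lo < (uint32_t)a);
          *ret = ((uint64_t)hi << 32) | lo;
      }
  }

  int main(void)
  {
      uint64_t r;
      emit_add64(&r, 0x00000001ffffffffull, 1);
      printf("%llx\n", (unsigned long long)r);   /* prints 200000000 */
      return 0;
  }

Either value of REG_BITS compiles to the same single function, which is what
lets the header declarations below stop depending on TCG_TARGET_REG_BITS.
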
diff --git a/include/tcg/tcg-op-common.h b/include/tcg/tcg-op-common.h
index cdaa1415d1..f5ec54f874 100644
--- a/include/tcg/tcg-op-common.h
+++ b/include/tcg/tcg-op-common.h
@@ -305,130 +305,6 @@ void tcg_gen_abs_i64(TCGv_i64, TCGv_i64);
/* Replicate a value of size @vece from @in to all the lanes in @out */
void tcg_gen_dup_i64(unsigned vece, TCGv_i64 out, TCGv_i64 in);
-#if TCG_TARGET_REG_BITS == 64
-static inline void tcg_gen_discard_i64(TCGv_i64 arg)
-{
- tcg_gen_op1_i64(INDEX_op_discard, arg);
-}
-
-static inline void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
-{
- if (ret != arg) {
- tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
- }
-}
-
-static inline void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
-}
-
-static inline void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
-}
-
-static inline void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
-}
-
-static inline void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
-}
-
-static inline void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
-}
-
-static inline void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
-}
-
-static inline void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
-}
-
-static inline void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
-}
-
-static inline void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
-}
-
-static inline void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
-}
-
-static inline void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2,
- tcg_target_long offset)
-{
- tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
-}
-
-static inline void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
-}
-
-static inline void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
-{
- tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
-}
-#else /* TCG_TARGET_REG_BITS == 32 */
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset);
void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset);
void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset);
@@ -453,16 +329,8 @@ void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2);
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2);
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2);
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2);
-#endif /* TCG_TARGET_REG_BITS */
+void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg);
-static inline void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
-{
- if (TCG_TARGET_HAS_neg_i64) {
- tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
- } else {
- tcg_gen_subfi_i64(ret, 0, arg);
- }
-}
/* Size changing operations. */
@@ -473,19 +341,17 @@ void tcg_gen_extrl_i64_i32(TCGv_i32 ret, TCGv_i64 arg);
void tcg_gen_extrh_i64_i32(TCGv_i32 ret, TCGv_i64 arg);
void tcg_gen_extr_i64_i32(TCGv_i32 lo, TCGv_i32 hi, TCGv_i64 arg);
void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg);
+void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi);
-void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src);
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg);
void tcg_gen_concat_i64_i128(TCGv_i128 ret, TCGv_i64 lo, TCGv_i64 hi);
+/* 128 bit ops */
+
+void tcg_gen_mov_i128(TCGv_i128 dst, TCGv_i128 src);
void tcg_gen_ld_i128(TCGv_i128 ret, TCGv_ptr base, tcg_target_long offset);
void tcg_gen_st_i128(TCGv_i128 val, TCGv_ptr base, tcg_target_long offset);
-static inline void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi)
-{
- tcg_gen_deposit_i64(ret, lo, hi, 32, 32);
-}
-
/* Local load/store bit ops */
void tcg_gen_qemu_ld_i32_chk(TCGv_i32, TCGTemp *, TCGArg, MemOp, TCGType);
diff --git a/tcg/tcg-op.c b/tcg/tcg-op.c
index ab6281ebec..579a2aab15 100644
--- a/tcg/tcg-op.c
+++ b/tcg/tcg-op.c
@@ -1503,152 +1503,238 @@ void tcg_gen_st_i32(TCGv_i32 arg1, TCGv_ptr arg2, tcg_target_long offset)
/* 64-bit ops */
-#if TCG_TARGET_REG_BITS == 32
-/* These are all inline for TCG_TARGET_REG_BITS == 64. */
-
void tcg_gen_discard_i64(TCGv_i64 arg)
{
- tcg_gen_discard_i32(TCGV_LOW(arg));
- tcg_gen_discard_i32(TCGV_HIGH(arg));
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op1_i64(INDEX_op_discard, arg);
+ } else {
+ tcg_gen_discard_i32(TCGV_LOW(arg));
+ tcg_gen_discard_i32(TCGV_HIGH(arg));
+ }
}
void tcg_gen_mov_i64(TCGv_i64 ret, TCGv_i64 arg)
{
- TCGTemp *ts = tcgv_i64_temp(arg);
-
- /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
- if (ts->kind == TEMP_CONST) {
- tcg_gen_movi_i64(ret, ts->val);
+ if (ret == arg) {
+ return;
+ }
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op2_i64(INDEX_op_mov_i64, ret, arg);
} else {
- tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
- tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
+ TCGTemp *ts = tcgv_i64_temp(arg);
+
+ /* Canonicalize TCGv_i64 TEMP_CONST into TCGv_i32 TEMP_CONST. */
+ if (ts->kind == TEMP_CONST) {
+ tcg_gen_movi_i64(ret, ts->val);
+ } else {
+ tcg_gen_mov_i32(TCGV_LOW(ret), TCGV_LOW(arg));
+ tcg_gen_mov_i32(TCGV_HIGH(ret), TCGV_HIGH(arg));
+ }
}
}
void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
{
- tcg_gen_movi_i32(TCGV_LOW(ret), arg);
- tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
+ } else {
+ tcg_gen_movi_i32(TCGV_LOW(ret), arg);
+ tcg_gen_movi_i32(TCGV_HIGH(ret), arg >> 32);
+ }
}
void tcg_gen_ld8u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
- tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_ld8u_i64, ret, arg2, offset);
+ } else {
+ tcg_gen_ld8u_i32(TCGV_LOW(ret), arg2, offset);
+ tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+ }
}
void tcg_gen_ld8s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
- tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_ld8s_i64, ret, arg2, offset);
+ } else {
+ tcg_gen_ld8s_i32(TCGV_LOW(ret), arg2, offset);
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
+ }
}
void tcg_gen_ld16u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
- tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_ld16u_i64, ret, arg2, offset);
+ } else {
+ tcg_gen_ld16u_i32(TCGV_LOW(ret), arg2, offset);
+ tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+ }
}
void tcg_gen_ld16s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
- tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_ld16s_i64, ret, arg2, offset);
+ } else {
+ tcg_gen_ld16s_i32(TCGV_LOW(ret), arg2, offset);
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
+ }
}
void tcg_gen_ld32u_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
- tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_ld32u_i64, ret, arg2, offset);
+ } else {
+ tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
+ tcg_gen_movi_i32(TCGV_HIGH(ret), 0);
+ }
}
void tcg_gen_ld32s_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
- tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_ld32s_i64, ret, arg2, offset);
+ } else {
+ tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
+ tcg_gen_sari_i32(TCGV_HIGH(ret), TCGV_LOW(ret), 31);
+ }
}
void tcg_gen_ld_i64(TCGv_i64 ret, TCGv_ptr arg2, tcg_target_long offset)
{
- /* Since arg2 and ret have different types,
- they cannot be the same temporary */
-#if HOST_BIG_ENDIAN
- tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
- tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
-#else
- tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
- tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
-#endif
+ /*
+ * For 32-bit host, since arg2 and ret have different types,
+ * they cannot be the same temporary -- no chance of overlap.
+ */
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_ld_i64, ret, arg2, offset);
+ } else if (HOST_BIG_ENDIAN) {
+ tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset);
+ tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset + 4);
+ } else {
+ tcg_gen_ld_i32(TCGV_LOW(ret), arg2, offset);
+ tcg_gen_ld_i32(TCGV_HIGH(ret), arg2, offset + 4);
+ }
}
void tcg_gen_st8_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_st8_i64, arg1, arg2, offset);
+ } else {
+ tcg_gen_st8_i32(TCGV_LOW(arg1), arg2, offset);
+ }
}
void tcg_gen_st16_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_st16_i64, arg1, arg2, offset);
+ } else {
+ tcg_gen_st16_i32(TCGV_LOW(arg1), arg2, offset);
+ }
}
void tcg_gen_st32_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
- tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_st32_i64, arg1, arg2, offset);
+ } else {
+ tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
+ }
}
void tcg_gen_st_i64(TCGv_i64 arg1, TCGv_ptr arg2, tcg_target_long offset)
{
-#if HOST_BIG_ENDIAN
- tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
- tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
-#else
- tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
- tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
-#endif
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_ldst_op_i64(INDEX_op_st_i64, arg1, arg2, offset);
+ } else if (HOST_BIG_ENDIAN) {
+ tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset);
+ tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset + 4);
+ } else {
+ tcg_gen_st_i32(TCGV_LOW(arg1), arg2, offset);
+ tcg_gen_st_i32(TCGV_HIGH(arg1), arg2, offset + 4);
+ }
}
void tcg_gen_add_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
- TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op3_i64(INDEX_op_add_i64, ret, arg1, arg2);
+ } else {
+ tcg_gen_add2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
+ TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
+ }
}
void tcg_gen_sub_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
- TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op3_i64(INDEX_op_sub_i64, ret, arg1, arg2);
+ } else {
+ tcg_gen_sub2_i32(TCGV_LOW(ret), TCGV_HIGH(ret), TCGV_LOW(arg1),
+ TCGV_HIGH(arg1), TCGV_LOW(arg2), TCGV_HIGH(arg2));
+ }
}
void tcg_gen_and_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
- tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op3_i64(INDEX_op_and_i64, ret, arg1, arg2);
+ } else {
+ tcg_gen_and_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
+ tcg_gen_and_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
+ }
}
void tcg_gen_or_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
- tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op3_i64(INDEX_op_or_i64, ret, arg1, arg2);
+ } else {
+ tcg_gen_or_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
+ tcg_gen_or_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
+ }
}
void tcg_gen_xor_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
- tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op3_i64(INDEX_op_xor_i64, ret, arg1, arg2);
+ } else {
+ tcg_gen_xor_i32(TCGV_LOW(ret), TCGV_LOW(arg1), TCGV_LOW(arg2));
+ tcg_gen_xor_i32(TCGV_HIGH(ret), TCGV_HIGH(arg1), TCGV_HIGH(arg2));
+ }
}
void tcg_gen_shl_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- gen_helper_shl_i64(ret, arg1, arg2);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op3_i64(INDEX_op_shl_i64, ret, arg1, arg2);
+ } else {
+ gen_helper_shl_i64(ret, arg1, arg2);
+ }
}
void tcg_gen_shr_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- gen_helper_shr_i64(ret, arg1, arg2);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op3_i64(INDEX_op_shr_i64, ret, arg1, arg2);
+ } else {
+ gen_helper_shr_i64(ret, arg1, arg2);
+ }
}
void tcg_gen_sar_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
{
- gen_helper_sar_i64(ret, arg1, arg2);
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op3_i64(INDEX_op_sar_i64, ret, arg1, arg2);
+ } else {
+ gen_helper_sar_i64(ret, arg1, arg2);
+ }
}
void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
@@ -1656,6 +1742,12 @@ void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
TCGv_i64 t0;
TCGv_i32 t1;
+ if (TCG_TARGET_REG_BITS == 64) {
+ tcg_gen_op3_i64(INDEX_op_mul_i64, ret, arg1, arg2);
+ return;
+ }
+
+
t0 = tcg_temp_ebb_new_i64();
t1 = tcg_temp_ebb_new_i32();
@@ -1672,15 +1764,6 @@ void tcg_gen_mul_i64(TCGv_i64 ret, TCGv_i64 arg1, TCGv_i64 arg2)
tcg_temp_free_i32(t1);
}
-#else
-
-void tcg_gen_movi_i64(TCGv_i64 ret, int64_t arg)
-{
- tcg_gen_mov_i64(ret, tcg_constant_i64(arg));
-}
-
-#endif /* TCG_TARGET_REG_SIZE == 32 */
-
void tcg_gen_addi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
/* some cases can be optimized here */
@@ -1723,6 +1806,15 @@ void tcg_gen_subi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
}
}
+void tcg_gen_neg_i64(TCGv_i64 ret, TCGv_i64 arg)
+{
+ if (TCG_TARGET_HAS_neg_i64) {
+ tcg_gen_op2_i64(INDEX_op_neg_i64, ret, arg);
+ } else {
+ tcg_gen_subfi_i64(ret, 0, arg);
+ }
+}
+
void tcg_gen_andi_i64(TCGv_i64 ret, TCGv_i64 arg1, int64_t arg2)
{
if (TCG_TARGET_REG_BITS == 32) {
@@ -3218,6 +3310,11 @@ void tcg_gen_extr32_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i64 arg)
tcg_gen_shri_i64(hi, arg, 32);
}
+void tcg_gen_concat32_i64(TCGv_i64 ret, TCGv_i64 lo, TCGv_i64 hi)
+{
+ tcg_gen_deposit_i64(ret, lo, hi, 32, 32);
+}
+
void tcg_gen_extr_i128_i64(TCGv_i64 lo, TCGv_i64 hi, TCGv_i128 arg)
{
tcg_gen_mov_i64(lo, TCGV128_LOW(arg));
--
2.34.1