v2: Fix target/loongarch printf formats for vaddr
Include two more reviewed patches.

This time with actual pull urls. :-/

r~

The following changes since commit db7aa99ef894e88fc5eedf02ca2579b8c344b2ec:

  Merge tag 'hw-misc-20250216' of https://github.com/philmd/qemu into staging (2025-02-16 20:48:06 -0500)

are available in the Git repository at:

  https://gitlab.com/rth7680/qemu.git tags/pull-tcg-20250215-2

for you to fetch changes up to a39bdd0f4ba96fcbb6b5bcb6e89591d2b24f52eb:

  tcg: Remove TCG_TARGET_HAS_{br,set}cond2 from riscv and loongarch64 (2025-02-17 09:52:07 -0800)

----------------------------------------------------------------
tcg: Remove last traces of TCG_TARGET_NEED_POOL_LABELS
tcg: Cleanups after disallowing 64-on-32
tcg: Introduce constraint for zero register
tcg: Remove TCG_TARGET_HAS_{br,set}cond2 from riscv and loongarch64
tcg/i386: Use tcg_{high,unsigned}_cond in tcg_out_brcond2
linux-user: Move TARGET_SA_RESTORER out of generic/signal.h
linux-user: Fix alignment when unmapping excess reservation
target/sparc: Fix register selection for all F*TOx and FxTO* instructions
target/sparc: Fix gdbstub incorrectly handling registers f32-f62
target/sparc: fake UltraSPARC T1 PCR and PIC registers

----------------------------------------------------------------
Andreas Schwab (1):
      linux-user: Move TARGET_SA_RESTORER out of generic/signal.h

Artyom Tarasenko (1):
      target/sparc: fake UltraSPARC T1 PCR and PIC registers

Fabiano Rosas (1):
      elfload: Fix alignment when unmapping excess reservation

Mikael Szreder (2):
      target/sparc: Fix register selection for all F*TOx and FxTO* instructions
      target/sparc: Fix gdbstub incorrectly handling registers f32-f62

Richard Henderson (22):
      tcg: Remove last traces of TCG_TARGET_NEED_POOL_LABELS
      tcg: Remove TCG_OVERSIZED_GUEST
      tcg: Drop support for two address registers in gen_ldst
      tcg: Merge INDEX_op_qemu_*_{a32,a64}_*
      tcg/arm: Drop addrhi from prepare_host_addr
      tcg/i386: Drop addrhi from prepare_host_addr
      tcg/mips: Drop addrhi from prepare_host_addr
      tcg/ppc: Drop addrhi from prepare_host_addr
      tcg: Replace addr{lo,hi}_reg with addr_reg in TCGLabelQemuLdst
      plugins: Fix qemu_plugin_read_memory_vaddr parameters
      accel/tcg: Fix tlb_set_page_with_attrs, tlb_set_page
      target/loongarch: Use VADDR_PRIx for logging pc_next
      include/exec: Change vaddr to uintptr_t
      include/exec: Use uintptr_t in CPUTLBEntry
      tcg: Introduce the 'z' constraint for a hardware zero register
      tcg/aarch64: Use 'z' constraint
      tcg/loongarch64: Use 'z' constraint
      tcg/mips: Use 'z' constraint
      tcg/riscv: Use 'z' constraint
      tcg/sparc64: Use 'z' constraint
      tcg/i386: Use tcg_{high,unsigned}_cond in tcg_out_brcond2
      tcg: Remove TCG_TARGET_HAS_{br,set}cond2 from riscv and loongarch64

 include/exec/tlb-common.h | 10 +-
 include/exec/vaddr.h | 16 +-
 include/qemu/atomic.h | 18 +-
 include/tcg/oversized-guest.h | 23 ---
 include/tcg/tcg-opc.h | 28 +--
 include/tcg/tcg.h | 3 +-
 linux-user/aarch64/target_signal.h | 2 +
 linux-user/arm/target_signal.h | 2 +
 linux-user/generic/signal.h | 1 -
 linux-user/i386/target_signal.h | 2 +
 linux-user/m68k/target_signal.h | 1 +
 linux-user/microblaze/target_signal.h | 2 +
 linux-user/ppc/target_signal.h | 2 +
 linux-user/s390x/target_signal.h | 2 +
 linux-user/sh4/target_signal.h | 2 +
 linux-user/x86_64/target_signal.h | 2 +
 linux-user/xtensa/target_signal.h | 2 +
 tcg/aarch64/tcg-target-con-set.h | 12 +-
 tcg/aarch64/tcg-target.h | 2 +
 tcg/loongarch64/tcg-target-con-set.h | 15 +-
 tcg/loongarch64/tcg-target-con-str.h | 1 -
 tcg/loongarch64/tcg-target-has.h | 2 -
 tcg/loongarch64/tcg-target.h | 2 +
 tcg/mips/tcg-target-con-set.h | 26 +--
 tcg/mips/tcg-target-con-str.h | 1 -
 tcg/mips/tcg-target.h | 2 +
 tcg/riscv/tcg-target-con-set.h | 10 +-
 tcg/riscv/tcg-target-con-str.h | 1 -
 tcg/riscv/tcg-target-has.h | 2 -
 tcg/riscv/tcg-target.h | 2 +
 tcg/sparc64/tcg-target-con-set.h | 12 +-
 tcg/sparc64/tcg-target-con-str.h | 1 -
 tcg/sparc64/tcg-target.h | 3 +-
 tcg/tci/tcg-target.h | 1 -
 accel/tcg/cputlb.c | 32 +---
 accel/tcg/tcg-all.c | 9 +-
 linux-user/elfload.c | 4 +-
 plugins/api.c | 2 +-
 target/arm/ptw.c | 34 ----
 target/loongarch/tcg/translate.c | 2 +-
 target/riscv/cpu_helper.c | 13 +-
 target/sparc/gdbstub.c | 18 +-
 target/sparc/translate.c | 19 +++
 tcg/optimize.c | 21 +--
 tcg/tcg-op-ldst.c | 103 +++--------
 tcg/tcg.c | 97 +++++------
 tcg/tci.c | 119 +++----------
 docs/devel/multi-thread-tcg.rst | 1 -
 docs/devel/tcg-ops.rst | 4 +-
 target/loongarch/tcg/insn_trans/trans_atomic.c.inc | 2 +-
 target/sparc/insns.decode | 19 ++-
 tcg/aarch64/tcg-target.c.inc | 86 ++++------
 tcg/arm/tcg-target.c.inc | 114 ++++---------
 tcg/i386/tcg-target.c.inc | 190 +++++----------------
 tcg/loongarch64/tcg-target.c.inc | 72 +++-----
 tcg/mips/tcg-target.c.inc | 169 ++++++------------
 tcg/ppc/tcg-target.c.inc | 164 +++++-------------
 tcg/riscv/tcg-target.c.inc | 56 +++---
 tcg/s390x/tcg-target.c.inc | 40 ++---
 tcg/sparc64/tcg-target.c.inc | 45 ++---
 tcg/tci/tcg-target.c.inc | 60 ++-----
 61 files changed, 548 insertions(+), 1160 deletions(-)
 delete mode 100644 include/tcg/oversized-guest.h
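(A note on the 'z' constraint named in the summary above, with a minimal
sketch; the helper below is hypothetical, not the merged implementation.
The idea: on targets with a hardware zero register, an input operand
constrained with 'z' may match constant 0, which the backend then encodes
as that register instead of materializing 0 in a temporary.)

    /* Hypothetical helper: fold a constant-0 input onto the zero register.
     * TCG_REG_ZERO stands for the per-target alias this series adds,
     * e.g. XZR on aarch64 or $zero on mips. */
    static TCGReg input_reg_or_zero(TCGArg val, bool is_const)
    {
        return (is_const && val == 0) ? TCG_REG_ZERO : (TCGReg)val;
    }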
Merge the first set of reviewed patches from my queue.

r~

The following changes since commit 6dd06214892d71cbbdd25daed7693e58afcb1093:

  Merge tag 'pull-hex-20230421' of https://github.com/quic/qemu into staging (2023-04-22 08:31:38 +0100)

are available in the Git repository at:

  https://gitlab.com/rth7680/qemu.git tags/pull-tcg-20230423

for you to fetch changes up to 3ea9be33400f14305565a9a094cb6031c07183d5:

  tcg/riscv: Conditionalize tcg_out_exts_i32_i64 (2023-04-23 08:46:45 +0100)

----------------------------------------------------------------
tcg cleanups:
  - Remove tcg_abort()
  - Split out extensions as known backend interfaces
  - Put the separate extensions together as tcg_out_movext (sketched after the diffstat below)
  - Introduce tcg_out_xchg as a backend interface
  - Clear TCGLabelQemuLdst on allocation
  - Avoid redundant extensions for riscv

----------------------------------------------------------------
Richard Henderson (15):
      tcg: Replace if + tcg_abort with tcg_debug_assert
      tcg: Replace tcg_abort with g_assert_not_reached
      tcg: Split out tcg_out_ext8s
      tcg: Split out tcg_out_ext8u
      tcg: Split out tcg_out_ext16s
      tcg: Split out tcg_out_ext16u
      tcg: Split out tcg_out_ext32s
      tcg: Split out tcg_out_ext32u
      tcg: Split out tcg_out_exts_i32_i64
      tcg: Split out tcg_out_extu_i32_i64
      tcg: Split out tcg_out_extrl_i64_i32
      tcg: Introduce tcg_out_movext
      tcg: Introduce tcg_out_xchg
      tcg: Clear TCGLabelQemuLdst on allocation
      tcg/riscv: Conditionalize tcg_out_exts_i32_i64

 include/tcg/tcg.h | 6 --
 target/i386/tcg/translate.c | 20 +++---
 target/s390x/tcg/translate.c | 4 +-
 tcg/optimize.c | 10 ++-
 tcg/tcg.c | 135 +++++++++++++++++++++++++++++++++++----
 tcg/aarch64/tcg-target.c.inc | 106 +++++++++++++++++++-----------
 tcg/arm/tcg-target.c.inc | 93 +++++++++++++++++----------
 tcg/i386/tcg-target.c.inc | 129 ++++++++++++++++++-------------------
 tcg/loongarch64/tcg-target.c.inc | 123 +++++++++++++----------------------
 tcg/mips/tcg-target.c.inc | 94 +++++++++++++++++++--------
 tcg/ppc/tcg-target.c.inc | 119 ++++++++++++++++++----------------
 tcg/riscv/tcg-target.c.inc | 83 +++++++++++-------------
 tcg/s390x/tcg-target.c.inc | 128 +++++++++++++++++--------------------
 tcg/sparc64/tcg-target.c.inc | 117 +++++++++++++++++++++------------
 tcg/tcg-ldst.c.inc | 1 +
 tcg/tci/tcg-target.c.inc | 116 ++++++++++++++++++++++++++++++---
 16 files changed, 786 insertions(+), 498 deletions(-)
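(The tcg_out_movext item above, sketched minimally. The per-width hook
names match this series, but the exact signature of the merged helper may
differ; treat this as an illustration, not the final API.)

    /* Move src into dst while performing the extension described by
     * src_ext, dispatching to one per-width backend hook. */
    static void movext_sketch(TCGContext *s, TCGType dst_type, TCGReg dst,
                              MemOp src_ext, TCGReg src)
    {
        switch (src_ext) {
        case MO_UB:
            tcg_out_ext8u(s, dst, src);
            break;
        case MO_SB:
            tcg_out_ext8s(s, dst_type, dst, src);
            break;
        case MO_UW:
            tcg_out_ext16u(s, dst, src);
            break;
        case MO_SW:
            tcg_out_ext16s(s, dst_type, dst, src);
            break;
        case MO_UL:
            tcg_out_ext32u(s, dst, src);
            break;
        case MO_SL:
            tcg_out_ext32s(s, dst, src);
            break;
        default:
            g_assert_not_reached();
        }
    }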
Deleted patch
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/tcg.c | 4 +---
 tcg/i386/tcg-target.c.inc | 8 +++-----
 2 files changed, 4 insertions(+), 8 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static TCGTemp *tcg_global_reg_new_internal(TCGContext *s, TCGType type,
 {
     TCGTemp *ts;
 
-    if (TCG_TARGET_REG_BITS == 32 && type != TCG_TYPE_I32) {
-        tcg_abort();
-    }
+    tcg_debug_assert(TCG_TARGET_REG_BITS == 64 || type == TCG_TYPE_I32);
 
     ts = tcg_global_alloc(s);
     ts->base_type = type;
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
     }
 }
 
-/* Use SMALL != 0 to force a short forward branch. */
-static void tcg_out_jxx(TCGContext *s, int opc, TCGLabel *l, int small)
+/* Set SMALL to force a short forward branch. */
+static void tcg_out_jxx(TCGContext *s, int opc, TCGLabel *l, bool small)
 {
     int32_t val, val1;
 
@@ -XXX,XX +XXX,XX @@ static void tcg_out_jxx(TCGContext *s, int opc, TCGLabel *l, int small)
         }
         tcg_out8(s, val1);
     } else {
-        if (small) {
-            tcg_abort();
-        }
+        tcg_debug_assert(!small);
         if (opc == -1) {
             tcg_out8(s, OPC_JMP_long);
             tcg_out32(s, val - 5);
-- 
2.34.1
Deleted patch
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 include/tcg/tcg.h | 6 ------
 target/i386/tcg/translate.c | 20 ++++++++++----------
 target/s390x/tcg/translate.c | 4 ++--
 tcg/optimize.c | 10 ++++------
 tcg/tcg.c | 8 ++++----
 tcg/aarch64/tcg-target.c.inc | 4 ++--
 tcg/arm/tcg-target.c.inc | 2 +-
 tcg/i386/tcg-target.c.inc | 14 +++++++-------
 tcg/mips/tcg-target.c.inc | 14 +++++++-------
 tcg/ppc/tcg-target.c.inc | 8 ++++----
 tcg/s390x/tcg-target.c.inc | 8 ++++----
 tcg/sparc64/tcg-target.c.inc | 2 +-
 tcg/tci/tcg-target.c.inc | 2 +-
 13 files changed, 47 insertions(+), 55 deletions(-)

diff --git a/include/tcg/tcg.h b/include/tcg/tcg.h
index XXXXXXX..XXXXXXX 100644
--- a/include/tcg/tcg.h
+++ b/include/tcg/tcg.h
@@ -XXX,XX +XXX,XX @@ typedef struct TCGTargetOpDef {
     const char *args_ct_str[TCG_MAX_OP_ARGS];
 } TCGTargetOpDef;
 
-#define tcg_abort() \
-do {\
-    fprintf(stderr, "%s:%d: tcg fatal error\n", __FILE__, __LINE__);\
-    abort();\
-} while (0)
-
 bool tcg_op_supported(TCGOpcode op);
 
 void tcg_gen_callN(void *func, TCGTemp *ret, int nargs, TCGTemp **args);
diff --git a/target/i386/tcg/translate.c b/target/i386/tcg/translate.c
index XXXXXXX..XXXXXXX 100644
--- a/target/i386/tcg/translate.c
+++ b/target/i386/tcg/translate.c
@@ -XXX,XX +XXX,XX @@ static TCGv gen_op_deposit_reg_v(DisasContext *s, MemOp ot, int reg, TCGv dest,
         break;
 #endif
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
     return cpu_regs[reg];
 }
@@ -XXX,XX +XXX,XX @@ static void gen_lea_v_seg(DisasContext *s, MemOp aflag, TCGv a0,
         }
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 
     if (ovr_seg >= 0) {
@@ -XXX,XX +XXX,XX @@ static void gen_helper_in_func(MemOp ot, TCGv v, TCGv_i32 n)
         gen_helper_inl(v, cpu_env, n);
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static void gen_helper_out_func(MemOp ot, TCGv_i32 v, TCGv_i32 n)
         gen_helper_outl(cpu_env, v, n);
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static void gen_rotc_rm_T1(DisasContext *s, MemOp ot, int op1,
             break;
 #endif
         default:
-            tcg_abort();
+            g_assert_not_reached();
         }
     } else {
         switch (ot) {
@@ -XXX,XX +XXX,XX @@ static void gen_rotc_rm_T1(DisasContext *s, MemOp ot, int op1,
             break;
 #endif
         default:
-            tcg_abort();
+            g_assert_not_reached();
         }
     }
     /* store */
@@ -XXX,XX +XXX,XX @@ static AddressParts gen_lea_modrm_0(CPUX86State *env, DisasContext *s,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 
 done:
@@ -XXX,XX +XXX,XX @@ static inline uint32_t insn_get(CPUX86State *env, DisasContext *s, MemOp ot)
         ret = x86_ldl_code(env, s);
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
     return ret;
 }
@@ -XXX,XX +XXX,XX @@ static bool disas_insn(DisasContext *s, CPUState *cpu)
             gen_op_mov_reg_v(s, MO_16, R_EAX, s->T0);
             break;
         default:
-            tcg_abort();
+            g_assert_not_reached();
         }
         break;
     case 0x99: /* CDQ/CWD */
@@ -XXX,XX +XXX,XX @@ static bool disas_insn(DisasContext *s, CPUState *cpu)
             gen_op_mov_reg_v(s, MO_16, R_EDX, s->T0);
             break;
         default:
-            tcg_abort();
+            g_assert_not_reached();
         }
         break;
     case 0x1af: /* imul Gv, Ev */
diff --git a/target/s390x/tcg/translate.c b/target/s390x/tcg/translate.c
index XXXXXXX..XXXXXXX 100644
--- a/target/s390x/tcg/translate.c
+++ b/target/s390x/tcg/translate.c
@@ -XXX,XX +XXX,XX @@ static int get_mem_index(DisasContext *s)
     case PSW_ASC_HOME >> FLAG_MASK_PSW_SHIFT:
         return MMU_HOME_IDX;
     default:
-        tcg_abort();
+        g_assert_not_reached();
         break;
     }
 #endif
@@ -XXX,XX +XXX,XX @@ static void gen_op_calc_cc(DisasContext *s)
         gen_helper_calc_cc(cc_op, cpu_env, cc_op, cc_src, cc_dst, cc_vr);
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 
     /* We now have cc in cc_op as constant */
diff --git a/tcg/optimize.c b/tcg/optimize.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/optimize.c
+++ b/tcg/optimize.c
@@ -XXX,XX +XXX,XX @@ static uint64_t do_constant_folding_2(TCGOpcode op, uint64_t x, uint64_t y)
         return (uint64_t)x % ((uint64_t)y ? : 1);
 
     default:
-        fprintf(stderr,
-                "Unrecognized operation %d in do_constant_folding.\n", op);
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static bool do_constant_folding_cond_32(uint32_t x, uint32_t y, TCGCond c)
     case TCG_COND_GTU:
         return x > y;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static bool do_constant_folding_cond_64(uint64_t x, uint64_t y, TCGCond c)
     case TCG_COND_GTU:
         return x > y;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static bool do_constant_folding_cond_eq(TCGCond c)
     case TCG_COND_EQ:
         return 1;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static void temp_sync(TCGContext *s, TCGTemp *ts, TCGRegSet allocated_regs,
 
     case TEMP_VAL_DEAD:
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
     ts->mem_coherent = 1;
 }
@@ -XXX,XX +XXX,XX @@ static TCGReg tcg_reg_alloc(TCGContext *s, TCGRegSet required_regs,
         }
     }
 
-    tcg_abort();
+    g_assert_not_reached();
 }
 
 static TCGReg tcg_reg_alloc_pair(TCGContext *s, TCGRegSet required_regs,
@@ -XXX,XX +XXX,XX @@ static TCGReg tcg_reg_alloc_pair(TCGContext *s, TCGRegSet required_regs,
             }
         }
     }
-    tcg_abort();
+    g_assert_not_reached();
 }
 
 /* Make sure the temporary is in a register. If needed, allocate the register
@@ -XXX,XX +XXX,XX @@ static void temp_load(TCGContext *s, TCGTemp *ts, TCGRegSet desired_regs,
         break;
     case TEMP_VAL_DEAD:
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
     set_temp_val_reg(s, ts, reg);
 }
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld_direct(TCGContext *s, MemOp memop, TCGType ext,
         tcg_out_ldst_r(s, I3312_LDRX, data_r, addr_r, otype, off_r);
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_st_direct(TCGContext *s, MemOp memop,
         tcg_out_ldst_r(s, I3312_STRX, data_r, addr_r, otype, off_r);
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static bool patch_reloc(tcg_insn_unit *code_ptr, int type,
         tcg_patch8(code_ptr, value);
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
     return true;
 }
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_pushi(TCGContext *s, tcg_target_long val)
         tcg_out_opc(s, OPC_PUSH_Iv, 0, 0, 0);
         tcg_out32(s, val);
     } else {
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static void tgen_arithi(TCGContext *s, int c, int r0,
         return;
     }
 
-    tcg_abort();
+    g_assert_not_reached();
 }
 
 static void tcg_out_addi(TCGContext *s, int reg, tcg_target_long val)
@@ -XXX,XX +XXX,XX @@ static void tcg_out_brcond2(TCGContext *s, const TCGArg *args,
                         label_this, small);
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
     tcg_out_label(s, label_next);
 }
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
         }
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 
     /* Jump to the code corresponding to next IR of qemu_st */
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
             /* load bits 0..15 */
             tcg_out_modrm(s, OPC_MOVL_EvGv | P_DATA16, a2, a0);
         } else {
-            tcg_abort();
+            g_assert_not_reached();
         }
         break;
 
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 
 #undef OP_32_64
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_setcond(TCGContext *s, TCGCond cond, TCGReg ret,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
         break;
     }
 }
@@ -XXX,XX +XXX,XX @@ static void tcg_out_brcond(TCGContext *s, TCGCond cond, TCGReg arg1,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
         break;
     }
 
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
         }
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
     i = tcg_out_call_iarg_imm(s, i, oi);
 
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg lo, TCGReg hi,
         }
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_st_direct(TCGContext *s, TCGReg lo, TCGReg hi,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_st_unalign(TCGContext *s, TCGReg lo, TCGReg hi,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, bool is_64)
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_cmp(TCGContext *s, int cond, TCGArg arg1, TCGArg arg2,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
     op |= BF(cr) | ((type == TCG_TYPE_I64) << 21);
 
@@ -XXX,XX +XXX,XX @@ static void tcg_out_setcond(TCGContext *s, TCGType type, TCGCond cond,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static void tcg_out_cmp2(TCGContext *s, const TCGArg *args,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld_direct(TCGContext *s, MemOp opc, TCGReg data,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_st_direct(TCGContext *s, MemOp opc, TCGReg data,
         break;
 
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
         tcg_out_mov(s, TCG_TYPE_I64, TCG_REG_R4, data_reg);
         break;
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
     tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R5, oi);
     tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R6, (uintptr_t)lb->raddr);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     default:
-        tcg_abort();
+        g_assert_not_reached();
     }
 }
 
-- 
2.34.1
Deleted patch
We will need a backend interface for performing 8-bit sign-extend.
Use it in tcg_reg_alloc_op in the meantime.
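(A minimal sketch, illustrative only: the target code emitted by the new
tcg_out_ext8s hook must compute the following, expressed here as plain C.
The helper name is hypothetical; the hook itself emits instructions.)

    /* ret = (int8_t)arg, widened to the register width named by 'type'. */
    static uint64_t ext8s_semantics(TCGType type, uint64_t arg)
    {
        int64_t v = (int8_t)arg;            /* sign-extend from bit 7 */
        return type == TCG_TYPE_I32 ? (uint32_t)v : (uint64_t)v;
    }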
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/tcg.c | 21 ++++++++++++++++-----
 tcg/aarch64/tcg-target.c.inc | 11 +++++++----
 tcg/arm/tcg-target.c.inc | 10 ++++------
 tcg/i386/tcg-target.c.inc | 10 +++++-----
 tcg/loongarch64/tcg-target.c.inc | 11 ++++-------
 tcg/mips/tcg-target.c.inc | 12 ++++++++----
 tcg/ppc/tcg-target.c.inc | 10 ++++------
 tcg/riscv/tcg-target.c.inc | 9 +++------
 tcg/s390x/tcg-target.c.inc | 10 +++-------
 tcg/sparc64/tcg-target.c.inc | 7 +++++++
 tcg/tci/tcg-target.c.inc | 21 ++++++++++++++++++++-
 11 files changed, 81 insertions(+), 51 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ld(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg1,
 static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
 static void tcg_out_movi(TCGContext *s, TCGType type,
                          TCGReg ret, tcg_target_long arg);
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
 static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
 static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
 static void tcg_out_goto_tb(TCGContext *s, int which);
@@ -XXX,XX +XXX,XX @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
     }
 
     /* emit instruction */
-    if (def->flags & TCG_OPF_VECTOR) {
-        tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
-                       new_args, const_args);
-    } else {
-        tcg_out_op(s, op->opc, new_args, const_args);
+    switch (op->opc) {
+    case INDEX_op_ext8s_i32:
+        tcg_out_ext8s(s, TCG_TYPE_I32, new_args[0], new_args[1]);
+        break;
+    case INDEX_op_ext8s_i64:
+        tcg_out_ext8s(s, TCG_TYPE_I64, new_args[0], new_args[1]);
+        break;
+    default:
+        if (def->flags & TCG_OPF_VECTOR) {
+            tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
+                           new_args, const_args);
+        } else {
+            tcg_out_op(s, op->opc, new_args, const_args);
+        }
+        break;
     }
 
     /* move the outputs in the correct register if needed */
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_sxt(TCGContext *s, TCGType ext, MemOp s_bits,
     tcg_out_sbfm(s, ext, rd, rn, 0, bits);
 }
 
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rn)
+{
+    tcg_out_sxt(s, type, MO_8, rd, rn);
+}
+
 static inline void tcg_out_uxt(TCGContext *s, MemOp s_bits,
                                TCGReg rd, TCGReg rn)
 {
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         }
         break;
 
-    case INDEX_op_ext8s_i64:
-    case INDEX_op_ext8s_i32:
-        tcg_out_sxt(s, ext, MO_8, a0, a1);
-        break;
     case INDEX_op_ext16s_i64:
     case INDEX_op_ext16s_i32:
         tcg_out_sxt(s, ext, MO_16, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call: /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+    case INDEX_op_ext8s_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_udiv(TCGContext *s, ARMCond cond,
     tcg_out32(s, 0x0730f010 | (cond << 28) | (rd << 16) | rn | (rm << 8));
 }
 
-static void tcg_out_ext8s(TCGContext *s, ARMCond cond, TCGReg rd, TCGReg rn)
+static void tcg_out_ext8s(TCGContext *s, TCGType t, TCGReg rd, TCGReg rn)
 {
     /* sxtb */
-    tcg_out32(s, 0x06af0070 | (cond << 28) | (rd << 12) | rn);
+    tcg_out32(s, 0x06af0070 | (COND_AL << 28) | (rd << 12) | rn);
 }
 
 static void __attribute__((unused))
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
     datahi = lb->datahi_reg;
     switch (opc & MO_SSIZE) {
     case MO_SB:
-        tcg_out_ext8s(s, COND_AL, datalo, TCG_REG_R0);
+        tcg_out_ext8s(s, TCG_TYPE_I32, datalo, TCG_REG_R0);
         break;
     case MO_SW:
         tcg_out_ext16s(s, COND_AL, datalo, TCG_REG_R0);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_bswap32(s, COND_AL, args[0], args[1]);
         break;
 
-    case INDEX_op_ext8s_i32:
-        tcg_out_ext8s(s, COND_AL, args[0], args[1]);
-        break;
     case INDEX_op_ext16s_i32:
         tcg_out_ext16s(s, COND_AL, args[0], args[1]);
         break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call: /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_ext8u(TCGContext *s, int dest, int src)
     tcg_out_modrm(s, OPC_MOVZBL + P_REXB_RM, dest, src);
 }
 
-static void tcg_out_ext8s(TCGContext *s, int dest, int src, int rexw)
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
 {
+    int rexw = type == TCG_TYPE_I32 ? 0 : P_REXW;
     /* movsbl */
     tcg_debug_assert(src < 4 || TCG_TARGET_REG_BITS == 64);
     tcg_out_modrm(s, OPC_MOVSBL + P_REXB_RM + rexw, dest, src);
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
     data_reg = l->datalo_reg;
     switch (opc & MO_SSIZE) {
     case MO_SB:
-        tcg_out_ext8s(s, data_reg, TCG_REG_EAX, rexw);
+        tcg_out_ext8s(s, l->type, data_reg, TCG_REG_EAX);
         break;
     case MO_SW:
         tcg_out_ext16s(s, data_reg, TCG_REG_EAX, rexw);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_modrm(s, OPC_GRP3_Ev + rexw, EXT3_NOT, a0);
         break;
 
-    OP_32_64(ext8s):
-        tcg_out_ext8s(s, a0, a1, rexw);
-        break;
     OP_32_64(ext16s):
         tcg_out_ext16s(s, a0, a1, rexw);
         break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call: /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+    case INDEX_op_ext8s_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg)
     tcg_out_opc_bstrpick_d(s, ret, arg, 0, 31);
 }
 
-static void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
 {
     tcg_out_opc_sext_b(s, ret, arg);
 }
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
 
     switch (opc & MO_SSIZE) {
     case MO_SB:
-        tcg_out_ext8s(s, l->datalo_reg, TCG_REG_A0);
+        tcg_out_ext8s(s, type, l->datalo_reg, TCG_REG_A0);
         break;
     case MO_SW:
         tcg_out_ext16s(s, l->datalo_reg, TCG_REG_A0);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
         break;
 
-    case INDEX_op_ext8s_i32:
-    case INDEX_op_ext8s_i64:
-        tcg_out_ext8s(s, a0, a1);
-        break;
-
     case INDEX_op_ext8u_i32:
     case INDEX_op_ext8u_i64:
         tcg_out_ext8u(s, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call: /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+    case INDEX_op_ext8s_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_movi(TCGContext *s, TCGType type,
     }
 }
 
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+    tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
+    tcg_out_opc_reg(s, OPC_SEB, rd, TCG_REG_ZERO, rs);
+}
+
 static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                              tcg_target_long imm)
 {
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_not_i64:
         i1 = OPC_NOR;
         goto do_unary;
-    case INDEX_op_ext8s_i32:
-    case INDEX_op_ext8s_i64:
-        i1 = OPC_SEB;
-        goto do_unary;
     case INDEX_op_ext16s_i32:
     case INDEX_op_ext16s_i64:
         i1 = OPC_SEH;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call: /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+    case INDEX_op_ext8s_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_rlw(TCGContext *s, int op, TCGReg ra, TCGReg rs,
     tcg_out32(s, op | RA(ra) | RS(rs) | SH(sh) | MB(mb) | ME(me));
 }
 
-static inline void tcg_out_ext8s(TCGContext *s, TCGReg dst, TCGReg src)
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg dst, TCGReg src)
 {
     tcg_out32(s, EXTSB | RA(dst) | RS(src));
 }
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_ld8s_i32:
     case INDEX_op_ld8s_i64:
         tcg_out_mem_long(s, LBZ, LBZX, args[0], args[1], args[2]);
-        tcg_out_ext8s(s, args[0], args[0]);
+        tcg_out_ext8s(s, TCG_TYPE_REG, args[0], args[0]);
         break;
     case INDEX_op_ld16u_i32:
     case INDEX_op_ld16u_i64:
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_qemu_st(s, args, true);
         break;
 
-    case INDEX_op_ext8s_i32:
-    case INDEX_op_ext8s_i64:
-        tcg_out_ext8s(s, args[0], args[1]);
-        break;
     case INDEX_op_ext16s_i32:
     case INDEX_op_ext16s_i64:
         tcg_out_ext16s(s, args[0], args[1]);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call: /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+    case INDEX_op_ext8s_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg)
     tcg_out_opc_imm(s, OPC_SRLI, ret, ret, 32);
 }
 
-static void tcg_out_ext8s(TCGContext *s, TCGReg ret, TCGReg arg)
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
 {
     tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 24);
     tcg_out_opc_imm(s, OPC_SRAIW, ret, ret, 24);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_ext32u(s, a0, a1);
         break;
 
-    case INDEX_op_ext8s_i32:
-    case INDEX_op_ext8s_i64:
-        tcg_out_ext8s(s, a0, a1);
-        break;
-
     case INDEX_op_ext16s_i32:
     case INDEX_op_ext16s_i64:
         tcg_out_ext16s(s, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call: /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+    case INDEX_op_ext8s_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_risbg(TCGContext *s, TCGReg dest, TCGReg src,
     tcg_out16(s, (ofs << 8) | (RIEf_RISBG & 0xff));
 }
 
-static void tgen_ext8s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
 {
     tcg_out_insn(s, RRE, LGBR, dest, src);
 }
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
         }
         break;
 
-    case INDEX_op_ext8s_i32:
-        tgen_ext8s(s, TCG_TYPE_I32, args[0], args[1]);
-        break;
     case INDEX_op_ext16s_i32:
         tgen_ext16s(s, TCG_TYPE_I32, args[0], args[1]);
         break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
         }
         break;
 
-    case INDEX_op_ext8s_i64:
-        tgen_ext8s(s, TCG_TYPE_I64, args[0], args[1]);
-        break;
     case INDEX_op_ext16s_i64:
         tgen_ext16s(s, TCG_TYPE_I64, args[0], args[1]);
         break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call: /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+    case INDEX_op_ext8s_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_movi(TCGContext *s, TCGType type,
     tcg_out_movi_int(s, type, ret, arg, false, TCG_REG_T2);
 }
 
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+    g_assert_not_reached();
+}
+
 static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                              tcg_target_long imm)
 {
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call: /* Always emitted via tcg_out_call. */
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+    case INDEX_op_ext8s_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_movi(TCGContext *s, TCGType type,
     }
 }
 
+static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+    switch (type) {
+    case TCG_TYPE_I32:
+        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i32);
+        tcg_out_op_rr(s, INDEX_op_ext8s_i32, rd, rs);
+        break;
+#if TCG_TARGET_REG_BITS == 64
+    case TCG_TYPE_I64:
+        tcg_debug_assert(TCG_TARGET_HAS_ext8s_i64);
+        tcg_out_op_rr(s, INDEX_op_ext8s_i64, rd, rs);
+        break;
+#endif
+    default:
+        g_assert_not_reached();
+    }
+}
+
 static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                              tcg_target_long imm)
 {
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
 
     CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
     CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
-    CASE_32_64(ext8s)    /* Optional (TCG_TARGET_HAS_ext8s_*). */
     CASE_32_64(ext8u)    /* Optional (TCG_TARGET_HAS_ext8u_*). */
     CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
     CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_call: /* Always emitted via tcg_out_call. */
    case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
+    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+    case INDEX_op_ext8s_i64:
     default:
         g_assert_not_reached();
     }
-- 
2.34.1
Deleted patch
We will need a backend interface for performing 8-bit zero-extend.
Use it in tcg_reg_alloc_op in the meantime.
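(Likewise a sketch, not part of the patch: zero-extension is a plain mask,
which the backends below emit as a single AND or zero-extending move. The
helper name is hypothetical.)

    /* ret = arg & 0xff, independent of register width. */
    static uint64_t ext8u_semantics(uint64_t arg)
    {
        return arg & 0xff;
    }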
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/tcg.c | 5 +++++
 tcg/aarch64/tcg-target.c.inc | 11 +++++++----
 tcg/arm/tcg-target.c.inc | 12 +++++++++---
 tcg/i386/tcg-target.c.inc | 7 +++----
 tcg/loongarch64/tcg-target.c.inc | 7 ++-----
 tcg/mips/tcg-target.c.inc | 9 ++++++++-
 tcg/ppc/tcg-target.c.inc | 7 +++++++
 tcg/riscv/tcg-target.c.inc | 7 ++-----
 tcg/s390x/tcg-target.c.inc | 14 +++++---------
 tcg/sparc64/tcg-target.c.inc | 9 ++++++++-
 tcg/tci/tcg-target.c.inc | 14 +++++++++++++-
 11 files changed, 69 insertions(+), 33 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
 static void tcg_out_movi(TCGContext *s, TCGType type,
                          TCGReg ret, tcg_target_long arg);
 static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
+static void tcg_out_ext8u(TCGContext *s, TCGReg ret, TCGReg arg);
 static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
 static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
 static void tcg_out_goto_tb(TCGContext *s, int which);
@@ -XXX,XX +XXX,XX @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
     case INDEX_op_ext8s_i64:
         tcg_out_ext8s(s, TCG_TYPE_I64, new_args[0], new_args[1]);
         break;
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
+        tcg_out_ext8u(s, new_args[0], new_args[1]);
+        break;
     default:
         if (def->flags & TCG_OPF_VECTOR) {
             tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_uxt(TCGContext *s, MemOp s_bits,
     tcg_out_ubfm(s, 0, rd, rn, 0, bits);
 }
 
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+    tcg_out_uxt(s, MO_8, rd, rn);
+}
+
 static void tcg_out_addsubi(TCGContext *s, int ext, TCGReg rd,
                             TCGReg rn, int64_t aimm)
 {
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_ext32s_i64:
         tcg_out_sxt(s, TCG_TYPE_I64, MO_32, a0, a1);
         break;
-    case INDEX_op_ext8u_i64:
-    case INDEX_op_ext8u_i32:
-        tcg_out_uxt(s, MO_8, a0, a1);
-        break;
     case INDEX_op_ext16u_i64:
     case INDEX_op_ext16u_i32:
         tcg_out_uxt(s, MO_16, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType t, TCGReg rd, TCGReg rn)
     tcg_out32(s, 0x06af0070 | (COND_AL << 28) | (rd << 12) | rn);
 }
 
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+    tcg_out_dat_imm(s, COND_AL, ARITH_AND, rd, rn, 0xff);
+}
+
 static void __attribute__((unused))
-tcg_out_ext8u(TCGContext *s, ARMCond cond, TCGReg rd, TCGReg rn)
+tcg_out_ext8u_cond(TCGContext *s, ARMCond cond, TCGReg rd, TCGReg rn)
 {
     tcg_out_dat_imm(s, cond, ARITH_AND, rd, rn, 0xff);
 }
@@ -XXX,XX +XXX,XX @@ static TCGReg NAME(TCGContext *s, TCGReg argreg, ARGTYPE arg) \
 
 DEFINE_TCG_OUT_ARG(tcg_out_arg_imm32, uint32_t, tcg_out_movi32,
     (tcg_out_movi32(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
-DEFINE_TCG_OUT_ARG(tcg_out_arg_reg8, TCGReg, tcg_out_ext8u,
-    (tcg_out_ext8u(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
+DEFINE_TCG_OUT_ARG(tcg_out_arg_reg8, TCGReg, tcg_out_ext8u_cond,
+    (tcg_out_ext8u_cond(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
 DEFINE_TCG_OUT_ARG(tcg_out_arg_reg16, TCGReg, tcg_out_ext16u,
     (tcg_out_ext16u(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
 DEFINE_TCG_OUT_ARG(tcg_out_arg_reg32, TCGReg, tcg_out_mov_reg, )
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_exit_tb: /* Always emitted via tcg_out_exit_tb. */
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
+    case INDEX_op_ext8u_i32:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_rolw_8(TCGContext *s, int reg)
     tcg_out_shifti(s, SHIFT_ROL + P_DATA16, reg, 8);
 }
 
-static inline void tcg_out_ext8u(TCGContext *s, int dest, int src)
+static void tcg_out_ext8u(TCGContext *s, TCGReg dest, TCGReg src)
 {
     /* movzbl */
     tcg_debug_assert(src < 4 || TCG_TARGET_REG_BITS == 64);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     OP_32_64(ext16s):
         tcg_out_ext16s(s, a0, a1, rexw);
         break;
-    OP_32_64(ext8u):
-        tcg_out_ext8u(s, a0, a1);
-        break;
     OP_32_64(ext16u):
         tcg_out_ext16u(s, a0, a1);
         break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
         break;
 
-    case INDEX_op_ext8u_i32:
-    case INDEX_op_ext8u_i64:
-        tcg_out_ext8u(s, a0, a1);
-        break;
-
     case INDEX_op_ext16s_i32:
     case INDEX_op_ext16s_i64:
         tcg_out_ext16s(s, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
     tcg_out_opc_reg(s, OPC_SEB, rd, TCG_REG_ZERO, rs);
 }
 
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_opc_imm(s, OPC_ANDI, rd, rs, 0xff);
+}
+
 static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                              tcg_target_long imm)
 {
@@ -XXX,XX +XXX,XX @@ static int tcg_out_call_iarg_reg8(TCGContext *s, int i, TCGReg arg)
     if (i < ARRAY_SIZE(tcg_target_call_iarg_regs)) {
         tmp = tcg_target_call_iarg_regs[i];
     }
-    tcg_out_opc_imm(s, OPC_ANDI, tmp, arg, 0xff);
+    tcg_out_ext8u(s, tmp, arg);
     return tcg_out_call_iarg_reg(s, i, tmp);
 }
 
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg dst, TCGReg src)
     tcg_out32(s, EXTSB | RA(dst) | RS(src));
 }
 
+static void tcg_out_ext8u(TCGContext *s, TCGReg dst, TCGReg src)
+{
+    tcg_out32(s, ANDI | SAI(src, dst, 0xff));
+}
+
 static inline void tcg_out_ext16s(TCGContext *s, TCGReg dst, TCGReg src)
 {
     tcg_out32(s, EXTSH | RA(dst) | RS(src));
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
         tcg_out_qemu_st(s, args, true);
         break;
 
-    case INDEX_op_ext8u_i32:
-    case INDEX_op_ext8u_i64:
-        tcg_out_ext8u(s, a0, a1);
-        break;
-
     case INDEX_op_ext16u_i32:
     case INDEX_op_ext16u_i64:
         tcg_out_ext16u(s, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
     tcg_out_insn(s, RRE, LGBR, dest, src);
 }
 
-static void tgen_ext8u(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
+static void tcg_out_ext8u(TCGContext *s, TCGReg dest, TCGReg src)
 {
     tcg_out_insn(s, RRE, LLGCR, dest, src);
 }
@@ -XXX,XX +XXX,XX @@ static void tgen_andi(TCGContext *s, TCGType type, TCGReg dest, uint64_t val)
         return;
     }
     if ((val & valid) == 0xff) {
-        tgen_ext8u(s, TCG_TYPE_I64, dest, dest);
+        tcg_out_ext8u(s, dest, dest);
         return;
     }
     if ((val & valid) == 0xffff) {
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
     }
     switch (opc & MO_SIZE) {
     case MO_UB:
-        tgen_ext8u(s, TCG_TYPE_I64, TCG_REG_R4, data_reg);
+        tcg_out_ext8u(s, TCG_REG_R4, data_reg);
         break;
     case MO_UW:
         tgen_ext16u(s, TCG_TYPE_I64, TCG_REG_R4, data_reg);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_ext16s_i32:
         tgen_ext16s(s, TCG_TYPE_I32, args[0], args[1]);
         break;
-    case INDEX_op_ext8u_i32:
-        tgen_ext8u(s, TCG_TYPE_I32, args[0], args[1]);
-        break;
     case INDEX_op_ext16u_i32:
         tgen_ext16u(s, TCG_TYPE_I32, args[0], args[1]);
         break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_ext32s_i64:
         tgen_ext32s(s, args[0], args[1]);
         break;
-    case INDEX_op_ext8u_i64:
-        tgen_ext8u(s, TCG_TYPE_I64, args[0], args[1]);
-        break;
     case INDEX_op_ext16u_i64:
         tgen_ext16u(s, TCG_TYPE_I64, args[0], args[1]);
         break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
     g_assert_not_reached();
 }
 
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_arithi(s, rd, rs, 0xff, ARITH_AND);
+}
+
 static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                              tcg_target_long imm)
 {
@@ -XXX,XX +XXX,XX @@ static void emit_extend(TCGContext *s, TCGReg r, int op)
      */
     switch (op & MO_SIZE) {
     case MO_8:
-        tcg_out_arithi(s, r, r, 0xff, ARITH_AND);
+        tcg_out_ext8u(s, r, r);
         break;
     case MO_16:
         tcg_out_arithi(s, r, r, 16, SHIFT_SLL);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
     }
 }
 
+static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    if (TCG_TARGET_REG_BITS == 64) {
+        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i64);
+        tcg_out_op_rr(s, INDEX_op_ext8u_i64, rd, rs);
+    } else {
+        tcg_debug_assert(TCG_TARGET_HAS_ext8u_i32);
+        tcg_out_op_rr(s, INDEX_op_ext8u_i32, rd, rs);
+    }
+}
+
 static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                              tcg_target_long imm)
 {
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
 
     CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
     CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
-    CASE_32_64(ext8u)    /* Optional (TCG_TARGET_HAS_ext8u_*). */
     CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
     CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
     CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
     case INDEX_op_goto_tb: /* Always emitted via tcg_out_goto_tb. */
     case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
     case INDEX_op_ext8s_i64:
+    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext8u_i64:
     default:
         g_assert_not_reached();
     }
-- 
2.34.1
393
394
diff view generated by jsdifflib
Deleted patch
We will need a backend interface for performing 16-bit sign-extend.
Use it in tcg_reg_alloc_op in the meantime.
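
For reference, the semantics the new hook must implement are those of the
existing ext16s opcodes: sign-extend the low 16 bits of the source into the
destination.  As an illustrative C sketch only (not part of the patch; the
helper names are invented here, assuming the <stdint.h> types):

    int32_t ext16s_i32(int32_t x) { return (int16_t)x; }  /* sign-extend bits 15..0 */
    int64_t ext16s_i64(int64_t x) { return (int16_t)x; }

The TCGType argument of the new tcg_out_ext16s() selects between the two
result widths.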

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/tcg.c                        |  7 +++++++
 tcg/aarch64/tcg-target.c.inc     | 13 ++++++++-----
 tcg/arm/tcg-target.c.inc         | 10 ++++------
 tcg/i386/tcg-target.c.inc        | 16 ++++++++--------
 tcg/loongarch64/tcg-target.c.inc | 13 +++++--------
 tcg/mips/tcg-target.c.inc        | 11 ++++++++---
 tcg/ppc/tcg-target.c.inc         | 12 +++++-------
 tcg/riscv/tcg-target.c.inc       |  9 +++------
 tcg/s390x/tcg-target.c.inc       | 12 ++++--------
 tcg/sparc64/tcg-target.c.inc     |  7 +++++++
 tcg/tci/tcg-target.c.inc         | 21 ++++++++++++++++++++-
 11 files changed, 79 insertions(+), 52 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_mov(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
static void tcg_out_movi(TCGContext *s, TCGType type,
                         TCGReg ret, tcg_target_long arg);
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
static void tcg_out_ext8u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
@@ -XXX,XX +XXX,XX @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
    case INDEX_op_ext8u_i64:
        tcg_out_ext8u(s, new_args[0], new_args[1]);
        break;
+    case INDEX_op_ext16s_i32:
+        tcg_out_ext16s(s, TCG_TYPE_I32, new_args[0], new_args[1]);
+        break;
+    case INDEX_op_ext16s_i64:
+        tcg_out_ext16s(s, TCG_TYPE_I64, new_args[0], new_args[1]);
+        break;
    default:
        if (def->flags & TCG_OPF_VECTOR) {
            tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rn)
    tcg_out_sxt(s, type, MO_8, rd, rn);
}

+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rn)
+{
+    tcg_out_sxt(s, type, MO_16, rd, rn);
+}
+
static inline void tcg_out_uxt(TCGContext *s, MemOp s_bits,
                               TCGReg rd, TCGReg rn)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_rev(s, TCG_TYPE_I32, MO_16, a0, a1);
        if (a2 & TCG_BSWAP_OS) {
            /* Output must be sign-extended. */
-            tcg_out_sxt(s, ext, MO_16, a0, a0);
+            tcg_out_ext16s(s, ext, a0, a0);
        } else if ((a2 & (TCG_BSWAP_IZ | TCG_BSWAP_OZ)) == TCG_BSWAP_OZ) {
            /* Output must be zero-extended, but input isn't. */
            tcg_out_uxt(s, MO_16, a0, a0);
        }
        break;

-    case INDEX_op_ext16s_i64:
-    case INDEX_op_ext16s_i32:
-        tcg_out_sxt(s, ext, MO_16, a0, a1);
-        break;
    case INDEX_op_ext_i32_i64:
    case INDEX_op_ext32s_i64:
        tcg_out_sxt(s, TCG_TYPE_I64, MO_32, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext16s_i32:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ tcg_out_ext8u_cond(TCGContext *s, ARMCond cond, TCGReg rd, TCGReg rn)
    tcg_out_dat_imm(s, cond, ARITH_AND, rd, rn, 0xff);
}

-static void tcg_out_ext16s(TCGContext *s, ARMCond cond, TCGReg rd, TCGReg rn)
+static void tcg_out_ext16s(TCGContext *s, TCGType t, TCGReg rd, TCGReg rn)
{
    /* sxth */
-    tcg_out32(s, 0x06bf0070 | (cond << 28) | (rd << 12) | rn);
+    tcg_out32(s, 0x06bf0070 | (COND_AL << 28) | (rd << 12) | rn);
}

static void tcg_out_ext16u(TCGContext *s, ARMCond cond, TCGReg rd, TCGReg rn)
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
        tcg_out_ext8s(s, TCG_TYPE_I32, datalo, TCG_REG_R0);
        break;
    case MO_SW:
-        tcg_out_ext16s(s, COND_AL, datalo, TCG_REG_R0);
+        tcg_out_ext16s(s, TCG_TYPE_I32, datalo, TCG_REG_R0);
        break;
    default:
        tcg_out_mov_reg(s, COND_AL, datalo, TCG_REG_R0);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_bswap32(s, COND_AL, args[0], args[1]);
        break;

-    case INDEX_op_ext16s_i32:
-        tcg_out_ext16s(s, COND_AL, args[0], args[1]);
-        break;
    case INDEX_op_ext16u_i32:
        tcg_out_ext16u(s, COND_AL, args[0], args[1]);
        break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_goto_tb:   /* Always emitted via tcg_out_goto_tb. */
    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
    case INDEX_op_ext8u_i32:
+    case INDEX_op_ext16s_i32:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_ext16u(TCGContext *s, int dest, int src)
    tcg_out_modrm(s, OPC_MOVZWL, dest, src);
}

-static inline void tcg_out_ext16s(TCGContext *s, int dest, int src, int rexw)
+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
{
+    int rexw = type == TCG_TYPE_I32 ? 0 : P_REXW;
    /* movsw[lq] */
    tcg_out_modrm(s, OPC_MOVSWL + rexw, dest, src);
}
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
    MemOp opc = get_memop(oi);
    TCGReg data_reg;
    tcg_insn_unit **label_ptr = &l->label_ptr[0];
-    int rexw = (l->type == TCG_TYPE_I64 ? P_REXW : 0);

    /* resolve label address */
    tcg_patch32(label_ptr[0], s->code_ptr - label_ptr[0] - 4);
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
        tcg_out_ext8s(s, l->type, data_reg, TCG_REG_EAX);
        break;
    case MO_SW:
-        tcg_out_ext16s(s, data_reg, TCG_REG_EAX, rexw);
+        tcg_out_ext16s(s, l->type, data_reg, TCG_REG_EAX);
        break;
#if TCG_TARGET_REG_BITS == 64
    case MO_SL:
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg datalo, TCGReg datahi,
                                   TCGReg base, int index, intptr_t ofs,
                                   int seg, bool is64, MemOp memop)
{
+    TCGType type = is64 ? TCG_TYPE_I64 : TCG_TYPE_I32;
    bool use_movbe = false;
    int rexw = is64 * P_REXW;
    int movop = OPC_MOVL_GvEv;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld_direct(TCGContext *s, TCGReg datalo, TCGReg datahi,
        if (use_movbe) {
            tcg_out_modrm_sib_offset(s, OPC_MOVBE_GyMy + P_DATA16 + seg,
                                     datalo, base, index, 0, ofs);
-            tcg_out_ext16s(s, datalo, datalo, rexw);
+            tcg_out_ext16s(s, type, datalo, datalo);
        } else {
            tcg_out_modrm_sib_offset(s, OPC_MOVSWL + rexw + seg,
                                     datalo, base, index, 0, ofs);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_modrm(s, OPC_GRP3_Ev + rexw, EXT3_NOT, a0);
        break;

-    OP_32_64(ext16s):
-        tcg_out_ext16s(s, a0, a1, rexw);
-        break;
    OP_32_64(ext16u):
        tcg_out_ext16u(s, a0, a1);
        break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        if (a1 < 4 && a0 < 8) {
            tcg_out_modrm(s, OPC_MOVSBL, a0, a1 + 4);
        } else {
-            tcg_out_ext16s(s, a0, a1, 0);
+            tcg_out_ext16s(s, TCG_TYPE_I32, a0, a1);
            tcg_out_shifti(s, SHIFT_SAR, a0, 8);
        }
        break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
    tcg_out_opc_sext_b(s, ret, arg);
}

-static void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_sext_h(s, ret, arg);
}
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
        tcg_out_ext8s(s, type, l->datalo_reg, TCG_REG_A0);
        break;
    case MO_SW:
-        tcg_out_ext16s(s, l->datalo_reg, TCG_REG_A0);
+        tcg_out_ext16s(s, type, l->datalo_reg, TCG_REG_A0);
        break;
    case MO_SL:
        tcg_out_ext32s(s, l->datalo_reg, TCG_REG_A0);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
        break;

-    case INDEX_op_ext16s_i32:
-    case INDEX_op_ext16s_i64:
-        tcg_out_ext16s(s, a0, a1);
-        break;
-
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
        tcg_out_ext16u(s, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_bswap16_i64:
        tcg_out_opc_revb_2h(s, a0, a1);
        if (a2 & TCG_BSWAP_OS) {
-            tcg_out_ext16s(s, a0, a0);
+            tcg_out_ext16s(s, TCG_TYPE_REG, a0, a0);
        } else if ((a2 & (TCG_BSWAP_IZ | TCG_BSWAP_OZ)) == TCG_BSWAP_OZ) {
            tcg_out_ext16u(s, a0, a0);
        }
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
    tcg_out_opc_imm(s, OPC_ANDI, rd, rs, 0xff);
}

+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+    tcg_debug_assert(TCG_TARGET_HAS_ext16s_i32);
+    tcg_out_opc_reg(s, OPC_SEH, rd, TCG_REG_ZERO, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_not_i64:
        i1 = OPC_NOR;
        goto do_unary;
-    case INDEX_op_ext16s_i32:
-    case INDEX_op_ext16s_i64:
-        i1 = OPC_SEH;
    do_unary:
        tcg_out_opc_reg(s, i1, a0, TCG_REG_ZERO, a1);
        break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8u(TCGContext *s, TCGReg dst, TCGReg src)
    tcg_out32(s, ANDI | SAI(src, dst, 0xff));
}

-static inline void tcg_out_ext16s(TCGContext *s, TCGReg dst, TCGReg src)
+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg dst, TCGReg src)
{
    tcg_out32(s, EXTSH | RA(dst) | RS(src));
}
@@ -XXX,XX +XXX,XX @@ static void tcg_out_bswap16(TCGContext *s, TCGReg dst, TCGReg src, int flags)
    if (have_isa_3_10) {
        tcg_out32(s, BRH | RA(dst) | RS(src));
        if (flags & TCG_BSWAP_OS) {
-            tcg_out_ext16s(s, dst, dst);
+            tcg_out_ext16s(s, TCG_TYPE_REG, dst, dst);
        } else if ((flags & (TCG_BSWAP_IZ | TCG_BSWAP_OZ)) == TCG_BSWAP_OZ) {
            tcg_out_ext16u(s, dst, dst);
        }
@@ -XXX,XX +XXX,XX @@ static void tcg_out_bswap16(TCGContext *s, TCGReg dst, TCGReg src, int flags)
    tcg_out_rlw(s, RLWIMI, tmp, src, 8, 16, 23);

    if (flags & TCG_BSWAP_OS) {
-        tcg_out_ext16s(s, dst, tmp);
+        tcg_out_ext16s(s, TCG_TYPE_REG, dst, tmp);
    } else {
        tcg_out_mov(s, TCG_TYPE_REG, dst, tmp);
    }
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_qemu_st(s, args, true);
        break;

-    case INDEX_op_ext16s_i32:
-    case INDEX_op_ext16s_i64:
-        tcg_out_ext16s(s, args[0], args[1]);
-        break;
    case INDEX_op_ext_i32_i64:
    case INDEX_op_ext32s_i64:
        tcg_out_ext32s(s, args[0], args[1]);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
    tcg_out_opc_imm(s, OPC_SRAIW, ret, ret, 24);
}

-static void tcg_out_ext16s(TCGContext *s, TCGReg ret, TCGReg arg)
+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg)
{
    tcg_out_opc_imm(s, OPC_SLLIW, ret, arg, 16);
    tcg_out_opc_imm(s, OPC_SRAIW, ret, ret, 16);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_ext32u(s, a0, a1);
        break;

-    case INDEX_op_ext16s_i32:
-    case INDEX_op_ext16s_i64:
-        tcg_out_ext16s(s, a0, a1);
-        break;
-
    case INDEX_op_ext32s_i64:
    case INDEX_op_extrl_i64_i32:
    case INDEX_op_ext_i32_i64:
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8u(TCGContext *s, TCGReg dest, TCGReg src)
    tcg_out_insn(s, RRE, LLGCR, dest, src);
}

-static void tgen_ext16s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
{
    tcg_out_insn(s, RRE, LGHR, dest, src);
}
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld_direct(TCGContext *s, MemOp opc, TCGReg data,
    case MO_SW | MO_BSWAP:
        /* swapped sign-extended halfword load */
        tcg_out_insn(s, RXY, LRVH, data, base, index, disp);
-        tgen_ext16s(s, TCG_TYPE_I64, data, data);
+        tcg_out_ext16s(s, TCG_TYPE_REG, data, data);
        break;
    case MO_SW:
        tcg_out_insn(s, RXY, LGH, data, base, index, disp);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        }
        break;

-    case INDEX_op_ext16s_i32:
-        tgen_ext16s(s, TCG_TYPE_I32, args[0], args[1]);
-        break;
    case INDEX_op_ext16u_i32:
        tgen_ext16u(s, TCG_TYPE_I32, args[0], args[1]);
        break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        }
        break;

-    case INDEX_op_ext16s_i64:
-        tgen_ext16s(s, TCG_TYPE_I64, args[0], args[1]);
-        break;
    case INDEX_op_ext_i32_i64:
    case INDEX_op_ext32s_i64:
        tgen_ext32s(s, args[0], args[1]);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
    g_assert_not_reached();
}

+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+    g_assert_not_reached();
+}
+
static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
{
    tcg_out_arithi(s, rd, rs, 0xff, ARITH_AND);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
    }
}

+static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
+{
+    switch (type) {
+    case TCG_TYPE_I32:
+        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i32);
+        tcg_out_op_rr(s, INDEX_op_ext16s_i32, rd, rs);
+        break;
+#if TCG_TARGET_REG_BITS == 64
+    case TCG_TYPE_I64:
+        tcg_debug_assert(TCG_TARGET_HAS_ext16s_i64);
+        tcg_out_op_rr(s, INDEX_op_ext16s_i64, rd, rs);
+        break;
+#endif
+    default:
+        g_assert_not_reached();
+    }
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
-    CASE_32_64(ext16s)   /* Optional (TCG_TARGET_HAS_ext16s_*). */
    CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8s_i64:
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext8u_i64:
+    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16s_i64:
    default:
        g_assert_not_reached();
    }
--
2.34.1

diff view generated by jsdifflib
Deleted patch
We will need a backend interface for performing 16-bit zero-extend.
Use it in tcg_reg_alloc_op in the meantime.
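
For reference, the operation behind the new hook is a plain zero-extension
of the low 16 bits; as an illustrative C sketch only (not part of the patch;
the helper names are invented here):

    uint32_t ext16u_i32(uint32_t x) { return x & 0xffff; }  /* clear bits 31..16 */
    uint64_t ext16u_i64(uint64_t x) { return x & 0xffff; }  /* clear bits 63..16 */

Since the result is the same bit pattern for both widths, the new
tcg_out_ext16u() takes no TCGType argument, unlike tcg_out_ext16s().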

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/tcg.c                        |  5 +++++
 tcg/aarch64/tcg-target.c.inc     | 13 ++++++++-----
 tcg/arm/tcg-target.c.inc         | 17 ++++++++++-------
 tcg/i386/tcg-target.c.inc        |  8 +++-----
 tcg/loongarch64/tcg-target.c.inc |  7 ++-----
 tcg/mips/tcg-target.c.inc        |  5 +++++
 tcg/ppc/tcg-target.c.inc         |  4 +++-
 tcg/riscv/tcg-target.c.inc       |  7 ++-----
 tcg/s390x/tcg-target.c.inc       | 17 ++++++-----------
 tcg/sparc64/tcg-target.c.inc     | 11 +++++++++--
 tcg/tci/tcg-target.c.inc         | 14 +++++++++++++-
 11 files changed, 66 insertions(+), 42 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static void tcg_out_movi(TCGContext *s, TCGType type,
static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
static void tcg_out_ext8u(TCGContext *s, TCGReg ret, TCGReg arg);
+static void tcg_out_ext16u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
static void tcg_out_goto_tb(TCGContext *s, int which);
@@ -XXX,XX +XXX,XX @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
    case INDEX_op_ext16s_i64:
        tcg_out_ext16s(s, TCG_TYPE_I64, new_args[0], new_args[1]);
        break;
+    case INDEX_op_ext16u_i32:
+    case INDEX_op_ext16u_i64:
+        tcg_out_ext16u(s, new_args[0], new_args[1]);
+        break;
    default:
        if (def->flags & TCG_OPF_VECTOR) {
            tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rn)
    tcg_out_uxt(s, MO_8, rd, rn);
}

+static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+    tcg_out_uxt(s, MO_16, rd, rn);
+}
+
static void tcg_out_addsubi(TCGContext *s, int ext, TCGReg rd,
                            TCGReg rn, int64_t aimm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
            tcg_out_ext16s(s, ext, a0, a0);
        } else if ((a2 & (TCG_BSWAP_IZ | TCG_BSWAP_OZ)) == TCG_BSWAP_OZ) {
            /* Output must be zero-extended, but input isn't. */
-            tcg_out_uxt(s, MO_16, a0, a0);
+            tcg_out_ext16u(s, a0, a0);
        }
        break;

@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext32s_i64:
        tcg_out_sxt(s, TCG_TYPE_I64, MO_32, a0, a1);
        break;
-    case INDEX_op_ext16u_i64:
-    case INDEX_op_ext16u_i32:
-        tcg_out_uxt(s, MO_16, a0, a1);
-        break;
    case INDEX_op_extu_i32_i64:
    case INDEX_op_ext32u_i64:
        tcg_out_movr(s, TCG_TYPE_I32, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext16u_i32:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16s(TCGContext *s, TCGType t, TCGReg rd, TCGReg rn)
    tcg_out32(s, 0x06bf0070 | (COND_AL << 28) | (rd << 12) | rn);
}

-static void tcg_out_ext16u(TCGContext *s, ARMCond cond, TCGReg rd, TCGReg rn)
+static void tcg_out_ext16u_cond(TCGContext *s, ARMCond cond,
+                                TCGReg rd, TCGReg rn)
{
    /* uxth */
    tcg_out32(s, 0x06ff0070 | (cond << 28) | (rd << 12) | rn);
}

+static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+    tcg_out_ext16u_cond(s, COND_AL, rd, rn);
+}
+
static void tcg_out_bswap16(TCGContext *s, ARMCond cond,
                            TCGReg rd, TCGReg rn, int flags)
{
@@ -XXX,XX +XXX,XX @@ DEFINE_TCG_OUT_ARG(tcg_out_arg_imm32, uint32_t, tcg_out_movi32,
    (tcg_out_movi32(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
DEFINE_TCG_OUT_ARG(tcg_out_arg_reg8, TCGReg, tcg_out_ext8u_cond,
    (tcg_out_ext8u_cond(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
-DEFINE_TCG_OUT_ARG(tcg_out_arg_reg16, TCGReg, tcg_out_ext16u,
-    (tcg_out_ext16u(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
+DEFINE_TCG_OUT_ARG(tcg_out_arg_reg16, TCGReg, tcg_out_ext16u_cond,
+    (tcg_out_ext16u_cond(s, COND_AL, TCG_REG_TMP, arg), arg = TCG_REG_TMP))
DEFINE_TCG_OUT_ARG(tcg_out_arg_reg32, TCGReg, tcg_out_mov_reg, )

static TCGReg tcg_out_arg_reg64(TCGContext *s, TCGReg argreg,
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_bswap32(s, COND_AL, args[0], args[1]);
        break;

-    case INDEX_op_ext16u_i32:
-        tcg_out_ext16u(s, COND_AL, args[0], args[1]);
-        break;
-
    case INDEX_op_deposit_i32:
        tcg_out_deposit(s, COND_AL, args[0], args[2],
                        args[3], args[4], const_args[2]);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8s_i32: /* Always emitted via tcg_reg_alloc_op. */
    case INDEX_op_ext8u_i32:
    case INDEX_op_ext16s_i32:
+    case INDEX_op_ext16u_i32:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
    tcg_out_modrm(s, OPC_MOVSBL + P_REXB_RM + rexw, dest, src);
}

-static inline void tcg_out_ext16u(TCGContext *s, int dest, int src)
+static void tcg_out_ext16u(TCGContext *s, TCGReg dest, TCGReg src)
{
    /* movzwl */
    tcg_out_modrm(s, OPC_MOVZWL, dest, src);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_modrm(s, OPC_GRP3_Ev + rexw, EXT3_NOT, a0);
        break;

-    OP_32_64(ext16u):
-        tcg_out_ext16u(s, a0, a1);
-        break;
-
    case INDEX_op_qemu_ld_i32:
        tcg_out_qemu_ld(s, args, 0);
        break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext16u_i32:
+    case INDEX_op_ext16u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
        break;

-    case INDEX_op_ext16u_i32:
-    case INDEX_op_ext16u_i64:
-        tcg_out_ext16u(s, a0, a1);
-        break;
-
    case INDEX_op_ext32u_i64:
    case INDEX_op_extu_i32_i64:
        tcg_out_ext32u(s, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext16u_i32:
+    case INDEX_op_ext16u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
    tcg_out_opc_reg(s, OPC_SEH, rd, TCG_REG_ZERO, rs);
}

+static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_opc_imm(s, OPC_ANDI, rd, rs, 0xffff);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg dst, TCGReg src)
    tcg_out32(s, EXTSH | RA(dst) | RS(src));
}

-static inline void tcg_out_ext16u(TCGContext *s, TCGReg dst, TCGReg src)
+static void tcg_out_ext16u(TCGContext *s, TCGReg dst, TCGReg src)
{
    tcg_out32(s, ANDI | SAI(src, dst, 0xffff));
}
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext16u_i32:
+    case INDEX_op_ext16u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_qemu_st(s, args, true);
        break;

-    case INDEX_op_ext16u_i32:
-    case INDEX_op_ext16u_i64:
-        tcg_out_ext16u(s, a0, a1);
-        break;
-
    case INDEX_op_ext32u_i64:
    case INDEX_op_extu_i32_i64:
        tcg_out_ext32u(s, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext16u_i32:
+    case INDEX_op_ext16u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
    tcg_out_insn(s, RRE, LGHR, dest, src);
}

-static void tgen_ext16u(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
+static void tcg_out_ext16u(TCGContext *s, TCGReg dest, TCGReg src)
{
    tcg_out_insn(s, RRE, LLGHR, dest, src);
}
@@ -XXX,XX +XXX,XX @@ static void tgen_andi(TCGContext *s, TCGType type, TCGReg dest, uint64_t val)
        return;
    }
    if ((val & valid) == 0xffff) {
-        tgen_ext16u(s, TCG_TYPE_I64, dest, dest);
+        tcg_out_ext16u(s, dest, dest);
        return;
    }

@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld_direct(TCGContext *s, MemOp opc, TCGReg data,
    case MO_UW | MO_BSWAP:
        /* swapped unsigned halfword load with upper bits zeroed */
        tcg_out_insn(s, RXY, LRVH, data, base, index, disp);
-        tgen_ext16u(s, TCG_TYPE_I64, data, data);
+        tcg_out_ext16u(s, data, data);
        break;
    case MO_UW:
        tcg_out_insn(s, RXY, LLGH, data, base, index, disp);
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
        tcg_out_ext8u(s, TCG_REG_R4, data_reg);
        break;
    case MO_UW:
-        tgen_ext16u(s, TCG_TYPE_I64, TCG_REG_R4, data_reg);
+        tcg_out_ext16u(s, TCG_REG_R4, data_reg);
        break;
    case MO_UL:
        tgen_ext32u(s, TCG_REG_R4, data_reg);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        }
        break;

-    case INDEX_op_ext16u_i32:
-        tgen_ext16u(s, TCG_TYPE_I32, args[0], args[1]);
-        break;
-
    case INDEX_op_bswap16_i32:
        a0 = args[0], a1 = args[1], a2 = args[2];
        tcg_out_insn(s, RRE, LRVR, a0, a1);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext32s_i64:
        tgen_ext32s(s, args[0], args[1]);
        break;
-    case INDEX_op_ext16u_i64:
-        tgen_ext16u(s, TCG_TYPE_I64, args[0], args[1]);
-        break;
    case INDEX_op_extu_i32_i64:
    case INDEX_op_ext32u_i64:
        tgen_ext32u(s, args[0], args[1]);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext16u_i32:
+    case INDEX_op_ext16u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8u(TCGContext *s, TCGReg rd, TCGReg rs)
    tcg_out_arithi(s, rd, rs, 0xff, ARITH_AND);
}

+static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_arithi(s, rd, rs, 16, SHIFT_SLL);
+    tcg_out_arithi(s, rd, rd, 16, SHIFT_SRL);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void emit_extend(TCGContext *s, TCGReg r, int op)
        tcg_out_ext8u(s, r, r);
        break;
    case MO_16:
-        tcg_out_arithi(s, r, r, 16, SHIFT_SLL);
-        tcg_out_arithi(s, r, r, 16, SHIFT_SRL);
+        tcg_out_ext16u(s, r, r);
        break;
    case MO_32:
        tcg_out_arith(s, r, r, 0, SHIFT_SRL);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext16u_i32:
+    case INDEX_op_ext16u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rs)
    }
}

+static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    if (TCG_TARGET_REG_BITS == 64) {
+        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i64);
+        tcg_out_op_rr(s, INDEX_op_ext16u_i64, rd, rs);
+    } else {
+        tcg_debug_assert(TCG_TARGET_HAS_ext16u_i32);
+        tcg_out_op_rr(s, INDEX_op_ext16u_i32, rd, rs);
+    }
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
-    CASE_32_64(ext16u)   /* Optional (TCG_TARGET_HAS_ext16u_*). */
    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    CASE_64(ext32u)      /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    CASE_64(ext_i32)
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext16u_i32:
+    case INDEX_op_ext16u_i64:
    default:
        g_assert_not_reached();
    }
--
2.34.1

diff view generated by jsdifflib
Deleted patch
We will need a backend interface for performing 32-bit sign-extend.
Use it in tcg_reg_alloc_op in the meantime.
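
For reference, the new hook implements the usual 32-to-64-bit sign
extension; as an illustrative C sketch only (not part of the patch; the
helper name is invented here):

    int64_t ext32s_i64(int64_t x) { return (int32_t)x; }  /* sign-extend bits 31..0 */

The operation only exists for a 64-bit result, which is why several backends
below gain a tcg_debug_assert(TCG_TARGET_REG_BITS == 64) in the new
function.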

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/tcg.c                        |  4 ++++
 tcg/aarch64/tcg-target.c.inc     |  9 +++++++--
 tcg/arm/tcg-target.c.inc         |  5 +++++
 tcg/i386/tcg-target.c.inc        |  5 +++--
 tcg/loongarch64/tcg-target.c.inc |  2 +-
 tcg/mips/tcg-target.c.inc        | 12 +++++++++---
 tcg/ppc/tcg-target.c.inc         |  5 +++--
 tcg/riscv/tcg-target.c.inc       |  2 +-
 tcg/s390x/tcg-target.c.inc       | 10 +++++-----
 tcg/sparc64/tcg-target.c.inc     | 11 ++++++++---
 tcg/tci/tcg-target.c.inc         |  9 ++++++++-
 11 files changed, 54 insertions(+), 20 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
static void tcg_out_ext8u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_ext16u(TCGContext *s, TCGReg ret, TCGReg arg);
+static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
static void tcg_out_goto_tb(TCGContext *s, int which);
@@ -XXX,XX +XXX,XX @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
    case INDEX_op_ext16u_i64:
        tcg_out_ext16u(s, new_args[0], new_args[1]);
        break;
+    case INDEX_op_ext32s_i64:
+        tcg_out_ext32s(s, new_args[0], new_args[1]);
+        break;
    default:
        if (def->flags & TCG_OPF_VECTOR) {
            tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg rd, TCGReg rn)
    tcg_out_sxt(s, type, MO_16, rd, rn);
}

+static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+    tcg_out_sxt(s, TCG_TYPE_I64, MO_32, rd, rn);
+}
+
static inline void tcg_out_uxt(TCGContext *s, MemOp s_bits,
                               TCGReg rd, TCGReg rn)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_bswap32_i64:
        tcg_out_rev(s, TCG_TYPE_I32, MO_32, a0, a1);
        if (a2 & TCG_BSWAP_OS) {
-            tcg_out_sxt(s, TCG_TYPE_I64, MO_32, a0, a0);
+            tcg_out_ext32s(s, a0, a0);
        }
        break;
    case INDEX_op_bswap32_i32:
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        break;

    case INDEX_op_ext_i32_i64:
-    case INDEX_op_ext32s_i64:
        tcg_out_sxt(s, TCG_TYPE_I64, MO_32, a0, a1);
        break;
    case INDEX_op_extu_i32_i64:
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext16u_i32:
+    case INDEX_op_ext32s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rn)
    tcg_out_ext16u_cond(s, COND_AL, rd, rn);
}

+static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+    g_assert_not_reached();
+}
+
static void tcg_out_bswap16(TCGContext *s, ARMCond cond,
                            TCGReg rd, TCGReg rn, int flags)
{
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_ext32u(TCGContext *s, int dest, int src)
    tcg_out_modrm(s, OPC_MOVL_GvEv, dest, src);
}

-static inline void tcg_out_ext32s(TCGContext *s, int dest, int src)
+static void tcg_out_ext32s(TCGContext *s, TCGReg dest, TCGReg src)
{
+    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_modrm(s, OPC_MOVSLQ, dest, src);
}

@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_ext32u(s, a0, a1);
        break;
    case INDEX_op_ext_i32_i64:
-    case INDEX_op_ext32s_i64:
        tcg_out_ext32s(s, a0, a1);
        break;
    case INDEX_op_extrh_i64_i32:
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext32s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_ext32u(s, a0, a1);
        break;

-    case INDEX_op_ext32s_i64:
    case INDEX_op_extrl_i64_i32:
    case INDEX_op_ext_i32_i64:
        tcg_out_ext32s(s, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext32s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
    tcg_out_opc_imm(s, OPC_ANDI, rd, rs, 0xffff);
}

+static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
+    tcg_out_opc_sa(s, OPC_SLL, rd, rs, 0);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
    /* delay slot */
    if (TCG_TARGET_REG_BITS == 64 && l->type == TCG_TYPE_I32) {
        /* we always sign-extend 32-bit loads */
-        tcg_out_opc_sa(s, OPC_SLL, v0, TCG_REG_V0, 0);
+        tcg_out_ext32s(s, v0, TCG_REG_V0);
    } else {
        tcg_out_opc_reg(s, OPC_OR, v0, TCG_REG_V0, TCG_REG_ZERO);
    }
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_extrh_i64_i32:
        tcg_out_dsra(s, a0, a1, 32);
        break;
-    case INDEX_op_ext32s_i64:
    case INDEX_op_ext_i32_i64:
    case INDEX_op_extrl_i64_i32:
-        tcg_out_opc_sa(s, OPC_SLL, a0, a1, 0);
+        tcg_out_ext32s(s, a0, a1);
        break;
    case INDEX_op_ext32u_i64:
    case INDEX_op_extu_i32_i64:
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext8u_i64:
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
+    case INDEX_op_ext32s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16u(TCGContext *s, TCGReg dst, TCGReg src)
    tcg_out32(s, ANDI | SAI(src, dst, 0xffff));
}

-static inline void tcg_out_ext32s(TCGContext *s, TCGReg dst, TCGReg src)
+static void tcg_out_ext32s(TCGContext *s, TCGReg dst, TCGReg src)
{
+    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out32(s, EXTSW | RA(dst) | RS(src));
}

@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        break;

    case INDEX_op_ext_i32_i64:
-    case INDEX_op_ext32s_i64:
        tcg_out_ext32s(s, args[0], args[1]);
        break;
    case INDEX_op_extu_i32_i64:
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext32s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_ext32u(s, a0, a1);
        break;

-    case INDEX_op_ext32s_i64:
    case INDEX_op_extrl_i64_i32:
    case INDEX_op_ext_i32_i64:
        tcg_out_ext32s(s, a0, a1);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext32s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16u(TCGContext *s, TCGReg dest, TCGReg src)
    tcg_out_insn(s, RRE, LLGHR, dest, src);
}

-static inline void tgen_ext32s(TCGContext *s, TCGReg dest, TCGReg src)
+static void tcg_out_ext32s(TCGContext *s, TCGReg dest, TCGReg src)
{
    tcg_out_insn(s, RRE, LGFR, dest, src);
}
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld_direct(TCGContext *s, MemOp opc, TCGReg data,
    case MO_SL | MO_BSWAP:
        /* swapped sign-extended int load */
        tcg_out_insn(s, RXY, LRV, data, base, index, disp);
-        tgen_ext32s(s, data, data);
+        tcg_out_ext32s(s, data, data);
        break;
    case MO_SL:
        tcg_out_insn(s, RXY, LGF, data, base, index, disp);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        a0 = args[0], a1 = args[1], a2 = args[2];
        tcg_out_insn(s, RRE, LRVR, a0, a1);
        if (a2 & TCG_BSWAP_OS) {
-            tgen_ext32s(s, a0, a0);
+            tcg_out_ext32s(s, a0, a0);
        } else if ((a2 & (TCG_BSWAP_IZ | TCG_BSWAP_OZ)) == TCG_BSWAP_OZ) {
            tgen_ext32u(s, a0, a0);
        }
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        break;

    case INDEX_op_ext_i32_i64:
-    case INDEX_op_ext32s_i64:
-        tgen_ext32s(s, args[0], args[1]);
+        tcg_out_ext32s(s, args[0], args[1]);
        break;
    case INDEX_op_extu_i32_i64:
    case INDEX_op_ext32u_i64:
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext32s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
    tcg_out_arithi(s, rd, rd, 16, SHIFT_SRL);
}

+static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_arithi(s, rd, rs, 0, SHIFT_SRA);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld(TCGContext *s, TCGReg data, TCGReg addr,

    /* We let the helper sign-extend SB and SW, but leave SL for here. */
    if (is_64 && (memop & MO_SSIZE) == MO_SL) {
-        tcg_out_arithi(s, data, TCG_REG_O0, 0, SHIFT_SRA);
+        tcg_out_ext32s(s, data, TCG_REG_O0);
    } else {
        tcg_out_mov(s, TCG_TYPE_REG, data, TCG_REG_O0);
    }
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        c = ARITH_UDIVX;
        goto gen_arith;
    case INDEX_op_ext_i32_i64:
-    case INDEX_op_ext32s_i64:
-        tcg_out_arithi(s, a0, a1, 0, SHIFT_SRA);
+        tcg_out_ext32s(s, a0, a1);
        break;
    case INDEX_op_extu_i32_i64:
    case INDEX_op_ext32u_i64:
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext32s_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rs)
    }
}

+static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
+    tcg_debug_assert(TCG_TARGET_HAS_ext32s_i64);
+    tcg_out_op_rr(s, INDEX_op_ext32s_i64, rd, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,

    CASE_32_64(neg)      /* Optional (TCG_TARGET_HAS_neg_*). */
    CASE_32_64(not)      /* Optional (TCG_TARGET_HAS_not_*). */
-    CASE_64(ext32s)      /* Optional (TCG_TARGET_HAS_ext32s_i64). */
    CASE_64(ext32u)      /* Optional (TCG_TARGET_HAS_ext32u_i64). */
    CASE_64(ext_i32)
    CASE_64(extu_i32)
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
+    case INDEX_op_ext32s_i64:
    default:
        g_assert_not_reached();
    }
--
2.34.1

diff view generated by jsdifflib
Deleted patch
We will need a backend interface for performing 32-bit zero-extend.
Use it in tcg_reg_alloc_op in the meantime.
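
For reference, the new hook zero-extends the low 32 bits into a 64-bit
result; as an illustrative C sketch only (not part of the patch; the helper
name is invented here):

    uint64_t ext32u_i64(uint64_t x) { return (uint32_t)x; }  /* clear bits 63..32 */

As with ext32s, this is meaningful only on 64-bit hosts; note how the s390x
and sparc64 hunks below also reuse the new function to zero-extend 32-bit
guest addresses in the softmmu paths.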

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
 tcg/tcg.c                        |  4 ++++
 tcg/aarch64/tcg-target.c.inc     |  9 +++++++--
 tcg/arm/tcg-target.c.inc         |  5 +++++
 tcg/i386/tcg-target.c.inc        |  4 ++--
 tcg/loongarch64/tcg-target.c.inc |  2 +-
 tcg/mips/tcg-target.c.inc        |  3 ++-
 tcg/ppc/tcg-target.c.inc         |  4 +++-
 tcg/riscv/tcg-target.c.inc       |  2 +-
 tcg/s390x/tcg-target.c.inc       | 20 ++++++++++----------
 tcg/sparc64/tcg-target.c.inc     | 17 +++++++++++------
 tcg/tci/tcg-target.c.inc         |  9 ++++++++-
 11 files changed, 54 insertions(+), 25 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg ret, TCGReg arg);
static void tcg_out_ext8u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_ext16u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg);
+static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
static void tcg_out_goto_tb(TCGContext *s, int which);
@@ -XXX,XX +XXX,XX @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
    case INDEX_op_ext32s_i64:
        tcg_out_ext32s(s, new_args[0], new_args[1]);
        break;
+    case INDEX_op_ext32u_i64:
+        tcg_out_ext32u(s, new_args[0], new_args[1]);
+        break;
    default:
        if (def->flags & TCG_OPF_VECTOR) {
            tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16u(TCGContext *s, TCGReg rd, TCGReg rn)
    tcg_out_uxt(s, MO_16, rd, rn);
}

+static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+    tcg_out_movr(s, TCG_TYPE_I32, rd, rn);
+}
+
static void tcg_out_addsubi(TCGContext *s, int ext, TCGReg rd,
                            TCGReg rn, int64_t aimm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_sxt(s, TCG_TYPE_I64, MO_32, a0, a1);
        break;
    case INDEX_op_extu_i32_i64:
-    case INDEX_op_ext32u_i64:
-        tcg_out_movr(s, TCG_TYPE_I32, a0, a1);
+        tcg_out_ext32u(s, a0, a1);
        break;

    case INDEX_op_deposit_i64:
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext32s_i64:
+    case INDEX_op_ext32u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rn)
    g_assert_not_reached();
}

+static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+    g_assert_not_reached();
+}
+
static void tcg_out_bswap16(TCGContext *s, ARMCond cond,
                            TCGReg rd, TCGReg rn, int flags)
{
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16s(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
    tcg_out_modrm(s, OPC_MOVSWL + rexw, dest, src);
}

-static inline void tcg_out_ext32u(TCGContext *s, int dest, int src)
+static void tcg_out_ext32u(TCGContext *s, TCGReg dest, TCGReg src)
{
    /* 32-bit mov zero extends. */
    tcg_out_modrm(s, OPC_MOVL_GvEv, dest, src);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_bswap64(s, a0);
        break;
    case INDEX_op_extu_i32_i64:
-    case INDEX_op_ext32u_i64:
    case INDEX_op_extrl_i64_i32:
        tcg_out_ext32u(s, a0, a1);
        break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
+    case INDEX_op_ext32u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
        break;

-    case INDEX_op_ext32u_i64:
    case INDEX_op_extu_i32_i64:
        tcg_out_ext32u(s, a0, a1);
        break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
+    case INDEX_op_ext32u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_bswap64(TCGContext *s, TCGReg ret, TCGReg arg)

static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg)
{
+    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    if (use_mips32r2_instructions) {
        tcg_out_opc_bf(s, OPC_DEXT, ret, arg, 31, 0);
    } else {
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_extrl_i64_i32:
        tcg_out_ext32s(s, a0, a1);
        break;
-    case INDEX_op_ext32u_i64:
    case INDEX_op_extu_i32_i64:
        tcg_out_ext32u(s, a0, a1);
        break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16s_i32:
    case INDEX_op_ext16s_i64:
    case INDEX_op_ext32s_i64:
+    case INDEX_op_ext32u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg dst, TCGReg src)
    tcg_out32(s, EXTSW | RA(dst) | RS(src));
}

-static inline void tcg_out_ext32u(TCGContext *s, TCGReg dst, TCGReg src)
+static void tcg_out_ext32u(TCGContext *s, TCGReg dst, TCGReg src)
{
+    tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
    tcg_out_rld(s, RLDICL, dst, src, 0, 32);
}

@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
+    case INDEX_op_ext32u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_qemu_st(s, args, true);
        break;

-    case INDEX_op_ext32u_i64:
    case INDEX_op_extu_i32_i64:
        tcg_out_ext32u(s, a0, a1);
        break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
+    case INDEX_op_ext32u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg dest, TCGReg src)
    tcg_out_insn(s, RRE, LGFR, dest, src);
}

-static inline void tgen_ext32u(TCGContext *s, TCGReg dest, TCGReg src)
+static void tcg_out_ext32u(TCGContext *s, TCGReg dest, TCGReg src)
{
    tcg_out_insn(s, RRE, LLGFR, dest, src);
}
@@ -XXX,XX +XXX,XX @@ static void tgen_andi(TCGContext *s, TCGType type, TCGReg dest, uint64_t val)

    /* Look for the zero-extensions. */
    if ((val & valid) == 0xffffffff) {
-        tgen_ext32u(s, dest, dest);
+        tcg_out_ext32u(s, dest, dest);
        return;
    }
    if ((val & valid) == 0xff) {
@@ -XXX,XX +XXX,XX @@ static void tgen_ctpop(TCGContext *s, TCGType type, TCGReg dest, TCGReg src)
    /* With MIE3, and bit 0 of m4 set, we get the complete result. */
    if (HAVE_FACILITY(MISC_INSN_EXT3)) {
        if (type == TCG_TYPE_I32) {
-            tgen_ext32u(s, dest, src);
+            tcg_out_ext32u(s, dest, src);
            src = dest;
        }
        tcg_out_insn(s, RRFc, POPCNT, dest, src, 8);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld_direct(TCGContext *s, MemOp opc, TCGReg data,
    case MO_UL | MO_BSWAP:
        /* swapped unsigned int load with upper bits zeroed */
        tcg_out_insn(s, RXY, LRV, data, base, index, disp);
-        tgen_ext32u(s, data, data);
+        tcg_out_ext32u(s, data, data);
        break;
    case MO_UL:
        tcg_out_insn(s, RXY, LLGF, data, base, index, disp);
@@ -XXX,XX +XXX,XX @@ static TCGReg tcg_out_tlb_read(TCGContext *s, TCGReg addr_reg, MemOp opc,
                   offsetof(CPUTLBEntry, addend));

    if (TARGET_LONG_BITS == 32) {
-        tgen_ext32u(s, TCG_REG_R3, addr_reg);
+        tcg_out_ext32u(s, TCG_REG_R3, addr_reg);
        return TCG_REG_R3;
    }
    return addr_reg;
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
        tcg_out_ext16u(s, TCG_REG_R4, data_reg);
        break;
    case MO_UL:
-        tgen_ext32u(s, TCG_REG_R4, data_reg);
+        tcg_out_ext32u(s, TCG_REG_R4, data_reg);
        break;
    case MO_UQ:
        tcg_out_mov(s, TCG_TYPE_I64, TCG_REG_R4, data_reg);
@@ -XXX,XX +XXX,XX @@ static void tcg_prepare_user_ldst(TCGContext *s, TCGReg *addr_reg,
                                  TCGReg *index_reg, tcg_target_long *disp)
{
    if (TARGET_LONG_BITS == 32) {
-        tgen_ext32u(s, TCG_TMP0, *addr_reg);
+        tcg_out_ext32u(s, TCG_TMP0, *addr_reg);
        *addr_reg = TCG_TMP0;
    }
    if (guest_base < 0x80000) {
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        if (a2 & TCG_BSWAP_OS) {
            tcg_out_ext32s(s, a0, a0);
        } else if ((a2 & (TCG_BSWAP_IZ | TCG_BSWAP_OZ)) == TCG_BSWAP_OZ) {
-            tgen_ext32u(s, a0, a0);
+            tcg_out_ext32u(s, a0, a0);
        }
        break;

@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
        tcg_out_ext32s(s, args[0], args[1]);
        break;
    case INDEX_op_extu_i32_i64:
-    case INDEX_op_ext32u_i64:
-        tgen_ext32u(s, args[0], args[1]);
+        tcg_out_ext32u(s, args[0], args[1]);
        break;

    case INDEX_op_add2_i64:
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
    case INDEX_op_ext16u_i32:
    case INDEX_op_ext16u_i64:
    case INDEX_op_ext32s_i64:
+    case INDEX_op_ext32u_i64:
    default:
        g_assert_not_reached();
    }
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
    tcg_out_arithi(s, rd, rs, 0, SHIFT_SRA);
}

+static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+    tcg_out_arithi(s, rd, rs, 0, SHIFT_SRL);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
                             tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void emit_extend(TCGContext *s, TCGReg r, int op)
        tcg_out_ext16u(s, r, r);
        break;
    case MO_32:
-        tcg_out_arith(s, r, r, 0, SHIFT_SRL);
+        tcg_out_ext32u(s, r, r);
        break;
    case MO_64:
        break;
@@ -XXX,XX +XXX,XX @@ static TCGReg tcg_out_tlb_load(TCGContext *s, TCGReg addr, int mem_index,

    /* If the guest address must be zero-extended, do so now. */
    if (TARGET_LONG_BITS == 32) {
-        tcg_out_arithi(s, r0, addr, 0, SHIFT_SRL);
+        tcg_out_ext32u(s, r0, addr);
        return r0;
    }
    return addr;
340
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld(TCGContext *s, TCGReg data, TCGReg addr,
341
unsigned t_bits;
342
343
if (TARGET_LONG_BITS == 32) {
344
- tcg_out_arithi(s, TCG_REG_T1, addr, 0, SHIFT_SRL);
345
+ tcg_out_ext32u(s, TCG_REG_T1, addr);
346
addr = TCG_REG_T1;
347
}
348
349
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_st(TCGContext *s, TCGReg data, TCGReg addr,
350
unsigned t_bits;
351
352
if (TARGET_LONG_BITS == 32) {
353
- tcg_out_arithi(s, TCG_REG_T1, addr, 0, SHIFT_SRL);
354
+ tcg_out_ext32u(s, TCG_REG_T1, addr);
355
addr = TCG_REG_T1;
356
}
357
358
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
359
tcg_out_ext32s(s, a0, a1);
360
break;
361
case INDEX_op_extu_i32_i64:
362
- case INDEX_op_ext32u_i64:
363
- tcg_out_arithi(s, a0, a1, 0, SHIFT_SRL);
364
+ tcg_out_ext32u(s, a0, a1);
365
break;
366
case INDEX_op_extrl_i64_i32:
367
tcg_out_mov(s, TCG_TYPE_I32, a0, a1);
368
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
369
case INDEX_op_ext16u_i32:
370
case INDEX_op_ext16u_i64:
371
case INDEX_op_ext32s_i64:
372
+ case INDEX_op_ext32u_i64:
373
default:
374
g_assert_not_reached();
375
}
376
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
377
index XXXXXXX..XXXXXXX 100644
378
--- a/tcg/tci/tcg-target.c.inc
379
+++ b/tcg/tci/tcg-target.c.inc
380
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
381
tcg_out_op_rr(s, INDEX_op_ext32s_i64, rd, rs);
382
}
383
384
+static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
385
+{
386
+ tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
387
+ tcg_debug_assert(TCG_TARGET_HAS_ext32u_i64);
388
+ tcg_out_op_rr(s, INDEX_op_ext32u_i64, rd, rs);
389
+}
390
+
391
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
392
tcg_target_long imm)
393
{
394
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
395
396
CASE_32_64(neg) /* Optional (TCG_TARGET_HAS_neg_*). */
397
CASE_32_64(not) /* Optional (TCG_TARGET_HAS_not_*). */
398
- CASE_64(ext32u) /* Optional (TCG_TARGET_HAS_ext32u_i64). */
399
CASE_64(ext_i32)
400
CASE_64(extu_i32)
401
CASE_32_64(ctpop) /* Optional (TCG_TARGET_HAS_ctpop_*). */
402
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
403
case INDEX_op_ext16u_i32:
404
case INDEX_op_ext16u_i64:
405
case INDEX_op_ext32s_i64:
406
+ case INDEX_op_ext32u_i64:
407
default:
408
g_assert_not_reached();
409
}
410
--
411
2.34.1
412
413
Deleted patch

We will need a backend interface for type extension with sign.
Use it in tcg_reg_alloc_op in the meantime.
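
The shape of the new hook is the same on most 64-bit hosts: a thin
wrapper around the backend's existing 32-bit sign-extension helper.
Lifted verbatim from the riscv hunk below:

    static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
    {
        tcg_out_ext32s(s, ret, arg);
    }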

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/tcg.c | 4 ++++
tcg/aarch64/tcg-target.c.inc | 9 ++++++---
tcg/arm/tcg-target.c.inc | 5 +++++
tcg/i386/tcg-target.c.inc | 9 ++++++---
tcg/loongarch64/tcg-target.c.inc | 7 ++++++-
tcg/mips/tcg-target.c.inc | 7 ++++++-
tcg/ppc/tcg-target.c.inc | 9 ++++++---
tcg/riscv/tcg-target.c.inc | 7 ++++++-
tcg/s390x/tcg-target.c.inc | 9 ++++++---
tcg/sparc64/tcg-target.c.inc | 9 ++++++---
tcg/tci/tcg-target.c.inc | 7 ++++++-
11 files changed, 63 insertions(+), 19 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext8u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_ext16u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg);
+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
static void tcg_out_goto_tb(TCGContext *s, int which);
@@ -XXX,XX +XXX,XX @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
case INDEX_op_ext32u_i64:
tcg_out_ext32u(s, new_args[0], new_args[1]);
break;
+ case INDEX_op_ext_i32_i64:
+ tcg_out_exts_i32_i64(s, new_args[0], new_args[1]);
+ break;
default:
if (def->flags & TCG_OPF_VECTOR) {
tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rn)
tcg_out_sxt(s, TCG_TYPE_I64, MO_32, rd, rn);
}

+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+ tcg_out_ext32s(s, rd, rn);
+}
+
static inline void tcg_out_uxt(TCGContext *s, MemOp s_bits,
TCGReg rd, TCGReg rn)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
break;

- case INDEX_op_ext_i32_i64:
- tcg_out_sxt(s, TCG_TYPE_I64, MO_32, a0, a1);
- break;
case INDEX_op_extu_i32_i64:
tcg_out_ext32u(s, a0, a1);
break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16u_i32:
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
+ case INDEX_op_ext_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rn)
g_assert_not_reached();
}

+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+ g_assert_not_reached();
+}
+
static void tcg_out_bswap16(TCGContext *s, ARMCond cond,
TCGReg rd, TCGReg rn, int flags)
{
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg dest, TCGReg src)
tcg_out_modrm(s, OPC_MOVSLQ, dest, src);
}

+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg dest, TCGReg src)
+{
+ tcg_out_ext32s(s, dest, src);
+}
+
static inline void tcg_out_bswap64(TCGContext *s, int reg)
{
tcg_out_opc(s, OPC_BSWAP + P_REXW + LOWREGMASK(reg), 0, reg, 0);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_extrl_i64_i32:
tcg_out_ext32u(s, a0, a1);
break;
- case INDEX_op_ext_i32_i64:
- tcg_out_ext32s(s, a0, a1);
- break;
case INDEX_op_extrh_i64_i32:
tcg_out_shifti(s, SHIFT_SHR + P_REXW, a0, 32);
break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16u_i64:
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
+ case INDEX_op_ext_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg)
tcg_out_opc_addi_w(s, ret, arg, 0);
}

+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
+{
+ tcg_out_ext32s(s, ret, arg);
+}
+
static void tcg_out_clzctz(TCGContext *s, LoongArchInsn opc,
TCGReg a0, TCGReg a1, TCGReg a2,
bool c2, bool is_32bit)
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
break;

case INDEX_op_extrl_i64_i32:
- case INDEX_op_ext_i32_i64:
tcg_out_ext32s(s, a0, a1);
break;

@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16u_i64:
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
+ case INDEX_op_ext_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_opc_sa(s, OPC_SLL, rd, rs, 0);
}

+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_out_ext32s(s, rd, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_extrh_i64_i32:
tcg_out_dsra(s, a0, a1, 32);
break;
- case INDEX_op_ext_i32_i64:
case INDEX_op_extrl_i64_i32:
tcg_out_ext32s(s, a0, a1);
break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16s_i64:
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
+ case INDEX_op_ext_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32u(TCGContext *s, TCGReg dst, TCGReg src)
tcg_out_rld(s, RLDICL, dst, src, 0, 32);
}

+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg dst, TCGReg src)
+{
+ tcg_out_ext32s(s, dst, src);
+}
+
static inline void tcg_out_shli32(TCGContext *s, TCGReg dst, TCGReg src, int c)
{
tcg_out_rlw(s, RLWINM, dst, src, c, 0, 31 - c);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_qemu_st(s, args, true);
break;

- case INDEX_op_ext_i32_i64:
- tcg_out_ext32s(s, args[0], args[1]);
- break;
case INDEX_op_extu_i32_i64:
tcg_out_ext32u(s, args[0], args[1]);
break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16u_i64:
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
+ case INDEX_op_ext_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg)
tcg_out_opc_imm(s, OPC_ADDIW, ret, arg, 0);
}

+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
+{
+ tcg_out_ext32s(s, ret, arg);
+}
+
static void tcg_out_ldst(TCGContext *s, RISCVInsn opc, TCGReg data,
TCGReg addr, intptr_t offset)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
break;

case INDEX_op_extrl_i64_i32:
- case INDEX_op_ext_i32_i64:
tcg_out_ext32s(s, a0, a1);
break;

@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16u_i64:
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
+ case INDEX_op_ext_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32u(TCGContext *s, TCGReg dest, TCGReg src)
tcg_out_insn(s, RRE, LLGFR, dest, src);
}

+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg dest, TCGReg src)
+{
+ tcg_out_ext32s(s, dest, src);
+}
+
static void tgen_andi_risbg(TCGContext *s, TCGReg out, TCGReg in, uint64_t val)
{
int msb, lsb;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
break;

- case INDEX_op_ext_i32_i64:
- tcg_out_ext32s(s, args[0], args[1]);
- break;
case INDEX_op_extu_i32_i64:
tcg_out_ext32u(s, args[0], args[1]);
break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16u_i64:
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
+ case INDEX_op_ext_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_arithi(s, rd, rs, 0, SHIFT_SRL);
}

+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_out_ext32s(s, rd, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_divu_i64:
c = ARITH_UDIVX;
goto gen_arith;
- case INDEX_op_ext_i32_i64:
- tcg_out_ext32s(s, a0, a1);
- break;
case INDEX_op_extu_i32_i64:
tcg_out_ext32u(s, a0, a1);
break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16u_i64:
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
+ case INDEX_op_ext_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_op_rr(s, INDEX_op_ext32u_i64, rd, rs);
}

+static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_out_ext32s(s, rd, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,

CASE_32_64(neg) /* Optional (TCG_TARGET_HAS_neg_*). */
CASE_32_64(not) /* Optional (TCG_TARGET_HAS_not_*). */
- CASE_64(ext_i32)
CASE_64(extu_i32)
CASE_32_64(ctpop) /* Optional (TCG_TARGET_HAS_ctpop_*). */
case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext16u_i64:
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
+ case INDEX_op_ext_i32_i64:
default:
g_assert_not_reached();
}
--
2.34.1

Deleted patch

We will need a backend interface for type extension with zero.
Use it in tcg_reg_alloc_op in the meantime.
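
As with the signed variant, the hook is a thin wrapper on 64-bit hosts.
Lifted verbatim from the i386 hunk below:

    static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg dest, TCGReg src)
    {
        tcg_out_ext32u(s, dest, src);
    }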

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/tcg.c | 4 ++++
tcg/aarch64/tcg-target.c.inc | 10 ++++++----
tcg/arm/tcg-target.c.inc | 5 +++++
tcg/i386/tcg-target.c.inc | 7 ++++++-
tcg/loongarch64/tcg-target.c.inc | 10 ++++++----
tcg/mips/tcg-target.c.inc | 9 ++++++---
tcg/ppc/tcg-target.c.inc | 10 ++++++----
tcg/riscv/tcg-target.c.inc | 10 ++++++----
tcg/s390x/tcg-target.c.inc | 10 ++++++----
tcg/sparc64/tcg-target.c.inc | 9 ++++++---
tcg/tci/tcg-target.c.inc | 7 ++++++-
11 files changed, 63 insertions(+), 28 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext16u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg);
+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
static void tcg_out_goto_tb(TCGContext *s, int which);
@@ -XXX,XX +XXX,XX @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
case INDEX_op_ext_i32_i64:
tcg_out_exts_i32_i64(s, new_args[0], new_args[1]);
break;
+ case INDEX_op_extu_i32_i64:
+ tcg_out_extu_i32_i64(s, new_args[0], new_args[1]);
+ break;
default:
if (def->flags & TCG_OPF_VECTOR) {
tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32u(TCGContext *s, TCGReg rd, TCGReg rn)
tcg_out_movr(s, TCG_TYPE_I32, rd, rn);
}

+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+ tcg_out_ext32u(s, rd, rn);
+}
+
static void tcg_out_addsubi(TCGContext *s, int ext, TCGReg rd,
TCGReg rn, int64_t aimm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
break;

- case INDEX_op_extu_i32_i64:
- tcg_out_ext32u(s, a0, a1);
- break;
-
case INDEX_op_deposit_i64:
case INDEX_op_deposit_i32:
tcg_out_dep(s, ext, a0, REG0(2), args[3], args[4]);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
+ case INDEX_op_extu_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rn)
g_assert_not_reached();
}

+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+ g_assert_not_reached();
+}
+
static void tcg_out_bswap16(TCGContext *s, ARMCond cond,
TCGReg rd, TCGReg rn, int flags)
{
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg dest, TCGReg src)
tcg_out_ext32s(s, dest, src);
}

+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg dest, TCGReg src)
+{
+ tcg_out_ext32u(s, dest, src);
+}
+
static inline void tcg_out_bswap64(TCGContext *s, int reg)
{
tcg_out_opc(s, OPC_BSWAP + P_REXW + LOWREGMASK(reg), 0, reg, 0);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_bswap64_i64:
tcg_out_bswap64(s, a0);
break;
- case INDEX_op_extu_i32_i64:
case INDEX_op_extrl_i64_i32:
tcg_out_ext32u(s, a0, a1);
break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
+ case INDEX_op_extu_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
tcg_out_ext32s(s, ret, arg);
}

+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
+{
+ tcg_out_ext32u(s, ret, arg);
+}
+
static void tcg_out_clzctz(TCGContext *s, LoongArchInsn opc,
TCGReg a0, TCGReg a1, TCGReg a2,
bool c2, bool is_32bit)
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
break;

- case INDEX_op_extu_i32_i64:
- tcg_out_ext32u(s, a0, a1);
- break;
-
case INDEX_op_extrl_i64_i32:
tcg_out_ext32s(s, a0, a1);
break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
+ case INDEX_op_extu_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_ext32s(s, rd, rs);
}

+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_out_ext32u(s, rd, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_extrl_i64_i32:
tcg_out_ext32s(s, a0, a1);
break;
- case INDEX_op_extu_i32_i64:
- tcg_out_ext32u(s, a0, a1);
- break;

case INDEX_op_sar_i32:
i1 = OPC_SRAV, i2 = OPC_SRA;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
+ case INDEX_op_extu_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg dst, TCGReg src)
tcg_out_ext32s(s, dst, src);
}

+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg dst, TCGReg src)
+{
+ tcg_out_ext32u(s, dst, src);
+}
+
static inline void tcg_out_shli32(TCGContext *s, TCGReg dst, TCGReg src, int c)
{
tcg_out_rlw(s, RLWINM, dst, src, c, 0, 31 - c);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_qemu_st(s, args, true);
break;

- case INDEX_op_extu_i32_i64:
- tcg_out_ext32u(s, args[0], args[1]);
- break;
-
case INDEX_op_setcond_i32:
tcg_out_setcond(s, TCG_TYPE_I32, args[3], args[0], args[1], args[2],
const_args[2]);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
+ case INDEX_op_extu_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
tcg_out_ext32s(s, ret, arg);
}

+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
+{
+ tcg_out_ext32u(s, ret, arg);
+}
+
static void tcg_out_ldst(TCGContext *s, RISCVInsn opc, TCGReg data,
TCGReg addr, intptr_t offset)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_qemu_st(s, args, true);
break;

- case INDEX_op_extu_i32_i64:
- tcg_out_ext32u(s, a0, a1);
- break;
-
case INDEX_op_extrl_i64_i32:
tcg_out_ext32s(s, a0, a1);
break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
+ case INDEX_op_extu_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg dest, TCGReg src)
tcg_out_ext32s(s, dest, src);
}

+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg dest, TCGReg src)
+{
+ tcg_out_ext32u(s, dest, src);
+}
+
static void tgen_andi_risbg(TCGContext *s, TCGReg out, TCGReg in, uint64_t val)
{
int msb, lsb;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
}
break;

- case INDEX_op_extu_i32_i64:
- tcg_out_ext32u(s, args[0], args[1]);
- break;
-
case INDEX_op_add2_i64:
if (const_args[4]) {
if ((int64_t)args[4] >= 0) {
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
+ case INDEX_op_extu_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_ext32s(s, rd, rs);
}

+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_out_ext32u(s, rd, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_divu_i64:
c = ARITH_UDIVX;
goto gen_arith;
- case INDEX_op_extu_i32_i64:
- tcg_out_ext32u(s, a0, a1);
- break;
case INDEX_op_extrl_i64_i32:
tcg_out_mov(s, TCG_TYPE_I32, a0, a1);
break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
+ case INDEX_op_extu_i32_i64:
default:
g_assert_not_reached();
}
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_ext32s(s, rd, rs);
}

+static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_out_ext32u(s, rd, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,

CASE_32_64(neg) /* Optional (TCG_TARGET_HAS_neg_*). */
CASE_32_64(not) /* Optional (TCG_TARGET_HAS_not_*). */
- CASE_64(extu_i32)
CASE_32_64(ctpop) /* Optional (TCG_TARGET_HAS_ctpop_*). */
case INDEX_op_bswap32_i32: /* Optional (TCG_TARGET_HAS_bswap32_i32). */
case INDEX_op_bswap64_i64: /* Optional (TCG_TARGET_HAS_bswap64_i64). */
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32s_i64:
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
+ case INDEX_op_extu_i32_i64:
default:
g_assert_not_reached();
}
--
2.34.1

Deleted patch

We will need a backend interface for type truncation. For those backends
that did not enable TCG_TARGET_HAS_extrl_i64_i32, use tcg_out_mov.
Use it in tcg_reg_alloc_op in the meantime.
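
On hosts whose canonical i32-in-64 representation is sign-extended
(riscv, loongarch64, mips), the hook re-canonicalizes with the ext32s
helper; elsewhere a plain 32-bit move suffices, as in the sparc64 hunk
below:

    static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
    {
        tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
    }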

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/tcg.c | 4 ++++
tcg/aarch64/tcg-target.c.inc | 6 ++++++
tcg/arm/tcg-target.c.inc | 5 +++++
tcg/i386/tcg-target.c.inc | 9 ++++++---
tcg/loongarch64/tcg-target.c.inc | 10 ++++++----
tcg/mips/tcg-target.c.inc | 9 ++++++---
tcg/ppc/tcg-target.c.inc | 7 +++++++
tcg/riscv/tcg-target.c.inc | 10 ++++++----
tcg/s390x/tcg-target.c.inc | 6 ++++++
tcg/sparc64/tcg-target.c.inc | 9 ++++++---
tcg/tci/tcg-target.c.inc | 7 +++++++
11 files changed, 65 insertions(+), 17 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_ext32u(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg);
+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
static void tcg_out_goto_tb(TCGContext *s, int which);
@@ -XXX,XX +XXX,XX @@ static void tcg_reg_alloc_op(TCGContext *s, const TCGOp *op)
case INDEX_op_extu_i32_i64:
tcg_out_extu_i32_i64(s, new_args[0], new_args[1]);
break;
+ case INDEX_op_extrl_i64_i32:
+ tcg_out_extrl_i64_i32(s, new_args[0], new_args[1]);
+ break;
default:
if (def->flags & TCG_OPF_VECTOR) {
tcg_out_vec_op(s, op->opc, TCGOP_VECL(op), TCGOP_VECE(op),
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rn)
tcg_out_ext32u(s, rd, rn);
}

+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+ tcg_out_mov(s, TCG_TYPE_I32, rd, rn);
+}
+
static void tcg_out_addsubi(TCGContext *s, int ext, TCGReg rd,
TCGReg rn, int64_t aimm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
+ case INDEX_op_extrl_i64_i32:
default:
g_assert_not_reached();
}
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rn)
g_assert_not_reached();
}

+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+ g_assert_not_reached();
+}
+
static void tcg_out_bswap16(TCGContext *s, ARMCond cond,
TCGReg rd, TCGReg rn, int flags)
{
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg dest, TCGReg src)
tcg_out_ext32u(s, dest, src);
}

+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg dest, TCGReg src)
+{
+ tcg_out_ext32u(s, dest, src);
+}
+
static inline void tcg_out_bswap64(TCGContext *s, int reg)
{
tcg_out_opc(s, OPC_BSWAP + P_REXW + LOWREGMASK(reg), 0, reg, 0);
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_bswap64_i64:
tcg_out_bswap64(s, a0);
break;
- case INDEX_op_extrl_i64_i32:
- tcg_out_ext32u(s, a0, a1);
- break;
case INDEX_op_extrh_i64_i32:
tcg_out_shifti(s, SHIFT_SHR + P_REXW, a0, 32);
break;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
+ case INDEX_op_extrl_i64_i32:
default:
g_assert_not_reached();
}
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
tcg_out_ext32u(s, ret, arg);
}

+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg ret, TCGReg arg)
+{
+ tcg_out_ext32s(s, ret, arg);
+}
+
static void tcg_out_clzctz(TCGContext *s, LoongArchInsn opc,
TCGReg a0, TCGReg a1, TCGReg a2,
bool c2, bool is_32bit)
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_brcond(s, a2, a0, a1, arg_label(args[3]));
break;

- case INDEX_op_extrl_i64_i32:
- tcg_out_ext32s(s, a0, a1);
- break;
-
case INDEX_op_extrh_i64_i32:
tcg_out_opc_srai_d(s, a0, a1, 32);
break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
+ case INDEX_op_extrl_i64_i32:
default:
g_assert_not_reached();
}
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_ext32u(s, rd, rs);
}

+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_out_ext32s(s, rd, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_extrh_i64_i32:
tcg_out_dsra(s, a0, a1, 32);
break;
- case INDEX_op_extrl_i64_i32:
- tcg_out_ext32s(s, a0, a1);
- break;

case INDEX_op_sar_i32:
i1 = OPC_SRAV, i2 = OPC_SRA;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
+ case INDEX_op_extrl_i64_i32:
default:
g_assert_not_reached();
}
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg dst, TCGReg src)
tcg_out_ext32u(s, dst, src);
}

+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rn)
+{
+ tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
+ tcg_out_mov(s, TCG_TYPE_I32, rd, rn);
+}
+
static inline void tcg_out_shli32(TCGContext *s, TCGReg dst, TCGReg src, int c)
{
tcg_out_rlw(s, RLWINM, dst, src, c, 0, 31 - c);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
+ case INDEX_op_extrl_i64_i32:
default:
g_assert_not_reached();
}
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
tcg_out_ext32u(s, ret, arg);
}

+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg ret, TCGReg arg)
+{
+ tcg_out_ext32s(s, ret, arg);
+}
+
static void tcg_out_ldst(TCGContext *s, RISCVInsn opc, TCGReg data,
TCGReg addr, intptr_t offset)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_qemu_st(s, args, true);
break;

- case INDEX_op_extrl_i64_i32:
- tcg_out_ext32s(s, a0, a1);
- break;
-
case INDEX_op_extrh_i64_i32:
tcg_out_opc_imm(s, OPC_SRAI, a0, a1, 32);
break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
+ case INDEX_op_extrl_i64_i32:
default:
g_assert_not_reached();
}
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg dest, TCGReg src)
tcg_out_ext32u(s, dest, src);
}

+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg dest, TCGReg src)
+{
+ tcg_out_mov(s, TCG_TYPE_I32, dest, src);
+}
+
static void tgen_andi_risbg(TCGContext *s, TCGReg out, TCGReg in, uint64_t val)
{
int msb, lsb;
@@ -XXX,XX +XXX,XX @@ static inline void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
+ case INDEX_op_extrl_i64_i32:
default:
g_assert_not_reached();
}
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_ext32u(s, rd, rs);
}

+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_divu_i64:
c = ARITH_UDIVX;
goto gen_arith;
- case INDEX_op_extrl_i64_i32:
- tcg_out_mov(s, TCG_TYPE_I32, a0, a1);
- break;
case INDEX_op_extrh_i64_i32:
tcg_out_arithi(s, a0, a1, 32, SHIFT_SRLX);
break;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
+ case INDEX_op_extrl_i64_i32:
default:
g_assert_not_reached();
}
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_ext32u(s, rd, rs);
}

+static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
+{
+ tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
+ tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
case INDEX_op_ext32u_i64:
case INDEX_op_ext_i32_i64:
case INDEX_op_extu_i32_i64:
+ case INDEX_op_extrl_i64_i32:
default:
g_assert_not_reached();
}
--
2.34.1

Deleted patch

This is common code in most qemu_{ld,st} slow paths, extending the
input value for the store helper data argument or extending the
return value from the load helper.
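
A typical call site, taken from the aarch64 hunk below, widens the
value the load helper returned in its ABI register to the type the
guest operation expects:

    tcg_out_movext(s, lb->type, lb->datalo_reg,
                   TCG_TYPE_REG, opc & MO_SSIZE, TCG_REG_X0);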

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/tcg.c | 63 ++++++++++++++++++++++++++++++++
tcg/aarch64/tcg-target.c.inc | 8 +---
tcg/arm/tcg-target.c.inc | 16 ++------
tcg/i386/tcg-target.c.inc | 30 +++------------
tcg/loongarch64/tcg-target.c.inc | 53 ++++-----------------------
tcg/ppc/tcg-target.c.inc | 38 +++++--------------
tcg/riscv/tcg-target.c.inc | 13 +------
tcg/s390x/tcg-target.c.inc | 19 ++--------
tcg/sparc64/tcg-target.c.inc | 31 +++-------------
9 files changed, 103 insertions(+), 168 deletions(-)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ void tcg_raise_tb_overflow(TCGContext *s)
siglongjmp(s->jmp_trans, -2);
}

+/**
+ * tcg_out_movext -- move and extend
+ * @s: tcg context
+ * @dst_type: integral type for destination
+ * @dst: destination register
+ * @src_type: integral type for source
+ * @src_ext: extension to apply to source
+ * @src: source register
+ *
+ * Move or extend @src into @dst, depending on @src_ext and the types.
+ */
+static void __attribute__((unused))
+tcg_out_movext(TCGContext *s, TCGType dst_type, TCGReg dst,
+ TCGType src_type, MemOp src_ext, TCGReg src)
+{
+ switch (src_ext) {
+ case MO_UB:
+ tcg_out_ext8u(s, dst, src);
+ break;
+ case MO_SB:
+ tcg_out_ext8s(s, dst_type, dst, src);
+ break;
+ case MO_UW:
+ tcg_out_ext16u(s, dst, src);
+ break;
+ case MO_SW:
+ tcg_out_ext16s(s, dst_type, dst, src);
+ break;
+ case MO_UL:
+ case MO_SL:
+ if (dst_type == TCG_TYPE_I32) {
+ if (src_type == TCG_TYPE_I32) {
+ tcg_out_mov(s, TCG_TYPE_I32, dst, src);
+ } else {
+ tcg_out_extrl_i64_i32(s, dst, src);
+ }
+ } else if (src_type == TCG_TYPE_I32) {
+ if (src_ext & MO_SIGN) {
+ tcg_out_exts_i32_i64(s, dst, src);
+ } else {
+ tcg_out_extu_i32_i64(s, dst, src);
+ }
+ } else {
+ if (src_ext & MO_SIGN) {
+ tcg_out_ext32s(s, dst, src);
+ } else {
+ tcg_out_ext32u(s, dst, src);
+ }
+ }
+ break;
+ case MO_UQ:
+ tcg_debug_assert(TCG_TARGET_REG_BITS == 64);
+ if (dst_type == TCG_TYPE_I32) {
+ tcg_out_extrl_i64_i32(s, dst, src);
+ } else {
+ tcg_out_mov(s, TCG_TYPE_I64, dst, src);
+ }
+ break;
+ default:
+ g_assert_not_reached();
+ }
+}
+
#define C_PFX1(P, A) P##A
#define C_PFX2(P, A, B) P##A##_##B
#define C_PFX3(P, A, B, C) P##A##_##B##_##C
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
{
MemOpIdx oi = lb->oi;
MemOp opc = get_memop(oi);
- MemOp size = opc & MO_SIZE;

if (!reloc_pc19(lb->label_ptr[0], tcg_splitwx_to_rx(s->code_ptr))) {
return false;
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_X2, oi);
tcg_out_adr(s, TCG_REG_X3, lb->raddr);
tcg_out_call_int(s, qemu_ld_helpers[opc & MO_SIZE]);
- if (opc & MO_SIGN) {
- tcg_out_sxt(s, lb->type, size, lb->datalo_reg, TCG_REG_X0);
- } else {
- tcg_out_mov(s, size == MO_64, lb->datalo_reg, TCG_REG_X0);
- }

+ tcg_out_movext(s, lb->type, lb->datalo_reg,
+ TCG_TYPE_REG, opc & MO_SSIZE, TCG_REG_X0);
tcg_out_goto(s, lb->raddr);
return true;
}
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)

datalo = lb->datalo_reg;
datahi = lb->datahi_reg;
- switch (opc & MO_SSIZE) {
- case MO_SB:
- tcg_out_ext8s(s, TCG_TYPE_I32, datalo, TCG_REG_R0);
- break;
- case MO_SW:
- tcg_out_ext16s(s, TCG_TYPE_I32, datalo, TCG_REG_R0);
- break;
- default:
- tcg_out_mov_reg(s, COND_AL, datalo, TCG_REG_R0);
- break;
- case MO_UQ:
+ if ((opc & MO_SIZE) == MO_64) {
if (datalo != TCG_REG_R1) {
tcg_out_mov_reg(s, COND_AL, datalo, TCG_REG_R0);
tcg_out_mov_reg(s, COND_AL, datahi, TCG_REG_R1);
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
tcg_out_mov_reg(s, COND_AL, datahi, TCG_REG_R1);
tcg_out_mov_reg(s, COND_AL, datalo, TCG_REG_TMP);
}
- break;
+ } else {
+ tcg_out_movext(s, TCG_TYPE_I32, datalo,
+ TCG_TYPE_I32, opc & MO_SSIZE, TCG_REG_R0);
}

tcg_out_goto(s, COND_AL, lb->raddr);
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
tcg_out_branch(s, 1, qemu_ld_helpers[opc & (MO_BSWAP | MO_SIZE)]);

data_reg = l->datalo_reg;
- switch (opc & MO_SSIZE) {
- case MO_SB:
- tcg_out_ext8s(s, l->type, data_reg, TCG_REG_EAX);
- break;
- case MO_SW:
- tcg_out_ext16s(s, l->type, data_reg, TCG_REG_EAX);
- break;
-#if TCG_TARGET_REG_BITS == 64
- case MO_SL:
- tcg_out_ext32s(s, data_reg, TCG_REG_EAX);
- break;
-#endif
- case MO_UB:
- case MO_UW:
- /* Note that the helpers have zero-extended to tcg_target_long. */
- case MO_UL:
- tcg_out_mov(s, TCG_TYPE_I32, data_reg, TCG_REG_EAX);
- break;
- case MO_UQ:
- if (TCG_TARGET_REG_BITS == 64) {
- tcg_out_mov(s, TCG_TYPE_I64, data_reg, TCG_REG_RAX);
- } else if (data_reg == TCG_REG_EDX) {
+ if (TCG_TARGET_REG_BITS == 32 && (opc & MO_SIZE) == MO_64) {
+ if (data_reg == TCG_REG_EDX) {
/* xchg %edx, %eax */
tcg_out_opc(s, OPC_XCHG_ax_r32 + TCG_REG_EDX, 0, 0, 0);
tcg_out_mov(s, TCG_TYPE_I32, l->datahi_reg, TCG_REG_EAX);
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
tcg_out_mov(s, TCG_TYPE_I32, data_reg, TCG_REG_EAX);
tcg_out_mov(s, TCG_TYPE_I32, l->datahi_reg, TCG_REG_EDX);
}
- break;
- default:
- g_assert_not_reached();
+ } else {
+ tcg_out_movext(s, l->type, data_reg,
+ TCG_TYPE_REG, opc & MO_SSIZE, TCG_REG_EAX);
}

/* Jump to the code corresponding to next IR of qemu_st */
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
MemOpIdx oi = l->oi;
MemOp opc = get_memop(oi);
MemOp size = opc & MO_SIZE;
- TCGType type = l->type;

/* resolve label address */
if (!reloc_br_sk16(l->label_ptr[0], tcg_splitwx_to_rx(s->code_ptr))) {
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *l)

tcg_out_call_int(s, qemu_ld_helpers[size], false);

- switch (opc & MO_SSIZE) {
- case MO_SB:
- tcg_out_ext8s(s, type, l->datalo_reg, TCG_REG_A0);
- break;
- case MO_SW:
- tcg_out_ext16s(s, type, l->datalo_reg, TCG_REG_A0);
- break;
- case MO_SL:
- tcg_out_ext32s(s, l->datalo_reg, TCG_REG_A0);
- break;
- case MO_UL:
- if (type == TCG_TYPE_I32) {
- /* MO_UL loads of i32 should be sign-extended too */
- tcg_out_ext32s(s, l->datalo_reg, TCG_REG_A0);
- break;
- }
- /* fallthrough */
- default:
- tcg_out_mov(s, type, l->datalo_reg, TCG_REG_A0);
- break;
- }
-
+ tcg_out_movext(s, l->type, l->datalo_reg,
+ TCG_TYPE_REG, opc & MO_SSIZE, TCG_REG_A0);
return tcg_out_goto(s, l->raddr);
}

@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
/* call store helper */
tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_A0, TCG_AREG0);
tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_A1, l->addrlo_reg);
- switch (size) {
- case MO_8:
- tcg_out_ext8u(s, TCG_REG_A2, l->datalo_reg);
- break;
- case MO_16:
- tcg_out_ext16u(s, TCG_REG_A2, l->datalo_reg);
- break;
- case MO_32:
- tcg_out_ext32u(s, TCG_REG_A2, l->datalo_reg);
- break;
- case MO_64:
- tcg_out_mov(s, TCG_TYPE_I64, TCG_REG_A2, l->datalo_reg);
- break;
- default:
- g_assert_not_reached();
- break;
- }
+ tcg_out_movext(s, size == MO_64 ? TCG_TYPE_I64 : TCG_TYPE_I32, TCG_REG_A2,
+ l->type, size, l->datalo_reg);
tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A3, oi);
tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_A4, (tcg_target_long)l->raddr);

@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_st_indexed(TCGContext *s, TCGReg data,
}
}

-static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
+static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args, TCGType type)
{
TCGReg addr_regl;
TCGReg data_regl;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_st(TCGContext *s, const TCGArg *args)
tcg_out_tlb_load(s, addr_regl, oi, label_ptr, 0);
base = tcg_out_zext_addr_if_32_bit(s, addr_regl, TCG_REG_TMP0);
tcg_out_qemu_st_indexed(s, data_regl, base, TCG_REG_TMP2, opc);
- add_qemu_ldst_label(s, 0, oi,
- 0, /* type param is unused for stores */
+ add_qemu_ldst_label(s, 0, oi, type,
data_regl, addr_regl,
s->code_ptr, label_ptr);
#else
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_qemu_ld(s, args, TCG_TYPE_I64);
break;
case INDEX_op_qemu_st_i32:
- tcg_out_qemu_st(s, args);
+ tcg_out_qemu_st(s, args, TCG_TYPE_I32);
break;
case INDEX_op_qemu_st_i64:
- tcg_out_qemu_st(s, args);
+ tcg_out_qemu_st(s, args, TCG_TYPE_I64);
break;

case INDEX_op_mov_i32: /* Always emitted via tcg_out_mov. */
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static const uint32_t qemu_stx_opc[(MO_SIZE + MO_BSWAP) + 1] = {
[MO_BSWAP | MO_UQ] = STDBRX,
};

-static const uint32_t qemu_exts_opc[4] = {
- EXTSB, EXTSH, EXTSW, 0
-};
-
#if defined (CONFIG_SOFTMMU)
/* helper signature: helper_ld_mmu(CPUState *env, target_ulong addr,
* int mmu_idx, uintptr_t ra)
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_ld_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
if (TCG_TARGET_REG_BITS == 32 && (opc & MO_SIZE) == MO_64) {
tcg_out_mov(s, TCG_TYPE_I32, lo, TCG_REG_R4);
tcg_out_mov(s, TCG_TYPE_I32, hi, TCG_REG_R3);
- } else if (opc & MO_SIGN) {
- uint32_t insn = qemu_exts_opc[opc & MO_SIZE];
- tcg_out32(s, insn | RA(lo) | RS(TCG_REG_R3));
} else {
- tcg_out_mov(s, TCG_TYPE_REG, lo, TCG_REG_R3);
+ tcg_out_movext(s, lb->type, lo,
+ TCG_TYPE_REG, opc & MO_SSIZE, TCG_REG_R3);
}

tcg_out_b(s, 0, lb->raddr);
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)

lo = lb->datalo_reg;
hi = lb->datahi_reg;
- if (TCG_TARGET_REG_BITS == 32) {
- switch (s_bits) {
- case MO_64:
- arg |= (TCG_TARGET_CALL_ARG_I64 == TCG_CALL_ARG_EVEN);
- tcg_out_mov(s, TCG_TYPE_I32, arg++, hi);
- /* FALLTHRU */
- case MO_32:
- tcg_out_mov(s, TCG_TYPE_I32, arg++, lo);
- break;
- default:
- tcg_out_rlw(s, RLWINM, arg++, lo, 0, 32 - (8 << s_bits), 31);
- break;
- }
+ if (TCG_TARGET_REG_BITS == 32 && s_bits == MO_64) {
+ arg |= (TCG_TARGET_CALL_ARG_I64 == TCG_CALL_ARG_EVEN);
+ tcg_out_mov(s, TCG_TYPE_I32, arg++, hi);
+ tcg_out_mov(s, TCG_TYPE_I32, arg++, lo);
} else {
- if (s_bits == MO_64) {
- tcg_out_mov(s, TCG_TYPE_I64, arg++, lo);
- } else {
- tcg_out_rld(s, RLDICL, arg++, lo, 0, 64 - (8 << s_bits));
- }
+ tcg_out_movext(s, s_bits == MO_64 ? TCG_TYPE_I64 : TCG_TYPE_I32,
+ arg++, lb->type, s_bits, lo);
}

tcg_out_movi(s, TCG_TYPE_I32, arg++, oi);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld(TCGContext *s, const TCGArg *args, bool is_64)
} else {
insn = qemu_ldx_opc[opc & (MO_SIZE | MO_BSWAP)];
tcg_out32(s, insn | TAB(datalo, rbase, addrlo));
- insn = qemu_exts_opc[s_bits];
- tcg_out32(s, insn | RA(datalo) | RS(datalo));
+ tcg_out_movext(s, TCG_TYPE_REG, datalo,
+ TCG_TYPE_REG, opc & MO_SSIZE, datalo);
}
}

diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *l)
/* call store helper */
tcg_out_mov(s, TCG_TYPE_PTR, a0, TCG_AREG0);
tcg_out_mov(s, TCG_TYPE_PTR, a1, l->addrlo_reg);
- tcg_out_mov(s, TCG_TYPE_PTR, a2, l->datalo_reg);
- switch (s_bits) {
- case MO_8:
- tcg_out_ext8u(s, a2, a2);
- break;
- case MO_16:
- tcg_out_ext16u(s, a2, a2);
- break;
- default:
- break;
- }
+ tcg_out_movext(s, s_bits == MO_64 ? TCG_TYPE_I64 : TCG_TYPE_I32, a2,
+ l->type, s_bits, l->datalo_reg);
tcg_out_movi(s, TCG_TYPE_PTR, a3, oi);
tcg_out_movi(s, TCG_TYPE_PTR, a4, (tcg_target_long)l->raddr);

diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
TCGReg data_reg = lb->datalo_reg;
MemOpIdx oi = lb->oi;
MemOp opc = get_memop(oi);
+ MemOp size = opc & MO_SIZE;

if (!patch_reloc(lb->label_ptr[0], R_390_PC16DBL,
(intptr_t)tcg_splitwx_to_rx(s->code_ptr), 2)) {
@@ -XXX,XX +XXX,XX @@ static bool tcg_out_qemu_st_slow_path(TCGContext *s, TCGLabelQemuLdst *lb)
if (TARGET_LONG_BITS == 64) {
tcg_out_mov(s, TCG_TYPE_I64, TCG_REG_R3, addr_reg);
}
- switch (opc & MO_SIZE) {
- case MO_UB:
- tcg_out_ext8u(s, TCG_REG_R4, data_reg);
- break;
- case MO_UW:
- tcg_out_ext16u(s, TCG_REG_R4, data_reg);
- break;
- case MO_UL:
- tcg_out_ext32u(s, TCG_REG_R4, data_reg);
- break;
- case MO_UQ:
- tcg_out_mov(s, TCG_TYPE_I64, TCG_REG_R4, data_reg);
- break;
- default:
- g_assert_not_reached();
- }
+ tcg_out_movext(s, size == MO_64 ? TCG_TYPE_I64 : TCG_TYPE_I32,
+ TCG_REG_R4, lb->type, size, data_reg);
tcg_out_movi(s, TCG_TYPE_I32, TCG_REG_R5, oi);
tcg_out_movi(s, TCG_TYPE_PTR, TCG_REG_R6, (uintptr_t)lb->raddr);
tcg_out_call_int(s, qemu_st_helpers[opc & (MO_BSWAP | MO_SIZE)]);
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_mb(TCGContext *s, TCGArg a0)
static const tcg_insn_unit *qemu_ld_trampoline[(MO_SSIZE | MO_BSWAP) + 1];
static const tcg_insn_unit *qemu_st_trampoline[(MO_SIZE | MO_BSWAP) + 1];

-static void emit_extend(TCGContext *s, TCGReg r, int op)
-{
- /* Emit zero extend of 8, 16 or 32 bit data as
- * required by the MO_* value op; do nothing for 64 bit.
- */
- switch (op & MO_SIZE) {
- case MO_8:
- tcg_out_ext8u(s, r, r);
- break;
- case MO_16:
- tcg_out_ext16u(s, r, r);
- break;
- case MO_32:
- tcg_out_ext32u(s, r, r);
- break;
- case MO_64:
- break;
- }
-}
-
static void build_trampolines(TCGContext *s)
{
static void * const qemu_ld_helpers[] = {
@@ -XXX,XX +XXX,XX @@ static void build_trampolines(TCGContext *s)
}
qemu_st_trampoline[i] = tcg_splitwx_to_rx(s->code_ptr);

- emit_extend(s, TCG_REG_O2, i);
-
/* Set the retaddr operand. */
tcg_out_mov(s, TCG_TYPE_PTR, TCG_REG_O4, TCG_REG_O7);

@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_ld(TCGContext *s, TCGReg data, TCGReg addr,
}

static void tcg_out_qemu_st(TCGContext *s, TCGReg data, TCGReg addr,
- MemOpIdx oi)
+ MemOpIdx oi, TCGType data_type)
{
MemOp memop = get_memop(oi);
tcg_insn_unit *label_ptr;
@@ -XXX,XX +XXX,XX @@ static void tcg_out_qemu_st(TCGContext *s, TCGReg data, TCGReg addr,
/* TLB Miss. */

tcg_out_mov(s, TCG_TYPE_REG, TCG_REG_O1, addrz);
- tcg_out_mov(s, TCG_TYPE_REG, TCG_REG_O2, data);
+ tcg_out_movext(s, (memop & MO_SIZE) == MO_64 ? TCG_TYPE_I64 : TCG_TYPE_I32,
+ TCG_REG_O2, data_type, memop & MO_SIZE, data);

func = qemu_st_trampoline[memop & (MO_BSWAP | MO_SIZE)];
tcg_debug_assert(func != NULL);
@@ -XXX,XX +XXX,XX @@ static void tcg_out_op(TCGContext *s, TCGOpcode opc,
tcg_out_qemu_ld(s, a0, a1, a2, true);
break;
case INDEX_op_qemu_st_i32:
+ tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I32);
+ break;
case INDEX_op_qemu_st_i64:
- tcg_out_qemu_st(s, a0, a1, a2);
503
+ tcg_out_qemu_st(s, a0, a1, a2, TCG_TYPE_I64);
504
break;
505
506
case INDEX_op_ld32s_i64:
507
--
508
2.34.1
509
510
diff view generated by jsdifflib
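[Editorial note: the conversions above all funnel into one helper. As a
standalone model of the extension rule that tcg_out_movext centralizes,
keyed on an (MO_SIZE | MO_SIGN) value as in the calls above; this is an
illustrative sketch with stand-in enum values (the real MemOp encoding
differs), not the QEMU implementation:

    #include <stdint.h>
    #include <stdio.h>

    /* Stand-in MemOp encoding: sizes 0..3 in the low bits, a sign flag
     * above them, mirroring how (opc & MO_SSIZE) is used in the patch. */
    enum { MO_8 = 0, MO_16 = 1, MO_32 = 2, MO_64 = 3, MO_SIGN = 4 };

    /* What a movext-style helper computes: a move combined with the
     * sign- or zero-extension implied by the memory-op size. */
    static uint64_t movext(uint64_t src, int memop)
    {
        switch (memop) {
        case MO_8:            return (uint8_t)src;
        case MO_8 | MO_SIGN:  return (uint64_t)(int64_t)(int8_t)src;
        case MO_16:           return (uint16_t)src;
        case MO_16 | MO_SIGN: return (uint64_t)(int64_t)(int16_t)src;
        case MO_32:           return (uint32_t)src;
        case MO_32 | MO_SIGN: return (uint64_t)(int64_t)(int32_t)src;
        default:              return src;          /* MO_64: plain move */
        }
    }

    int main(void)
    {
        printf("%#llx\n", (unsigned long long)movext(0x80, MO_8 | MO_SIGN));
        printf("%#llx\n", (unsigned long long)movext(0x80, MO_8));
        return 0;
    }

Each backend supplies the per-size instructions; the call sites only
state source and destination type plus the MemOp, as in the diffs above.]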
Deleted patch
We will want a backend interface for register swapping.
This is only properly defined for x86; all others get a
stub version that always indicates failure.

Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/tcg.c | 2 ++
tcg/aarch64/tcg-target.c.inc | 5 +++++
tcg/arm/tcg-target.c.inc | 5 +++++
tcg/i386/tcg-target.c.inc | 8 ++++++++
tcg/loongarch64/tcg-target.c.inc | 5 +++++
tcg/mips/tcg-target.c.inc | 5 +++++
tcg/ppc/tcg-target.c.inc | 5 +++++
tcg/riscv/tcg-target.c.inc | 5 +++++
tcg/s390x/tcg-target.c.inc | 5 +++++
tcg/sparc64/tcg-target.c.inc | 5 +++++
tcg/tci/tcg-target.c.inc | 5 +++++
11 files changed, 55 insertions(+)

diff --git a/tcg/tcg.c b/tcg/tcg.c
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg.c
+++ b/tcg/tcg.c
@@ -XXX,XX +XXX,XX @@ static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg ret, TCGReg arg);
static void tcg_out_addi_ptr(TCGContext *s, TCGReg, TCGReg, tcg_target_long);
+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+ __attribute__((unused));
static void tcg_out_exit_tb(TCGContext *s, uintptr_t arg);
static void tcg_out_goto_tb(TCGContext *s, int which);
static void tcg_out_op(TCGContext *s, TCGOpcode opc,
diff --git a/tcg/aarch64/tcg-target.c.inc b/tcg/aarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/aarch64/tcg-target.c.inc
+++ b/tcg/aarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_movi(TCGContext *s, TCGType type, TCGReg rd,
tcg_out_insn(s, 3305, LDR, 0, rd);
}

+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+ return false;
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
diff --git a/tcg/arm/tcg-target.c.inc b/tcg/arm/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/arm/tcg-target.c.inc
+++ b/tcg/arm/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_movi(TCGContext *s, TCGType type,
tcg_out_movi32(s, COND_AL, ret, arg);
}

+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+ return false;
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
diff --git a/tcg/i386/tcg-target.c.inc b/tcg/i386/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/i386/tcg-target.c.inc
+++ b/tcg/i386/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static bool tcg_target_const_match(int64_t val, TCGType type, int ct)
#define OPC_VPTERNLOGQ (0x25 | P_EXT3A | P_DATA16 | P_VEXW | P_EVEX)
#define OPC_VZEROUPPER (0x77 | P_EXT)
#define OPC_XCHG_ax_r32    (0x90)
+#define OPC_XCHG_EvGv (0x87)

#define OPC_GRP3_Eb (0xf6)
#define OPC_GRP3_Ev (0xf7)
@@ -XXX,XX +XXX,XX @@ static void tcg_out_movi(TCGContext *s, TCGType type,
}
}

+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+ int rexw = type == TCG_TYPE_I32 ? 0 : P_REXW;
+ tcg_out_modrm(s, OPC_XCHG_EvGv + rexw, r1, r2);
+ return true;
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
diff --git a/tcg/loongarch64/tcg-target.c.inc b/tcg/loongarch64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/loongarch64/tcg-target.c.inc
+++ b/tcg/loongarch64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_addi(TCGContext *s, TCGType type, TCGReg rd,
}
}

+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+ return false;
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
diff --git a/tcg/mips/tcg-target.c.inc b/tcg/mips/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/mips/tcg-target.c.inc
+++ b/tcg/mips/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_ext32s(s, rd, rs);
}

+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+ return false;
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
diff --git a/tcg/ppc/tcg-target.c.inc b/tcg/ppc/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/ppc/tcg-target.c.inc
+++ b/tcg/ppc/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_movi(TCGContext *s, TCGType type, TCGReg ret,
}
}

+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+ return false;
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_movi(TCGContext *s, TCGType type, TCGReg rd,
tcg_out_opc_imm(s, OPC_LD, rd, rd, 0);
}

+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+ return false;
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
diff --git a/tcg/s390x/tcg-target.c.inc b/tcg/s390x/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/s390x/tcg-target.c.inc
+++ b/tcg/s390x/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static inline bool tcg_out_sti(TCGContext *s, TCGType type, TCGArg val,
return false;
}

+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+ return false;
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
diff --git a/tcg/sparc64/tcg-target.c.inc b/tcg/sparc64/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/sparc64/tcg-target.c.inc
+++ b/tcg/sparc64/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+ return false;
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
diff --git a/tcg/tci/tcg-target.c.inc b/tcg/tci/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tci/tcg-target.c.inc
+++ b/tcg/tci/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_extrl_i64_i32(TCGContext *s, TCGReg rd, TCGReg rs)
tcg_out_mov(s, TCG_TYPE_I32, rd, rs);
}

+static bool tcg_out_xchg(TCGContext *s, TCGType type, TCGReg r1, TCGReg r2)
+{
+ return false;
+}
+
static void tcg_out_addi_ptr(TCGContext *s, TCGReg rd, TCGReg rs,
tcg_target_long imm)
{
--
2.34.1
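[Editorial note: the boolean return is the interesting part of this
interface. A minimal caller-side sketch of the pattern it enables:
try the native exchange first, and fall back to moves through a scratch
register when the backend reports failure. Registers are modelled as a
plain array and all names here are hypothetical; this is not the actual
TCG register allocator:

    #include <stdbool.h>
    #include <stdio.h>

    /* Registers modelled as an array; indices stand in for TCGReg. */
    static long regs[8];

    /* A backend without a native exchange instruction: like the stubs
     * in this patch, it always reports failure. */
    static bool emit_xchg(int r1, int r2)
    {
        (void)r1; (void)r2;
        return false;
    }

    static void emit_mov(int dst, int src)
    {
        regs[dst] = regs[src];
    }

    /* Caller-side pattern: prefer the single instruction, else swap
     * through a known-free scratch register. */
    static void swap_regs(int r1, int r2, int scratch)
    {
        if (!emit_xchg(r1, r2)) {
            emit_mov(scratch, r1);
            emit_mov(r1, r2);
            emit_mov(r2, scratch);
        }
    }

    int main(void)
    {
        regs[0] = 1;
        regs[1] = 2;
        swap_regs(0, 1, 7);
        printf("%ld %ld\n", regs[0], regs[1]);   /* 2 1 */
        return 0;
    }

On x86 the exchange is a single XCHG, so no scratch is needed; the
stubs let common code ask the question uniformly across backends.]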
Deleted patch
Reviewed-by: Philippe Mathieu-Daudé <philmd@linaro.org>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/tcg-ldst.c.inc | 1 +
1 file changed, 1 insertion(+)

diff --git a/tcg/tcg-ldst.c.inc b/tcg/tcg-ldst.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/tcg-ldst.c.inc
+++ b/tcg/tcg-ldst.c.inc
@@ -XXX,XX +XXX,XX @@ static inline TCGLabelQemuLdst *new_ldst_label(TCGContext *s)
{
TCGLabelQemuLdst *l = tcg_malloc(sizeof(*l));

+ memset(l, 0, sizeof(*l));
QSIMPLEQ_INSERT_TAIL(&s->ldst_labels, l, next);

return l;
--
2.34.1
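[Editorial note: the point of the memset is that tcg_malloc is a bump
allocator returning uninitialized memory, so any TCGLabelQemuLdst field
that only some code paths assign would otherwise hold stale data. A
self-contained illustration of the failure mode being avoided; the
struct, field names and allocator here are hypothetical stand-ins, with
alignment ignored for brevity:

    #include <stdio.h>
    #include <string.h>

    typedef struct {
        int is_ld;         /* set by every caller */
        void *label_ptr;   /* only set on some code paths */
    } Label;

    /* Bump allocator modelled on tcg_malloc: hands out raw arena
     * bytes with no implicit zeroing. */
    static char arena[1024];
    static unsigned arena_off;

    static void *arena_alloc(unsigned size)
    {
        void *p = arena + arena_off;
        arena_off += size;
        return p;
    }

    static Label *new_label(void)
    {
        Label *l = arena_alloc(sizeof(*l));
        memset(l, 0, sizeof(*l));   /* every field gets a defined default */
        return l;
    }

    int main(void)
    {
        Label *l = new_label();
        printf("%p\n", l->label_ptr);   /* reliably NULL, even if never set */
        return 0;
    }

Zeroing once at allocation is cheaper to maintain than auditing every
slow-path builder for fields it forgot to initialize.]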
Deleted patch
Since TCG_TYPE_I32 values are kept sign-extended in registers, via "w"
instructions, we don't need to extend if the register matches.
This is already relied upon by comparisons.

Reviewed-by: Daniel Henrique Barboza <dbarboza@ventanamicro.com>
Signed-off-by: Richard Henderson <richard.henderson@linaro.org>
---
tcg/riscv/tcg-target.c.inc | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/tcg/riscv/tcg-target.c.inc b/tcg/riscv/tcg-target.c.inc
index XXXXXXX..XXXXXXX 100644
--- a/tcg/riscv/tcg-target.c.inc
+++ b/tcg/riscv/tcg-target.c.inc
@@ -XXX,XX +XXX,XX @@ static void tcg_out_ext32s(TCGContext *s, TCGReg ret, TCGReg arg)

static void tcg_out_exts_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
{
- tcg_out_ext32s(s, ret, arg);
+ if (ret != arg) {
+ tcg_out_ext32s(s, ret, arg);
+ }
}

static void tcg_out_extu_i32_i64(TCGContext *s, TCGReg ret, TCGReg arg)
--
2.34.1
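[Editorial note: the invariant being relied on is that RV64 "w"
instructions (addw, subw, ...) write their 32-bit result sign-extended
to 64 bits, so an I32 value in a register is always already in
canonical sign-extended form, and re-extending in place is a no-op.
A host-side model of that invariant, illustrative only:

    #include <stdint.h>
    #include <stdio.h>

    /* Sign-extend the low 32 bits of x to 64 bits, as RV64 "w"
     * instructions do with their results. */
    static uint64_t sext32(uint64_t x)
    {
        uint32_t lo = (uint32_t)x;
        return (lo & 0x80000000u) ? (0xffffffff00000000ull | lo) : lo;
    }

    /* Host-side model of RV64 addw: 32-bit add, result sign-extended. */
    static uint64_t model_addw(uint64_t rs1, uint64_t rs2)
    {
        return sext32((uint32_t)rs1 + (uint32_t)rs2);
    }

    int main(void)
    {
        /* 0x7fffffff + 1 wraps to INT32_MIN; the register holds it
         * sign-extended: 0xffffffff80000000. */
        uint64_t r = model_addw(0x7fffffff, 1);
        printf("%#llx\n", (unsigned long long)r);

        /* Because every I32 value is already in this canonical form,
         * re-sign-extending in place changes nothing: prints 1. */
        printf("%d\n", r == sext32(r));
        return 0;
    }

Hence the patch only emits the extension when it must also move the
value to a different register.]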