Previously, only the TBI bit determined whether tag checks happened.
With FEAT_MTE4's canonical tag checking, if the TCR_ELx.MTX bits are
set, then tag checking happens even when TBI is disabled. See the Arm
ARM pseudocode function AccessIsTagChecked().
Signed-off-by: Gabriel Brookman <brookmangabriel@gmail.com>
---
target/arm/helper.c | 10 ++++++++++
target/arm/internals.h | 10 +++++++++-
target/arm/tcg/helper-a64.c | 9 +++++----
target/arm/tcg/hflags.c | 9 +++++----
target/arm/tcg/mte_helper.c | 9 ++++++---
5 files changed, 35 insertions(+), 12 deletions(-)
diff --git a/target/arm/helper.c b/target/arm/helper.c
index 987539524a..56858367fd 100644
--- a/target/arm/helper.c
+++ b/target/arm/helper.c
@@ -9613,6 +9613,16 @@ uint64_t arm_sctlr(CPUARMState *env, int el)
return env->cp15.sctlr_el[el];
}
+int aa64_va_parameter_mtx(uint64_t tcr, ARMMMUIdx mmu_idx)
+{
+ if (regime_has_2_ranges(mmu_idx)) {
+ return extract64(tcr, 60, 2);
+ } else {
+ /* Replicate the single MTX bit so we always have 2 bits. */
+ return extract64(tcr, 33, 1) * 3;
+ }
+}
+
int aa64_va_parameter_tbi(uint64_t tcr, ARMMMUIdx mmu_idx)
{
if (regime_has_2_ranges(mmu_idx)) {
diff --git a/target/arm/internals.h b/target/arm/internals.h
index 8ec2750847..a45119caa2 100644
--- a/target/arm/internals.h
+++ b/target/arm/internals.h
@@ -1411,6 +1411,7 @@ ARMVAParameters aa64_va_parameters(CPUARMState *env, uint64_t va,
ARMMMUIdx mmu_idx, bool data,
bool el1_is_aa32);
+int aa64_va_parameter_mtx(uint64_t tcr, ARMMMUIdx mmu_idx);
int aa64_va_parameter_tbi(uint64_t tcr, ARMMMUIdx mmu_idx);
int aa64_va_parameter_tbid(uint64_t tcr, ARMMMUIdx mmu_idx);
int aa64_va_parameter_tcma(uint64_t tcr, ARMMMUIdx mmu_idx);
@@ -1546,7 +1547,8 @@ FIELD(MTEDESC, TBI, 4, 2)
FIELD(MTEDESC, TCMA, 6, 2)
FIELD(MTEDESC, WRITE, 8, 1)
FIELD(MTEDESC, ALIGN, 9, 3)
-FIELD(MTEDESC, SIZEM1, 12, 32 - 12) /* size - 1 */
+FIELD(MTEDESC, MTX, 12, 2)
+FIELD(MTEDESC, SIZEM1, 14, 32 - 14) /* size - 1 */
bool mte_probe(CPUARMState *env, uint32_t desc, uint64_t ptr);
uint64_t mte_check(CPUARMState *env, uint32_t desc, uint64_t ptr, uintptr_t ra);
@@ -1622,6 +1624,12 @@ static inline bool tbi_check(uint32_t desc, int bit55)
return (desc >> (R_MTEDESC_TBI_SHIFT + bit55)) & 1;
}
+/* Return true if the MTX bits mean that the access is canonically checked. */
+static inline bool mtx_check(uint32_t desc, int bit55)
+{
+ return (desc >> (R_MTEDESC_MTX_SHIFT + bit55)) & 1;
+}
+
/* Return true if tcma bits mean that the access is unchecked. */
static inline bool tcma_check(uint32_t desc, int bit55, int ptr_tag)
{
diff --git a/target/arm/tcg/helper-a64.c b/target/arm/tcg/helper-a64.c
index 2dec587d38..5f739d999c 100644
--- a/target/arm/tcg/helper-a64.c
+++ b/target/arm/tcg/helper-a64.c
@@ -1054,7 +1054,7 @@ static int mops_sizereg(uint32_t syndrome)
}
/*
- * Return true if TCMA and TBI bits mean we need to do MTE checks.
+ * Return true if the TCMA, TBI, and MTX bits mean we need to do MTE checks.
* We only need to do this once per MOPS insn, not for every page.
*/
static bool mte_checks_needed(uint64_t ptr, uint32_t desc)
@@ -1062,12 +1062,13 @@ static bool mte_checks_needed(uint64_t ptr, uint32_t desc)
int bit55 = extract64(ptr, 55, 1);
/*
- * Note that tbi_check() returns true for "access checked" but
- * tcma_check() returns true for "access unchecked".
+ * Note that tbi_check() and mtx_check() return true for "access checked",
+ * but tcma_check() returns true for "access unchecked".
*/
- if (!tbi_check(desc, bit55)) {
+ if (!tbi_check(desc, bit55) && !mtx_check(desc, bit55)) {
return false;
}
+
return !tcma_check(desc, bit55, allocation_tag_from_addr(ptr));
}
diff --git a/target/arm/tcg/hflags.c b/target/arm/tcg/hflags.c
index 75c55b1a6d..e753124c4c 100644
--- a/target/arm/tcg/hflags.c
+++ b/target/arm/tcg/hflags.c
@@ -245,13 +245,14 @@ static CPUARMTBFlags rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
uint64_t tcr = regime_tcr(env, mmu_idx);
uint64_t hcr = arm_hcr_el2_eff(env);
uint64_t sctlr;
- int tbii, tbid;
+ int tbii, tbid, mtx;
DP_TBFLAG_ANY(flags, AARCH64_STATE, 1);
/* Get control bits for tagged addresses. */
tbid = aa64_va_parameter_tbi(tcr, mmu_idx);
tbii = tbid & ~aa64_va_parameter_tbid(tcr, mmu_idx);
+ mtx = aa64_va_parameter_mtx(tcr, mmu_idx);
DP_TBFLAG_A64(flags, TBII, tbii);
DP_TBFLAG_A64(flags, TBID, tbid);
@@ -403,14 +404,14 @@ static CPUARMTBFlags rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
/*
* Set MTE_ACTIVE if any access may be Checked, and leave clear
* if all accesses must be Unchecked:
- * 1) If no TBI, then there are no tags in the address to check,
+ * 1) If TBI and MTX are both unset, accesses are Unchecked.
* 2) If Tag Check Override, then all accesses are Unchecked,
* 3) If Tag Check Fail == 0, then Checked access have no effect,
* 4) If no Allocation Tag Access, then all accesses are Unchecked.
*/
if (allocation_tag_access_enabled(env, el, sctlr)) {
DP_TBFLAG_A64(flags, ATA, 1);
- if (tbid
+ if ((tbid || mtx)
&& !(env->pstate & PSTATE_TCO)
&& (sctlr & (el == 0 ? SCTLR_TCF0 : SCTLR_TCF))) {
DP_TBFLAG_A64(flags, MTE_ACTIVE, 1);
@@ -436,7 +437,7 @@ static CPUARMTBFlags rebuild_hflags_a64(CPUARMState *env, int el, int fp_el,
}
/* And again for unprivileged accesses, if required. */
if (EX_TBFLAG_A64(flags, UNPRIV)
- && tbid
+ && (tbid || mtx)
&& !(env->pstate & PSTATE_TCO)
&& (sctlr & SCTLR_TCF0)
&& allocation_tag_access_enabled(env, 0, sctlr)) {
diff --git a/target/arm/tcg/mte_helper.c b/target/arm/tcg/mte_helper.c
index 4deec80208..1484087a19 100644
--- a/target/arm/tcg/mte_helper.c
+++ b/target/arm/tcg/mte_helper.c
@@ -819,8 +819,11 @@ static int mte_probe_int(CPUARMState *env, uint32_t desc, uint64_t ptr,
bit55 = extract64(ptr, 55, 1);
*fault = ptr;
- /* If TBI is disabled, the access is unchecked, and ptr is not dirty. */
- if (unlikely(!tbi_check(desc, bit55))) {
+ /*
+ * If TBI and MTX are disabled, the access is unchecked, and ptr is not
+ * dirty.
+ */
+ if (unlikely(!tbi_check(desc, bit55) && !mtx_check(desc, bit55))) {
return -1;
}
@@ -961,7 +964,10 @@ uint64_t HELPER(mte_check_zva)(CPUARMState *env, uint32_t desc, uint64_t ptr)
     bit55 = extract64(ptr, 55, 1);
 
-    /* If TBI is disabled, the access is unchecked, and ptr is not dirty. */
-    if (unlikely(!tbi_check(desc, bit55))) {
+    /*
+     * If TBI and MTX are disabled, the access is unchecked, and ptr is not
+     * dirty.
+     */
+    if (unlikely(!tbi_check(desc, bit55) && !mtx_check(desc, bit55))) {
         return ptr;
     }
 
--
2.52.0