From nobody Sun Nov 24 14:23:32 2024
Message-ID: <20241105141909.240243748@goodmis.org>
User-Agent: quilt/0.68
Date: Tue, 05 Nov 2024 09:18:50 -0500
From: Steven Rostedt
To: linux-kernel@vger.kernel.org
Cc: Masami Hiramatsu, Mark Rutland, Mathieu Desnoyers, Andrew Morton,
 Josh Poimboeuf, Jason Baron, Ard Biesheuvel, Alex Gaynor,
 Wedson Almeida Filho, Boqun Feng, Gary Guo, Björn Roy Baron,
 Benno Lossin, Andreas Hindborg, Arnd Bergmann, Thomas Gleixner,
 Ingo Molnar, Borislav Petkov, Dave Hansen, "H. Peter Anvin",
 Sean Christopherson, Uros Bizjak, Will Deacon, Marc Zyngier,
 Oliver Upton, Ryan Roberts, Fuad Tabba, Paul Walmsley, Palmer Dabbelt,
 Albert Ou, Anup Patel, Andrew Jones, Alexandre Ghiti, Conor Dooley,
 Samuel Holland, Huacai Chen, WANG Xuerui, Bibo Mao, Tiezhu Yang,
 Tianrui Zhao, Palmer Dabbelt, "Peter Zijlstra (Intel)", Miguel Ojeda,
 Catalin Marinas, Alice Ryhl
Subject: [for-next][PATCH 4/5] jump_label: adjust inline asm to be consistent
References: <20241105141846.641050484@goodmis.org>

From: Alice Ryhl

To avoid duplication of inline asm between C and Rust, we need to import
the inline asm from the relevant `jump_label.h` header into Rust. To make
that easier, this patch updates the header files to expose the inline asm
via a new ARCH_STATIC_BRANCH_ASM macro.

The header files are all updated to define an ARCH_STATIC_BRANCH_ASM macro
that takes the same arguments in a consistent order, so that Rust can use
the same logic for every architecture.
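For illustration only (the helper below is hypothetical and not part of the
diff), this is roughly the shape every architecture converges on after this
change; the operand strings mirror the 32-bit arm hunk:

/*
 * Sketch of the common pattern: ARCH_STATIC_BRANCH_ASM(key, label) always
 * takes (key, label) in that order, so a non-C consumer such as Rust can
 * splice in its own operand names without per-architecture knowledge.
 */
static __always_inline bool example_static_branch(struct static_key *key,
						  bool branch)
{
	asm goto(ARCH_STATIC_BRANCH_ASM("%c0", "%l[l_yes]")
		 : : "i" (&((char *)key)[branch]) : : l_yes);

	return false;
l_yes:
	return true;
}

Because the key and the branch label are passed to the macro as strings,
the C side keeps its usual asm-operand syntax ("%c0", "%l[l_yes]") while
Rust can substitute its own placeholders into the same template.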
Peter Anvin" Cc: Sean Christopherson Cc: Uros Bizjak Cc: Will Deacon Cc: Marc Zyngier Cc: Oliver Upton Cc: Mark Rutland Cc: Ryan Roberts Cc: Fuad Tabba Cc: Paul Walmsley Cc: Palmer Dabbelt Cc: Albert Ou Cc: Anup Patel Cc: Andrew Jones Cc: Alexandre Ghiti Cc: Conor Dooley Cc: Samuel Holland Cc: Huacai Chen Cc: WANG Xuerui Cc: Bibo Mao Cc: Tiezhu Yang Cc: Andrew Morton Cc: Tianrui Zhao Cc: Palmer Dabbelt Link: https://lore.kernel.org/20241030-tracepoint-v12-4-eec7f0f8ad22@google= .com Suggested-by: Peter Zijlstra (Intel) Co-developed-by: Miguel Ojeda Signed-off-by: Miguel Ojeda Acked-by: Peter Zijlstra (Intel) Acked-by: Catalin Marinas Acked-by: Palmer Dabbelt # RISC-V Signed-off-by: Alice Ryhl Signed-off-by: Steven Rostedt (Google) --- arch/arm/include/asm/jump_label.h | 14 ++++--- arch/arm64/include/asm/jump_label.h | 20 ++++++---- arch/loongarch/include/asm/jump_label.h | 16 +++++--- arch/riscv/include/asm/jump_label.h | 50 ++++++++++++++----------- arch/x86/include/asm/jump_label.h | 35 +++++++---------- 5 files changed, 73 insertions(+), 62 deletions(-) diff --git a/arch/arm/include/asm/jump_label.h b/arch/arm/include/asm/jump_= label.h index e4eb54f6cd9f..a35aba7f548c 100644 --- a/arch/arm/include/asm/jump_label.h +++ b/arch/arm/include/asm/jump_label.h @@ -9,13 +9,17 @@ =20 #define JUMP_LABEL_NOP_SIZE 4 =20 +/* This macro is also expanded on the Rust side. */ +#define ARCH_STATIC_BRANCH_ASM(key, label) \ + "1:\n\t" \ + WASM(nop) "\n\t" \ + ".pushsection __jump_table, \"aw\"\n\t" \ + ".word 1b, " label ", " key "\n\t" \ + ".popsection\n\t" \ + static __always_inline bool arch_static_branch(struct static_key *key, boo= l branch) { - asm goto("1:\n\t" - WASM(nop) "\n\t" - ".pushsection __jump_table, \"aw\"\n\t" - ".word 1b, %l[l_yes], %c0\n\t" - ".popsection\n\t" + asm goto(ARCH_STATIC_BRANCH_ASM("%c0", "%l[l_yes]") : : "i" (&((char *)key)[branch]) : : l_yes); =20 return false; diff --git a/arch/arm64/include/asm/jump_label.h b/arch/arm64/include/asm/j= ump_label.h index a0a5bbae7229..424ed421cd97 100644 --- a/arch/arm64/include/asm/jump_label.h +++ b/arch/arm64/include/asm/jump_label.h @@ -19,10 +19,14 @@ #define JUMP_TABLE_ENTRY(key, label) \ ".pushsection __jump_table, \"aw\"\n\t" \ ".align 3\n\t" \ - ".long 1b - ., %l["#label"] - .\n\t" \ - ".quad %c0 - .\n\t" \ - ".popsection\n\t" \ - : : "i"(key) : : label + ".long 1b - ., " label " - .\n\t" \ + ".quad " key " - .\n\t" \ + ".popsection\n\t" + +/* This macro is also expanded on the Rust side. 
+#define ARCH_STATIC_BRANCH_ASM(key, label)		\
+	"1: nop\n\t"					\
+	JUMP_TABLE_ENTRY(key, label)
 
 static __always_inline bool arch_static_branch(struct static_key * const key,
 					       const bool branch)
@@ -30,8 +34,8 @@ static __always_inline bool arch_static_branch(struct static_key * const key,
 	char *k = &((char *)key)[branch];
 
 	asm goto(
-		"1:	nop					\n\t"
-		JUMP_TABLE_ENTRY(k, l_yes)
+		ARCH_STATIC_BRANCH_ASM("%c0", "%l[l_yes]")
+		: : "i"(k) : : l_yes
 		);
 
 	return false;
@@ -43,9 +47,11 @@ static __always_inline bool arch_static_branch_jump(struct static_key * const ke
 						    const bool branch)
 {
 	char *k = &((char *)key)[branch];
+
 	asm goto(
 		"1:	b		%l[l_yes]	\n\t"
-		JUMP_TABLE_ENTRY(k, l_yes)
+		JUMP_TABLE_ENTRY("%c0", "%l[l_yes]")
+		: : "i"(k) : : l_yes
 		);
 	return false;
 l_yes:
diff --git a/arch/loongarch/include/asm/jump_label.h b/arch/loongarch/include/asm/jump_label.h
index 29acfe3de3fa..8a924bd69d19 100644
--- a/arch/loongarch/include/asm/jump_label.h
+++ b/arch/loongarch/include/asm/jump_label.h
@@ -13,18 +13,22 @@
 
 #define JUMP_LABEL_NOP_SIZE 4
 
-#define JUMP_TABLE_ENTRY				\
+/* This macro is also expanded on the Rust side. */
+#define JUMP_TABLE_ENTRY(key, label)			\
 	".pushsection	__jump_table, \"aw\"	\n\t"	\
 	".align		3			\n\t"	\
-	".long		1b - ., %l[l_yes] - .	\n\t"	\
-	".quad		%0 - .			\n\t"	\
+	".long		1b - ., " label " - .	\n\t"	\
+	".quad		" key " - .		\n\t"	\
 	".popsection				\n\t"
 
+#define ARCH_STATIC_BRANCH_ASM(key, label)		\
+	"1:	nop			\n\t"		\
+	JUMP_TABLE_ENTRY(key, label)
+
 static __always_inline bool arch_static_branch(struct static_key * const key, const bool branch)
 {
 	asm goto(
-		"1:	nop			\n\t"
-		JUMP_TABLE_ENTRY
+		ARCH_STATIC_BRANCH_ASM("%0", "%l[l_yes]")
 		: : "i"(&((char *)key)[branch]) : : l_yes);
 
 	return false;
@@ -37,7 +41,7 @@ static __always_inline bool arch_static_branch_jump(struct static_key * const ke
 {
 	asm goto(
 		"1:	b	%l[l_yes]	\n\t"
-		JUMP_TABLE_ENTRY
+		JUMP_TABLE_ENTRY("%0", "%l[l_yes]")
 		: : "i"(&((char *)key)[branch]) : : l_yes);
 
 	return false;
diff --git a/arch/riscv/include/asm/jump_label.h b/arch/riscv/include/asm/jump_label.h
index 1c768d02bd0c..87a71cc6d146 100644
--- a/arch/riscv/include/asm/jump_label.h
+++ b/arch/riscv/include/asm/jump_label.h
@@ -16,21 +16,28 @@
 
 #define JUMP_LABEL_NOP_SIZE 4
 
+#define JUMP_TABLE_ENTRY(key, label)			\
+	".pushsection	__jump_table, \"aw\"	\n\t"	\
+	".align		" RISCV_LGPTR "		\n\t"	\
+	".long		1b - ., " label " - .	\n\t"	\
+	"" RISCV_PTR "	" key " - .		\n\t"	\
+	".popsection				\n\t"
+
+/* This macro is also expanded on the Rust side. */
+#define ARCH_STATIC_BRANCH_ASM(key, label)		\
+	"	.align		2		\n\t"	\
+	"	.option push			\n\t"	\
+	"	.option norelax			\n\t"	\
+	"	.option norvc			\n\t"	\
+	"1:	nop				\n\t"	\
+	"	.option pop			\n\t"	\
+	JUMP_TABLE_ENTRY(key, label)
+
 static __always_inline bool arch_static_branch(struct static_key * const key,
 					       const bool branch)
 {
 	asm goto(
-		"	.align		2		\n\t"
-		"	.option push			\n\t"
-		"	.option norelax			\n\t"
-		"	.option norvc			\n\t"
-		"1:	nop				\n\t"
-		"	.option pop			\n\t"
-		"	.pushsection	__jump_table, \"aw\"	\n\t"
-		"	.align		" RISCV_LGPTR "	\n\t"
-		"	.long		1b - ., %l[label] - .	\n\t"
-		"	" RISCV_PTR "	%0 - .		\n\t"
\n\t" - " .popsection \n\t" + ARCH_STATIC_BRANCH_ASM("%0", "%l[label]") : : "i"(&((char *)key)[branch]) : : label); =20 return false; @@ -38,21 +45,20 @@ static __always_inline bool arch_static_branch(struct s= tatic_key * const key, return true; } =20 +#define ARCH_STATIC_BRANCH_JUMP_ASM(key, label) \ + " .align 2 \n\t" \ + " .option push \n\t" \ + " .option norelax \n\t" \ + " .option norvc \n\t" \ + "1: j " label " \n\t" \ + " .option pop \n\t" \ + JUMP_TABLE_ENTRY(key, label) + static __always_inline bool arch_static_branch_jump(struct static_key * co= nst key, const bool branch) { asm goto( - " .align 2 \n\t" - " .option push \n\t" - " .option norelax \n\t" - " .option norvc \n\t" - "1: j %l[label] \n\t" - " .option pop \n\t" - " .pushsection __jump_table, \"aw\" \n\t" - " .align " RISCV_LGPTR " \n\t" - " .long 1b - ., %l[label] - . \n\t" - " " RISCV_PTR " %0 - . \n\t" - " .popsection \n\t" + ARCH_STATIC_BRANCH_JUMP_ASM("%0", "%l[label]") : : "i"(&((char *)key)[branch]) : : label); =20 return false; diff --git a/arch/x86/include/asm/jump_label.h b/arch/x86/include/asm/jump_= label.h index cbbef32517f0..3f1c1d6c0da1 100644 --- a/arch/x86/include/asm/jump_label.h +++ b/arch/x86/include/asm/jump_label.h @@ -12,35 +12,28 @@ #include #include =20 -#define JUMP_TABLE_ENTRY \ +#define JUMP_TABLE_ENTRY(key, label) \ ".pushsection __jump_table, \"aw\" \n\t" \ _ASM_ALIGN "\n\t" \ ".long 1b - . \n\t" \ - ".long %l[l_yes] - . \n\t" \ - _ASM_PTR "%c0 + %c1 - .\n\t" \ + ".long " label " - . \n\t" \ + _ASM_PTR " " key " - . \n\t" \ ".popsection \n\t" =20 +/* This macro is also expanded on the Rust side. */ #ifdef CONFIG_HAVE_JUMP_LABEL_HACK - -static __always_inline bool arch_static_branch(struct static_key *key, boo= l branch) -{ - asm goto("1:" - "jmp %l[l_yes] # objtool NOPs this \n\t" - JUMP_TABLE_ENTRY - : : "i" (key), "i" (2 | branch) : : l_yes); - - return false; -l_yes: - return true; -} - +#define ARCH_STATIC_BRANCH_ASM(key, label) \ + "1: jmp " label " # objtool NOPs this \n\t" \ + JUMP_TABLE_ENTRY(key " + 2", label) #else /* !CONFIG_HAVE_JUMP_LABEL_HACK */ +#define ARCH_STATIC_BRANCH_ASM(key, label) \ + "1: .byte " __stringify(BYTES_NOP5) "\n\t" \ + JUMP_TABLE_ENTRY(key, label) +#endif /* CONFIG_HAVE_JUMP_LABEL_HACK */ =20 static __always_inline bool arch_static_branch(struct static_key * const k= ey, const bool branch) { - asm goto("1:" - ".byte " __stringify(BYTES_NOP5) "\n\t" - JUMP_TABLE_ENTRY + asm goto(ARCH_STATIC_BRANCH_ASM("%c0 + %c1", "%l[l_yes]") : : "i" (key), "i" (branch) : : l_yes); =20 return false; @@ -48,13 +41,11 @@ static __always_inline bool arch_static_branch(struct s= tatic_key * const key, co return true; } =20 -#endif /* CONFIG_HAVE_JUMP_LABEL_HACK */ - static __always_inline bool arch_static_branch_jump(struct static_key * co= nst key, const bool branch) { asm goto("1:" "jmp %l[l_yes]\n\t" - JUMP_TABLE_ENTRY + JUMP_TABLE_ENTRY("%c0 + %c1", "%l[l_yes]") : : "i" (key), "i" (branch) : : l_yes); =20 return false; --=20 2.45.2