From c0be8f18a3bfcfd369eba21337e6c89a4bb8b0e8 Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel <ardb@kernel.org>
Date: Fri, 24 Jun 2022 17:06:46 +0200
Subject: arm64: head: factor out TTBR1 assignment into a macro

Create a macro load_ttbr1 to avoid having to repeat the same instruction
sequence 3 times in a subsequent patch. No functional change intended.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20220624150651.1358849-17-ardb@kernel.org
Signed-off-by: Will Deacon
---
 arch/arm64/include/asm/assembler.h | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/arch/arm64/include/asm/assembler.h b/arch/arm64/include/asm/assembler.h
index 9468f45c07a6..b2584709c332 100644
--- a/arch/arm64/include/asm/assembler.h
+++ b/arch/arm64/include/asm/assembler.h
@@ -479,6 +479,18 @@ alternative_endif
 	_cond_extable .Licache_op\@, \fixup
 	.endm
 
+/*
+ * load_ttbr1 - install @pgtbl as a TTBR1 page table
+ * pgtbl preserved
+ * tmp1/tmp2 clobbered, either may overlap with pgtbl
+ */
+	.macro		load_ttbr1, pgtbl, tmp1, tmp2
+	phys_to_ttbr	\tmp1, \pgtbl
+	offset_ttbr1	\tmp1, \tmp2
+	msr		ttbr1_el1, \tmp1
+	isb
+	.endm
+
 /*
  * To prevent the possibility of old and new partial table walks being visible
  * in the tlb, switch the ttbr to a zero page when we invalidate the old
@@ -492,10 +504,7 @@ alternative_endif
 	isb
 	tlbi	vmalle1
 	dsb	nsh
-	phys_to_ttbr \tmp, \page_table
-	offset_ttbr1 \tmp, \tmp2
-	msr	ttbr1_el1, \tmp
-	isb
+	load_ttbr1 \page_table, \tmp, \tmp2
 	.endm
 
 /*
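
For context, a minimal usage sketch of the new macro, not taken from this patch: the symbol name and register choices below are hypothetical, and it assumes a caller running with the MMU off (so adrp yields a physical address):

	adrp	x0, init_pg_dir		// hypothetical symbol: physical address of a TTBR1 page table
	load_ttbr1 x0, x0, x1		// derive the TTBR1 value, program TTBR1_EL1, then isb

Since tmp1 may overlap pgtbl, x0 doubles as scratch here; x1 is only used as scratch by offset_ttbr1 for its 52-bit VA handling.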