diff options
Diffstat (limited to 'arch/arm64/include/asm/assembler.h')
| -rw-r--r-- | arch/arm64/include/asm/assembler.h | 20 | 
1 file changed, 10 insertions, 10 deletions
diff --git a/arch/arm64/include/asm/assembler.h b/arch/arm64/include/asm/assembler.h
index 5846145be523..e5957a53be39 100644
--- a/arch/arm64/include/asm/assembler.h
+++ b/arch/arm64/include/asm/assembler.h
@@ -293,7 +293,7 @@ alternative_endif
 alternative_if_not ARM64_KVM_PROTECTED_MODE
 	ASM_BUG()
 alternative_else_nop_endif
-alternative_cb kvm_compute_final_ctr_el0
+alternative_cb ARM64_ALWAYS_SYSTEM, kvm_compute_final_ctr_el0
 	movz	\reg, #0
 	movk	\reg, #0, lsl #16
 	movk	\reg, #0, lsl #32
@@ -384,8 +384,8 @@ alternative_cb_end
 	.macro	tcr_compute_pa_size, tcr, pos, tmp0, tmp1
 	mrs	\tmp0, ID_AA64MMFR0_EL1
 	// Narrow PARange to fit the PS field in TCR_ELx
-	ubfx	\tmp0, \tmp0, #ID_AA64MMFR0_PARANGE_SHIFT, #3
-	mov	\tmp1, #ID_AA64MMFR0_PARANGE_MAX
+	ubfx	\tmp0, \tmp0, #ID_AA64MMFR0_EL1_PARANGE_SHIFT, #3
+	mov	\tmp1, #ID_AA64MMFR0_EL1_PARANGE_MAX
 	cmp	\tmp0, \tmp1
 	csel	\tmp0, \tmp1, \tmp0, hi
 	bfi	\tcr, \tmp0, \pos, #3
@@ -512,7 +512,7 @@ alternative_endif
  */
 	.macro	reset_pmuserenr_el0, tmpreg
 	mrs	\tmpreg, id_aa64dfr0_el1
-	sbfx	\tmpreg, \tmpreg, #ID_AA64DFR0_PMUVER_SHIFT, #4
+	sbfx	\tmpreg, \tmpreg, #ID_AA64DFR0_EL1_PMUVer_SHIFT, #4
 	cmp	\tmpreg, #1			// Skip if no PMU present
 	b.lt	9000f
 	msr	pmuserenr_el0, xzr		// Disable PMU access from EL0
@@ -524,7 +524,7 @@ alternative_endif
  */
 	.macro	reset_amuserenr_el0, tmpreg
 	mrs	\tmpreg, id_aa64pfr0_el1	// Check ID_AA64PFR0_EL1
-	ubfx	\tmpreg, \tmpreg, #ID_AA64PFR0_AMU_SHIFT, #4
+	ubfx	\tmpreg, \tmpreg, #ID_AA64PFR0_EL1_AMU_SHIFT, #4
 	cbz	\tmpreg, .Lskip_\@		// Skip if no AMU present
 	msr_s	SYS_AMUSERENR_EL0, xzr		// Disable AMU access from EL0
 .Lskip_\@:
@@ -612,7 +612,7 @@ alternative_endif
 	.macro	offset_ttbr1, ttbr, tmp
#ifdef CONFIG_ARM64_VA_BITS_52
 	mrs_s	\tmp, SYS_ID_AA64MMFR2_EL1
-	and	\tmp, \tmp, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
+	and	\tmp, \tmp, #(0xf << ID_AA64MMFR2_EL1_VARange_SHIFT)
 	cbnz	\tmp, .Lskipoffs_\@
 	orr	\ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
 .Lskipoffs_\@ :
@@ -877,7 +877,7 @@ alternative_endif
 
 	.macro __mitigate_spectre_bhb_loop      tmp
 #ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
-alternative_cb  spectre_bhb_patch_loop_iter
+alternative_cb ARM64_ALWAYS_SYSTEM, spectre_bhb_patch_loop_iter
 	mov	\tmp, #32		// Patched to correct the immediate
 alternative_cb_end
 .Lspectre_bhb_loop\@:
@@ -890,7 +890,7 @@ alternative_cb_end
 
 	.macro mitigate_spectre_bhb_loop	tmp
 #ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
-alternative_cb	spectre_bhb_patch_loop_mitigation_enable
+alternative_cb ARM64_ALWAYS_SYSTEM, spectre_bhb_patch_loop_mitigation_enable
 	b	.L_spectre_bhb_loop_done\@	// Patched to NOP
 alternative_cb_end
 	__mitigate_spectre_bhb_loop	\tmp
@@ -904,7 +904,7 @@ alternative_cb_end
 	stp	x0, x1, [sp, #-16]!
 	stp	x2, x3, [sp, #-16]!
 	mov	w0, #ARM_SMCCC_ARCH_WORKAROUND_3
-alternative_cb	smccc_patch_fw_mitigation_conduit
+alternative_cb ARM64_ALWAYS_SYSTEM, smccc_patch_fw_mitigation_conduit
 	nop					// Patched to SMC/HVC #0
 alternative_cb_end
 	ldp	x2, x3, [sp], #16
@@ -914,7 +914,7 @@ alternative_cb_end
 
 	.macro mitigate_spectre_bhb_clear_insn
 #ifdef CONFIG_MITIGATE_SPECTRE_BRANCH_HISTORY
-alternative_cb	spectre_bhb_patch_clearbhb
+alternative_cb ARM64_ALWAYS_SYSTEM, spectre_bhb_patch_clearbhb
 	/* Patched to NOP when not supported */
 	clearbhb
 	isb