Diffstat (limited to 'arch/arm64/kvm/emulate-nested.c')
 arch/arm64/kvm/emulate-nested.c | 289
 1 file changed, 175 insertions(+), 114 deletions(-)
diff --git a/arch/arm64/kvm/emulate-nested.c b/arch/arm64/kvm/emulate-nested.c
index 0ab090553354..1ffbfd1c3cf2 100644
--- a/arch/arm64/kvm/emulate-nested.c
+++ b/arch/arm64/kvm/emulate-nested.c
@@ -16,9 +16,13 @@
enum trap_behaviour {
BEHAVE_HANDLE_LOCALLY = 0,
+
BEHAVE_FORWARD_READ = BIT(0),
BEHAVE_FORWARD_WRITE = BIT(1),
- BEHAVE_FORWARD_ANY = BEHAVE_FORWARD_READ | BEHAVE_FORWARD_WRITE,
+ BEHAVE_FORWARD_RW = BEHAVE_FORWARD_READ | BEHAVE_FORWARD_WRITE,
+
+ /* Traps that take effect in Host EL0, this is rare! */
+ BEHAVE_FORWARD_IN_HOST_EL0 = BIT(2),
};
struct trap_bits {
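A minimal sketch of how a combined trap_behaviour value (including the new BEHAVE_FORWARD_IN_HOST_EL0 bit added above) is meant to be consumed, mirroring the triage logic further down in this patch; the helper name is illustrative and not part of the file:

static bool sketch_should_forward(enum trap_behaviour b, bool is_read,
				  bool vcpu_in_host_el0)
{
	/* Traps lacking BEHAVE_FORWARD_IN_HOST_EL0 never fire for host EL0. */
	if (vcpu_in_host_el0 && !(b & BEHAVE_FORWARD_IN_HOST_EL0))
		return false;

	/* Otherwise forward when the trapped direction matches the access. */
	return ((b & BEHAVE_FORWARD_READ) && is_read) ||
	       ((b & BEHAVE_FORWARD_WRITE) && !is_read);
}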
@@ -105,6 +109,7 @@ enum cgt_group_id {
CGT_HCR_TPU_TOCU,
CGT_HCR_NV1_nNV2_ENSCXT,
CGT_MDCR_TPM_TPMCR,
+ CGT_MDCR_TPM_HPMN,
CGT_MDCR_TDE_TDA,
CGT_MDCR_TDE_TDOSA,
CGT_MDCR_TDE_TDRA,
@@ -121,6 +126,7 @@ enum cgt_group_id {
CGT_CNTHCTL_EL1PTEN,
CGT_CPTR_TTA,
+ CGT_MDCR_HPMN,
/* Must be last */
__NR_CGT_GROUP_IDS__
@@ -137,7 +143,7 @@ static const struct trap_bits coarse_trap_bits[] = {
.index = HCR_EL2,
.value = HCR_TID2,
.mask = HCR_TID2,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TID3] = {
.index = HCR_EL2,
@@ -161,37 +167,37 @@ static const struct trap_bits coarse_trap_bits[] = {
.index = HCR_EL2,
.value = HCR_TIDCP,
.mask = HCR_TIDCP,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TACR] = {
.index = HCR_EL2,
.value = HCR_TACR,
.mask = HCR_TACR,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TSW] = {
.index = HCR_EL2,
.value = HCR_TSW,
.mask = HCR_TSW,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TPC] = { /* Also called TCPC when FEAT_DPB is implemented */
.index = HCR_EL2,
.value = HCR_TPC,
.mask = HCR_TPC,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TPU] = {
.index = HCR_EL2,
.value = HCR_TPU,
.mask = HCR_TPU,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TTLB] = {
.index = HCR_EL2,
.value = HCR_TTLB,
.mask = HCR_TTLB,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TVM] = {
.index = HCR_EL2,
@@ -203,7 +209,7 @@ static const struct trap_bits coarse_trap_bits[] = {
.index = HCR_EL2,
.value = HCR_TDZ,
.mask = HCR_TDZ,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TRVM] = {
.index = HCR_EL2,
@@ -215,199 +221,201 @@ static const struct trap_bits coarse_trap_bits[] = {
.index = HCR_EL2,
.value = HCR_TLOR,
.mask = HCR_TLOR,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TERR] = {
.index = HCR_EL2,
.value = HCR_TERR,
.mask = HCR_TERR,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_APK] = {
.index = HCR_EL2,
.value = 0,
.mask = HCR_APK,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_NV] = {
.index = HCR_EL2,
.value = HCR_NV,
.mask = HCR_NV,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_NV_nNV2] = {
.index = HCR_EL2,
.value = HCR_NV,
.mask = HCR_NV | HCR_NV2,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_NV1_nNV2] = {
.index = HCR_EL2,
.value = HCR_NV | HCR_NV1,
.mask = HCR_NV | HCR_NV1 | HCR_NV2,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_AT] = {
.index = HCR_EL2,
.value = HCR_AT,
.mask = HCR_AT,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_nFIEN] = {
.index = HCR_EL2,
.value = 0,
.mask = HCR_FIEN,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TID4] = {
.index = HCR_EL2,
.value = HCR_TID4,
.mask = HCR_TID4,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TICAB] = {
.index = HCR_EL2,
.value = HCR_TICAB,
.mask = HCR_TICAB,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TOCU] = {
.index = HCR_EL2,
.value = HCR_TOCU,
.mask = HCR_TOCU,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_ENSCXT] = {
.index = HCR_EL2,
.value = 0,
.mask = HCR_ENSCXT,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TTLBIS] = {
.index = HCR_EL2,
.value = HCR_TTLBIS,
.mask = HCR_TTLBIS,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCR_TTLBOS] = {
.index = HCR_EL2,
.value = HCR_TTLBOS,
.mask = HCR_TTLBOS,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_MDCR_TPMCR] = {
.index = MDCR_EL2,
.value = MDCR_EL2_TPMCR,
.mask = MDCR_EL2_TPMCR,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW |
+ BEHAVE_FORWARD_IN_HOST_EL0,
},
[CGT_MDCR_TPM] = {
.index = MDCR_EL2,
.value = MDCR_EL2_TPM,
.mask = MDCR_EL2_TPM,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW |
+ BEHAVE_FORWARD_IN_HOST_EL0,
},
[CGT_MDCR_TDE] = {
.index = MDCR_EL2,
.value = MDCR_EL2_TDE,
.mask = MDCR_EL2_TDE,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_MDCR_TDA] = {
.index = MDCR_EL2,
.value = MDCR_EL2_TDA,
.mask = MDCR_EL2_TDA,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_MDCR_TDOSA] = {
.index = MDCR_EL2,
.value = MDCR_EL2_TDOSA,
.mask = MDCR_EL2_TDOSA,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_MDCR_TDRA] = {
.index = MDCR_EL2,
.value = MDCR_EL2_TDRA,
.mask = MDCR_EL2_TDRA,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_MDCR_E2PB] = {
.index = MDCR_EL2,
.value = 0,
.mask = BIT(MDCR_EL2_E2PB_SHIFT),
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_MDCR_TPMS] = {
.index = MDCR_EL2,
.value = MDCR_EL2_TPMS,
.mask = MDCR_EL2_TPMS,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_MDCR_TTRF] = {
.index = MDCR_EL2,
.value = MDCR_EL2_TTRF,
.mask = MDCR_EL2_TTRF,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_MDCR_E2TB] = {
.index = MDCR_EL2,
.value = 0,
.mask = BIT(MDCR_EL2_E2TB_SHIFT),
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_MDCR_TDCC] = {
.index = MDCR_EL2,
.value = MDCR_EL2_TDCC,
.mask = MDCR_EL2_TDCC,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_CPTR_TAM] = {
.index = CPTR_EL2,
.value = CPTR_EL2_TAM,
.mask = CPTR_EL2_TAM,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_CPTR_TCPAC] = {
.index = CPTR_EL2,
.value = CPTR_EL2_TCPAC,
.mask = CPTR_EL2_TCPAC,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCRX_EnFPM] = {
.index = HCRX_EL2,
.value = 0,
.mask = HCRX_EL2_EnFPM,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_HCRX_TCR2En] = {
.index = HCRX_EL2,
.value = 0,
.mask = HCRX_EL2_TCR2En,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_ICH_HCR_TC] = {
.index = ICH_HCR_EL2,
.value = ICH_HCR_TC,
.mask = ICH_HCR_TC,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_ICH_HCR_TALL0] = {
.index = ICH_HCR_EL2,
.value = ICH_HCR_TALL0,
.mask = ICH_HCR_TALL0,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_ICH_HCR_TALL1] = {
.index = ICH_HCR_EL2,
.value = ICH_HCR_TALL1,
.mask = ICH_HCR_TALL1,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
[CGT_ICH_HCR_TDIR] = {
.index = ICH_HCR_EL2,
.value = ICH_HCR_TDIR,
.mask = ICH_HCR_TDIR,
- .behaviour = BEHAVE_FORWARD_ANY,
+ .behaviour = BEHAVE_FORWARD_RW,
},
};
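For context, a hedged paraphrase of how a coarse_trap_bits entry is evaluated elsewhere in this file (helper name illustrative): the behaviour applies when the masked bits of the guest's EL2 control register equal .value, which is why entries such as CGT_HCR_APK use .value = 0 to trap on a *cleared* bit.

static enum trap_behaviour sketch_get_behaviour(u64 reg_val,
						const struct trap_bits *tb)
{
	/* Match on value under mask; anything else is handled locally. */
	if ((reg_val & tb->mask) == tb->value)
		return tb->behaviour;

	return BEHAVE_HANDLE_LOCALLY;
}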
@@ -428,6 +436,7 @@ static const enum cgt_group_id *coarse_control_combo[] = {
MCB(CGT_HCR_TPU_TOCU, CGT_HCR_TPU, CGT_HCR_TOCU),
MCB(CGT_HCR_NV1_nNV2_ENSCXT, CGT_HCR_NV1_nNV2, CGT_HCR_ENSCXT),
MCB(CGT_MDCR_TPM_TPMCR, CGT_MDCR_TPM, CGT_MDCR_TPMCR),
+ MCB(CGT_MDCR_TPM_HPMN, CGT_MDCR_TPM, CGT_MDCR_HPMN),
MCB(CGT_MDCR_TDE_TDA, CGT_MDCR_TDE, CGT_MDCR_TDA),
MCB(CGT_MDCR_TDE_TDOSA, CGT_MDCR_TDE, CGT_MDCR_TDOSA),
MCB(CGT_MDCR_TDE_TDRA, CGT_MDCR_TDE, CGT_MDCR_TDRA),
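An MCB group resolves to the union of its members' behaviours, so the new CGT_MDCR_TPM_HPMN forwards an access if either MDCR_EL2.TPM traps it or the HPMN-based complex check (added below) does. A rough sketch, assuming a __RESERVED__-terminated member list and a per-ID resolver callback:

static enum trap_behaviour
sketch_combine_mcb(const enum cgt_group_id *cgids,
		   enum trap_behaviour (*resolve)(enum cgt_group_id))
{
	enum trap_behaviour b = BEHAVE_HANDLE_LOCALLY;

	/* OR together the behaviour of every member of the combination. */
	for (int i = 0; cgids[i] != __RESERVED__; i++)
		b |= resolve(cgids[i]);

	return b;
}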
@@ -467,7 +476,7 @@ static enum trap_behaviour check_cnthctl_el1pcten(struct kvm_vcpu *vcpu)
if (get_sanitized_cnthctl(vcpu) & (CNTHCTL_EL1PCTEN << 10))
return BEHAVE_HANDLE_LOCALLY;
- return BEHAVE_FORWARD_ANY;
+ return BEHAVE_FORWARD_RW;
}
static enum trap_behaviour check_cnthctl_el1pten(struct kvm_vcpu *vcpu)
@@ -475,7 +484,7 @@ static enum trap_behaviour check_cnthctl_el1pten(struct kvm_vcpu *vcpu)
if (get_sanitized_cnthctl(vcpu) & (CNTHCTL_EL1PCEN << 10))
return BEHAVE_HANDLE_LOCALLY;
- return BEHAVE_FORWARD_ANY;
+ return BEHAVE_FORWARD_RW;
}
static enum trap_behaviour check_cptr_tta(struct kvm_vcpu *vcpu)
@@ -486,7 +495,35 @@ static enum trap_behaviour check_cptr_tta(struct kvm_vcpu *vcpu)
val = translate_cptr_el2_to_cpacr_el1(val);
if (val & CPACR_ELx_TTA)
- return BEHAVE_FORWARD_ANY;
+ return BEHAVE_FORWARD_RW;
+
+ return BEHAVE_HANDLE_LOCALLY;
+}
+
+static enum trap_behaviour check_mdcr_hpmn(struct kvm_vcpu *vcpu)
+{
+ u32 sysreg = esr_sys64_to_sysreg(kvm_vcpu_get_esr(vcpu));
+ unsigned int idx;
+
+
+ switch (sysreg) {
+ case SYS_PMEVTYPERn_EL0(0) ... SYS_PMEVTYPERn_EL0(30):
+ case SYS_PMEVCNTRn_EL0(0) ... SYS_PMEVCNTRn_EL0(30):
+ idx = (sys_reg_CRm(sysreg) & 0x3) << 3 | sys_reg_Op2(sysreg);
+ break;
+ case SYS_PMXEVTYPER_EL0:
+ case SYS_PMXEVCNTR_EL0:
+ idx = SYS_FIELD_GET(PMSELR_EL0, SEL,
+ __vcpu_sys_reg(vcpu, PMSELR_EL0));
+ break;
+ default:
+ /* Someone used this trap helper for something else... */
+ KVM_BUG_ON(1, vcpu->kvm);
+ return BEHAVE_HANDLE_LOCALLY;
+ }
+
+ if (kvm_pmu_counter_is_hyp(vcpu, idx))
+ return BEHAVE_FORWARD_RW | BEHAVE_FORWARD_IN_HOST_EL0;
return BEHAVE_HANDLE_LOCALLY;
}
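check_mdcr_hpmn() recovers the counter index from the trapped encoding: for PMEVCNTR<n>_EL0 / PMEVTYPER<n>_EL0 the index is CRm<1:0>:Op2, while PMXEVCNTR_EL0 / PMXEVTYPER_EL0 go through the guest's PMSELR_EL0.SEL. A standalone worked example of the direct encoding (illustrative names, not kernel code):

#include <assert.h>

/* Recover n from a PMEV{CNTR,TYPER}<n>_EL0 encoding: n = CRm<1:0>:Op2. */
static unsigned int sketch_pmev_idx(unsigned int crm, unsigned int op2)
{
	return ((crm & 0x3) << 3) | (op2 & 0x7);
}

int main(void)
{
	/* PMEVCNTR13_EL0 encodes CRm = 0b1001, Op2 = 0b101 -> index 13. */
	assert(sketch_pmev_idx(0x9, 0x5) == 13);
	return 0;
}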
@@ -498,6 +535,7 @@ static const complex_condition_check ccc[] = {
CCC(CGT_CNTHCTL_EL1PCTEN, check_cnthctl_el1pcten),
CCC(CGT_CNTHCTL_EL1PTEN, check_cnthctl_el1pten),
CCC(CGT_CPTR_TTA, check_cptr_tta),
+ CCC(CGT_MDCR_HPMN, check_mdcr_hpmn),
};
/*
@@ -916,77 +954,77 @@ static const struct encoding_to_trap_config encoding_to_cgt[] __initconst = {
SR_TRAP(SYS_PMOVSCLR_EL0, CGT_MDCR_TPM),
SR_TRAP(SYS_PMCEID0_EL0, CGT_MDCR_TPM),
SR_TRAP(SYS_PMCEID1_EL0, CGT_MDCR_TPM),
- SR_TRAP(SYS_PMXEVTYPER_EL0, CGT_MDCR_TPM),
+ SR_TRAP(SYS_PMXEVTYPER_EL0, CGT_MDCR_TPM_HPMN),
SR_TRAP(SYS_PMSWINC_EL0, CGT_MDCR_TPM),
SR_TRAP(SYS_PMSELR_EL0, CGT_MDCR_TPM),
- SR_TRAP(SYS_PMXEVCNTR_EL0, CGT_MDCR_TPM),
+ SR_TRAP(SYS_PMXEVCNTR_EL0, CGT_MDCR_TPM_HPMN),
SR_TRAP(SYS_PMCCNTR_EL0, CGT_MDCR_TPM),
SR_TRAP(SYS_PMUSERENR_EL0, CGT_MDCR_TPM),
SR_TRAP(SYS_PMINTENSET_EL1, CGT_MDCR_TPM),
SR_TRAP(SYS_PMINTENCLR_EL1, CGT_MDCR_TPM),
SR_TRAP(SYS_PMMIR_EL1, CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(0), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(1), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(2), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(3), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(4), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(5), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(6), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(7), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(8), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(9), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(10), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(11), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(12), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(13), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(14), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(15), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(16), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(17), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(18), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(19), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(20), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(21), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(22), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(23), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(24), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(25), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(26), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(27), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(28), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(29), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVCNTRn_EL0(30), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(0), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(1), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(2), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(3), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(4), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(5), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(6), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(7), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(8), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(9), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(10), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(11), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(12), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(13), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(14), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(15), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(16), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(17), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(18), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(19), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(20), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(21), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(22), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(23), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(24), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(25), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(26), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(27), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(28), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(29), CGT_MDCR_TPM),
- SR_TRAP(SYS_PMEVTYPERn_EL0(30), CGT_MDCR_TPM),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(0), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(1), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(2), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(3), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(4), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(5), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(6), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(7), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(8), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(9), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(10), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(11), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(12), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(13), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(14), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(15), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(16), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(17), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(18), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(19), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(20), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(21), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(22), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(23), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(24), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(25), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(26), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(27), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(28), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(29), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVCNTRn_EL0(30), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(0), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(1), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(2), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(3), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(4), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(5), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(6), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(7), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(8), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(9), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(10), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(11), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(12), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(13), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(14), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(15), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(16), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(17), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(18), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(19), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(20), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(21), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(22), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(23), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(24), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(25), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(26), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(27), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(28), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(29), CGT_MDCR_TPM_HPMN),
+ SR_TRAP(SYS_PMEVTYPERn_EL0(30), CGT_MDCR_TPM_HPMN),
SR_TRAP(SYS_PMCCFILTR_EL0, CGT_MDCR_TPM),
SR_TRAP(SYS_MDCCSR_EL0, CGT_MDCR_TDCC_TDE_TDA),
SR_TRAP(SYS_MDCCINT_EL1, CGT_MDCR_TDCC_TDE_TDA),
@@ -2017,7 +2055,8 @@ check_mcb:
cgids = coarse_control_combo[id - __MULTIPLE_CONTROL_BITS__];
for (int i = 0; cgids[i] != __RESERVED__; i++) {
- if (cgids[i] >= __MULTIPLE_CONTROL_BITS__) {
+ if (cgids[i] >= __MULTIPLE_CONTROL_BITS__ &&
+ cgids[i] < __COMPLEX_CONDITIONS__) {
kvm_err("Recursive MCB %d/%d\n", id, cgids[i]);
ret = -EINVAL;
}
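The relaxed check relies on the cgt_group_id layout: MCB identifiers live in [__MULTIPLE_CONTROL_BITS__, __COMPLEX_CONDITIONS__) and complex checks such as CGT_MDCR_HPMN come after, so a nested MCB is still rejected while a complex-condition member is now accepted. A sketch of that classification (illustrative helper):

static bool sketch_mcb_member_ok(enum cgt_group_id id)
{
	/* Another MCB inside an MCB is still recursion and still an error... */
	if (id >= __MULTIPLE_CONTROL_BITS__ && id < __COMPLEX_CONDITIONS__)
		return false;

	/* ...but a complex condition (e.g. CGT_MDCR_HPMN) is a valid member. */
	return true;
}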
@@ -2122,11 +2161,19 @@ static u64 kvm_get_sysreg_res0(struct kvm *kvm, enum vcpu_sysreg sr)
return masks->mask[sr - __VNCR_START__].res0;
}
-static bool check_fgt_bit(struct kvm *kvm, bool is_read,
+static bool check_fgt_bit(struct kvm_vcpu *vcpu, bool is_read,
u64 val, const union trap_config tc)
{
+ struct kvm *kvm = vcpu->kvm;
enum vcpu_sysreg sr;
+ /*
+ * KVM doesn't know about any FGTs that apply to the host, and hopefully
+ * that'll remain the case.
+ */
+ if (is_hyp_ctxt(vcpu))
+ return false;
+
if (tc.pol)
return (val & BIT(tc.bit));
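The remainder of check_fgt_bit() (unchanged, not shown in this hunk) keys off the trap's polarity; presumably it reduces to something like the sketch below, with the early is_hyp_ctxt() bail-out above making every FGT inert while the vCPU runs in a host context:

static bool sketch_fgt_bit_traps(bool pol, u64 fgt_val, unsigned int bit)
{
	bool set = fgt_val & (1ULL << bit);	/* mirrors val & BIT(tc.bit) */

	/* Positive polarity traps when the bit is set, negative when clear. */
	return pol ? set : !set;
}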
@@ -2203,7 +2250,15 @@ bool triage_sysreg_trap(struct kvm_vcpu *vcpu, int *sr_index)
* If we're not nesting, immediately return to the caller, with the
* sysreg index, should we have it.
*/
- if (!vcpu_has_nv(vcpu) || is_hyp_ctxt(vcpu))
+ if (!vcpu_has_nv(vcpu))
+ goto local;
+
+ /*
+ * There are a few traps that take effect InHost, but are constrained
+ * to EL0. Don't bother with computing the trap behaviour if the vCPU
+ * isn't in EL0.
+ */
+ if (is_hyp_ctxt(vcpu) && !vcpu_is_host_el0(vcpu))
goto local;
switch ((enum fgt_group_id)tc.fgt) {
@@ -2249,12 +2304,14 @@ bool triage_sysreg_trap(struct kvm_vcpu *vcpu, int *sr_index)
goto local;
}
- if (tc.fgt != __NO_FGT_GROUP__ && check_fgt_bit(vcpu->kvm, is_read,
- val, tc))
+ if (tc.fgt != __NO_FGT_GROUP__ && check_fgt_bit(vcpu, is_read, val, tc))
goto inject;
b = compute_trap_behaviour(vcpu, tc);
+ if (!(b & BEHAVE_FORWARD_IN_HOST_EL0) && vcpu_is_host_el0(vcpu))
+ goto local;
+
if (((b & BEHAVE_FORWARD_READ) && is_read) ||
((b & BEHAVE_FORWARD_WRITE) && !is_read))
goto inject;
@@ -2389,6 +2446,8 @@ void kvm_emulate_nested_eret(struct kvm_vcpu *vcpu)
kvm_arch_vcpu_load(vcpu, smp_processor_id());
preempt_enable();
+
+ kvm_pmu_nested_transition(vcpu);
}
static void kvm_inject_el2_exception(struct kvm_vcpu *vcpu, u64 esr_el2,
@@ -2471,6 +2530,8 @@ static int kvm_inject_nested(struct kvm_vcpu *vcpu, u64 esr_el2,
kvm_arch_vcpu_load(vcpu, smp_processor_id());
preempt_enable();
+ kvm_pmu_nested_transition(vcpu);
+
return 1;
}