
Commit c6cbe6a

Author: Marc Zyngier (committed)
KVM: arm64: Use FGT feature maps to drive RES0 bits
Another benefit of mapping bits to features is that it becomes trivial to
define which bits should be handled as RES0. Let's apply this principle to
the guest's view of the FGT registers.

Reviewed-by: Joey Gouly <joey.gouly@arm.com>
Signed-off-by: Marc Zyngier <maz@kernel.org>
Parent: 938a79d

3 files changed (+57, -119 lines)

arch/arm64/include/asm/kvm_host.h

Lines changed: 1 addition & 0 deletions
@@ -1611,6 +1611,7 @@ void kvm_set_vm_id_reg(struct kvm *kvm, u32 reg, u64 val);
 	(kvm_has_feat((k), ID_AA64MMFR3_EL1, S1POE, IMP))
 
 void compute_fgu(struct kvm *kvm, enum fgt_group_id fgt);
+void get_reg_fixed_bits(struct kvm *kvm, enum vcpu_sysreg reg, u64 *res0, u64 *res1);
 void check_feature_map(void);
 
 #endif /* __ARM64_KVM_HOST_H__ */

arch/arm64/kvm/config.c

Lines changed: 46 additions & 0 deletions
@@ -616,3 +616,49 @@ void compute_fgu(struct kvm *kvm, enum fgt_group_id fgt)
 
 	kvm->arch.fgu[fgt] = val;
 }
+
+void get_reg_fixed_bits(struct kvm *kvm, enum vcpu_sysreg reg, u64 *res0, u64 *res1)
+{
+	switch (reg) {
+	case HFGRTR_EL2:
+		*res0 = compute_res0_bits(kvm, hfgrtr_feat_map,
+					  ARRAY_SIZE(hfgrtr_feat_map), 0, 0);
+		*res0 |= hfgrtr_masks.res0;
+		*res1 = HFGRTR_EL2_RES1;
+		break;
+	case HFGWTR_EL2:
+		*res0 = compute_res0_bits(kvm, hfgwtr_feat_map,
+					  ARRAY_SIZE(hfgwtr_feat_map), 0, 0);
+		*res0 |= hfgwtr_masks.res0;
+		*res1 = HFGWTR_EL2_RES1;
+		break;
+	case HFGITR_EL2:
+		*res0 = compute_res0_bits(kvm, hfgitr_feat_map,
+					  ARRAY_SIZE(hfgitr_feat_map), 0, 0);
+		*res0 |= hfgitr_masks.res0;
+		*res1 = HFGITR_EL2_RES1;
+		break;
+	case HDFGRTR_EL2:
+		*res0 = compute_res0_bits(kvm, hdfgrtr_feat_map,
+					  ARRAY_SIZE(hdfgrtr_feat_map), 0, 0);
+		*res0 |= hdfgrtr_masks.res0;
+		*res1 = HDFGRTR_EL2_RES1;
+		break;
+	case HDFGWTR_EL2:
+		*res0 = compute_res0_bits(kvm, hdfgwtr_feat_map,
+					  ARRAY_SIZE(hdfgwtr_feat_map), 0, 0);
+		*res0 |= hdfgwtr_masks.res0;
+		*res1 = HDFGWTR_EL2_RES1;
+		break;
+	case HAFGRTR_EL2:
+		*res0 = compute_res0_bits(kvm, hafgrtr_feat_map,
+					  ARRAY_SIZE(hafgrtr_feat_map), 0, 0);
+		*res0 |= hafgrtr_masks.res0;
+		*res1 = HAFGRTR_EL2_RES1;
+		break;
+	default:
+		WARN_ON_ONCE(1);
+		*res0 = *res1 = 0;
+		break;
+	}
+}
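
Note that compute_res0_bits() is not part of this diff; it is assumed here to be an existing helper elsewhere in arch/arm64/kvm/config.c that walks a feature map and collects the bits whose gating feature the guest lacks. The sketch below only illustrates that idea; the structure layout, field names and helper name are hypothetical, not the kernel's actual definitions.

/*
 * Illustrative sketch only: the real feature-map type and
 * compute_res0_bits() implementation in config.c differ.
 */
struct example_bits_feat_map {
	u64	bits;				/* register bits gated by one feature */
	bool	(*feat_present)(struct kvm *kvm);	/* does the guest have it? */
};

static u64 example_compute_res0_bits(struct kvm *kvm,
				     const struct example_bits_feat_map *map,
				     int nr_entries)
{
	u64 res0 = 0;

	/* Any bit whose controlling feature is absent becomes RES0 */
	for (int i = 0; i < nr_entries; i++) {
		if (!map[i].feat_present(kvm))
			res0 |= map[i].bits;
	}

	return res0;
}

On top of that per-feature result, get_reg_fixed_bits() ORs in the register's architectural RES0 mask (e.g. hfgrtr_masks.res0) and reports the fixed RES1 bits, so callers such as kvm_init_nv_sysregs() in nested.c no longer need to carry per-register feature checks, as the diff below shows.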

arch/arm64/kvm/nested.c

Lines changed: 10 additions & 119 deletions
@@ -1100,132 +1100,23 @@ int kvm_init_nv_sysregs(struct kvm_vcpu *vcpu)
 	set_sysreg_masks(kvm, HCRX_EL2, res0, res1);
 
 	/* HFG[RW]TR_EL2 */
-	res0 = res1 = 0;
-	if (!(kvm_vcpu_has_feature(kvm, KVM_ARM_VCPU_PTRAUTH_ADDRESS) &&
-	      kvm_vcpu_has_feature(kvm, KVM_ARM_VCPU_PTRAUTH_GENERIC)))
-		res0 |= (HFGRTR_EL2_APDAKey | HFGRTR_EL2_APDBKey |
-			 HFGRTR_EL2_APGAKey | HFGRTR_EL2_APIAKey |
-			 HFGRTR_EL2_APIBKey);
-	if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, LO, IMP))
-		res0 |= (HFGRTR_EL2_LORC_EL1 | HFGRTR_EL2_LOREA_EL1 |
-			 HFGRTR_EL2_LORID_EL1 | HFGRTR_EL2_LORN_EL1 |
-			 HFGRTR_EL2_LORSA_EL1);
-	if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, CSV2, CSV2_2) &&
-	    !kvm_has_feat(kvm, ID_AA64PFR1_EL1, CSV2_frac, CSV2_1p2))
-		res0 |= (HFGRTR_EL2_SCXTNUM_EL1 | HFGRTR_EL2_SCXTNUM_EL0);
-	if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, GIC, IMP))
-		res0 |= HFGRTR_EL2_ICC_IGRPENn_EL1;
-	if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, RAS, IMP))
-		res0 |= (HFGRTR_EL2_ERRIDR_EL1 | HFGRTR_EL2_ERRSELR_EL1 |
-			 HFGRTR_EL2_ERXFR_EL1 | HFGRTR_EL2_ERXCTLR_EL1 |
-			 HFGRTR_EL2_ERXSTATUS_EL1 | HFGRTR_EL2_ERXMISCn_EL1 |
-			 HFGRTR_EL2_ERXPFGF_EL1 | HFGRTR_EL2_ERXPFGCTL_EL1 |
-			 HFGRTR_EL2_ERXPFGCDN_EL1 | HFGRTR_EL2_ERXADDR_EL1);
-	if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, LS64, LS64_ACCDATA))
-		res0 |= HFGRTR_EL2_nACCDATA_EL1;
-	if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, GCS, IMP))
-		res0 |= (HFGRTR_EL2_nGCS_EL0 | HFGRTR_EL2_nGCS_EL1);
-	if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, SME, IMP))
-		res0 |= (HFGRTR_EL2_nSMPRI_EL1 | HFGRTR_EL2_nTPIDR2_EL0);
-	if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, THE, IMP))
-		res0 |= HFGRTR_EL2_nRCWMASK_EL1;
-	if (!kvm_has_s1pie(kvm))
-		res0 |= (HFGRTR_EL2_nPIRE0_EL1 | HFGRTR_EL2_nPIR_EL1);
-	if (!kvm_has_s1poe(kvm))
-		res0 |= (HFGRTR_EL2_nPOR_EL0 | HFGRTR_EL2_nPOR_EL1);
-	if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, S2POE, IMP))
-		res0 |= HFGRTR_EL2_nS2POR_EL1;
-	if (!kvm_has_feat(kvm, ID_AA64MMFR3_EL1, AIE, IMP))
-		res0 |= (HFGRTR_EL2_nMAIR2_EL1 | HFGRTR_EL2_nAMAIR2_EL1);
-	set_sysreg_masks(kvm, HFGRTR_EL2, res0 | hfgrtr_masks.res0, res1);
-	set_sysreg_masks(kvm, HFGWTR_EL2, res0 | hfgwtr_masks.res0, res1);
+	get_reg_fixed_bits(kvm, HFGRTR_EL2, &res0, &res1);
+	set_sysreg_masks(kvm, HFGRTR_EL2, res0, res1);
+	get_reg_fixed_bits(kvm, HFGWTR_EL2, &res0, &res1);
+	set_sysreg_masks(kvm, HFGWTR_EL2, res0, res1);
 
 	/* HDFG[RW]TR_EL2 */
-	res0 = res1 = 0;
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, DoubleLock, IMP))
-		res0 |= HDFGRTR_EL2_OSDLR_EL1;
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMUVer, IMP))
-		res0 |= (HDFGRTR_EL2_PMEVCNTRn_EL0 | HDFGRTR_EL2_PMEVTYPERn_EL0 |
-			 HDFGRTR_EL2_PMCCFILTR_EL0 | HDFGRTR_EL2_PMCCNTR_EL0 |
-			 HDFGRTR_EL2_PMCNTEN | HDFGRTR_EL2_PMINTEN |
-			 HDFGRTR_EL2_PMOVS | HDFGRTR_EL2_PMSELR_EL0 |
-			 HDFGRTR_EL2_PMMIR_EL1 | HDFGRTR_EL2_PMUSERENR_EL0 |
-			 HDFGRTR_EL2_PMCEIDn_EL0);
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMSVer, IMP))
-		res0 |= (HDFGRTR_EL2_PMBLIMITR_EL1 | HDFGRTR_EL2_PMBPTR_EL1 |
-			 HDFGRTR_EL2_PMBSR_EL1 | HDFGRTR_EL2_PMSCR_EL1 |
-			 HDFGRTR_EL2_PMSEVFR_EL1 | HDFGRTR_EL2_PMSFCR_EL1 |
-			 HDFGRTR_EL2_PMSICR_EL1 | HDFGRTR_EL2_PMSIDR_EL1 |
-			 HDFGRTR_EL2_PMSIRR_EL1 | HDFGRTR_EL2_PMSLATFR_EL1 |
-			 HDFGRTR_EL2_PMBIDR_EL1);
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceVer, IMP))
-		res0 |= (HDFGRTR_EL2_TRC | HDFGRTR_EL2_TRCAUTHSTATUS |
-			 HDFGRTR_EL2_TRCAUXCTLR | HDFGRTR_EL2_TRCCLAIM |
-			 HDFGRTR_EL2_TRCCNTVRn | HDFGRTR_EL2_TRCID |
-			 HDFGRTR_EL2_TRCIMSPECn | HDFGRTR_EL2_TRCOSLSR |
-			 HDFGRTR_EL2_TRCPRGCTLR | HDFGRTR_EL2_TRCSEQSTR |
-			 HDFGRTR_EL2_TRCSSCSRn | HDFGRTR_EL2_TRCSTATR |
-			 HDFGRTR_EL2_TRCVICTLR);
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceBuffer, IMP))
-		res0 |= (HDFGRTR_EL2_TRBBASER_EL1 | HDFGRTR_EL2_TRBIDR_EL1 |
-			 HDFGRTR_EL2_TRBLIMITR_EL1 | HDFGRTR_EL2_TRBMAR_EL1 |
-			 HDFGRTR_EL2_TRBPTR_EL1 | HDFGRTR_EL2_TRBSR_EL1 |
-			 HDFGRTR_EL2_TRBTRG_EL1);
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, BRBE, IMP))
-		res0 |= (HDFGRTR_EL2_nBRBIDR | HDFGRTR_EL2_nBRBCTL |
-			 HDFGRTR_EL2_nBRBDATA);
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMSVer, V1P2))
-		res0 |= HDFGRTR_EL2_nPMSNEVFR_EL1;
-	set_sysreg_masks(kvm, HDFGRTR_EL2, res0 | hdfgrtr_masks.res0, res1);
-
-	/* Reuse the bits from the read-side and add the write-specific stuff */
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, PMUVer, IMP))
-		res0 |= (HDFGWTR_EL2_PMCR_EL0 | HDFGWTR_EL2_PMSWINC_EL0);
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceVer, IMP))
-		res0 |= HDFGWTR_EL2_TRCOSLAR;
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, TraceFilt, IMP))
-		res0 |= HDFGWTR_EL2_TRFCR_EL1;
-	set_sysreg_masks(kvm, HFGWTR_EL2, res0 | hdfgwtr_masks.res0, res1);
+	get_reg_fixed_bits(kvm, HDFGRTR_EL2, &res0, &res1);
+	set_sysreg_masks(kvm, HDFGRTR_EL2, res0, res1);
+	get_reg_fixed_bits(kvm, HDFGWTR_EL2, &res0, &res1);
+	set_sysreg_masks(kvm, HDFGWTR_EL2, res0, res1);
 
 	/* HFGITR_EL2 */
-	res0 = hfgitr_masks.res0;
-	res1 = HFGITR_EL2_RES1;
-	if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, DPB, DPB2))
-		res0 |= HFGITR_EL2_DCCVADP;
-	if (!kvm_has_feat(kvm, ID_AA64MMFR1_EL1, PAN, PAN2))
-		res0 |= (HFGITR_EL2_ATS1E1RP | HFGITR_EL2_ATS1E1WP);
-	if (!kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TLB, OS))
-		res0 |= (HFGITR_EL2_TLBIRVAALE1OS | HFGITR_EL2_TLBIRVALE1OS |
-			 HFGITR_EL2_TLBIRVAAE1OS | HFGITR_EL2_TLBIRVAE1OS |
-			 HFGITR_EL2_TLBIVAALE1OS | HFGITR_EL2_TLBIVALE1OS |
-			 HFGITR_EL2_TLBIVAAE1OS | HFGITR_EL2_TLBIASIDE1OS |
-			 HFGITR_EL2_TLBIVAE1OS | HFGITR_EL2_TLBIVMALLE1OS);
-	if (!kvm_has_feat(kvm, ID_AA64ISAR0_EL1, TLB, RANGE))
-		res0 |= (HFGITR_EL2_TLBIRVAALE1 | HFGITR_EL2_TLBIRVALE1 |
-			 HFGITR_EL2_TLBIRVAAE1 | HFGITR_EL2_TLBIRVAE1 |
-			 HFGITR_EL2_TLBIRVAALE1IS | HFGITR_EL2_TLBIRVALE1IS |
-			 HFGITR_EL2_TLBIRVAAE1IS | HFGITR_EL2_TLBIRVAE1IS |
-			 HFGITR_EL2_TLBIRVAALE1OS | HFGITR_EL2_TLBIRVALE1OS |
-			 HFGITR_EL2_TLBIRVAAE1OS | HFGITR_EL2_TLBIRVAE1OS);
-	if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, SPECRES, IMP))
-		res0 |= (HFGITR_EL2_CFPRCTX | HFGITR_EL2_DVPRCTX |
-			 HFGITR_EL2_CPPRCTX);
-	if (!kvm_has_feat(kvm, ID_AA64DFR0_EL1, BRBE, IMP))
-		res0 |= (HFGITR_EL2_nBRBINJ | HFGITR_EL2_nBRBIALL);
-	if (!kvm_has_feat(kvm, ID_AA64PFR1_EL1, GCS, IMP))
-		res0 |= (HFGITR_EL2_nGCSPUSHM_EL1 | HFGITR_EL2_nGCSSTR_EL1 |
-			 HFGITR_EL2_nGCSEPP);
-	if (!kvm_has_feat(kvm, ID_AA64ISAR1_EL1, SPECRES, COSP_RCTX))
-		res0 |= HFGITR_EL2_COSPRCTX;
-	if (!kvm_has_feat(kvm, ID_AA64ISAR2_EL1, ATS1A, IMP))
-		res0 |= HFGITR_EL2_ATS1E1A;
+	get_reg_fixed_bits(kvm, HFGITR_EL2, &res0, &res1);
 	set_sysreg_masks(kvm, HFGITR_EL2, res0, res1);
 
 	/* HAFGRTR_EL2 - not a lot to see here */
-	res0 = hafgrtr_masks.res0;
-	res1 = HAFGRTR_EL2_RES1;
-	if (!kvm_has_feat(kvm, ID_AA64PFR0_EL1, AMU, V1P1))
-		res0 |= ~(res0 | res1);
+	get_reg_fixed_bits(kvm, HAFGRTR_EL2, &res0, &res1);
 	set_sysreg_masks(kvm, HAFGRTR_EL2, res0, res1);
 
 	/* TCR2_EL2 */
