@@ -69,7 +69,10 @@ struct reg_bits_to_feat_map {
69
69
/*
 * Feature aliases: each expands to the (ID register, field, minimum value)
 * triple consumed by kvm_has_feat().
 */
#define FEAT_TRBE	ID_AA64DFR0_EL1, TraceBuffer, IMP
#define FEAT_DoubleLock	ID_AA64DFR0_EL1, DoubleLock, IMP
#define FEAT_TRF	ID_AA64DFR0_EL1, TraceFilt, IMP
#define FEAT_AA32EL0	ID_AA64PFR0_EL1, EL0, AARCH32
#define FEAT_AA32EL1	ID_AA64PFR0_EL1, EL1, AARCH32
#define FEAT_AA64EL1	ID_AA64PFR0_EL1, EL1, IMP
#define FEAT_AA64EL3	ID_AA64PFR0_EL1, EL3, IMP
#define FEAT_AIE	ID_AA64MMFR3_EL1, AIE, IMP
#define FEAT_S2POE	ID_AA64MMFR3_EL1, S2POE, IMP
#define FEAT_S1POE	ID_AA64MMFR3_EL1, S1POE, IMP
@@ -92,6 +95,7 @@ struct reg_bits_to_feat_map {
92
95
/* (ID register, field, minimum value) aliases for kvm_has_feat() */
#define FEAT_PAN2		ID_AA64MMFR1_EL1, PAN, PAN2
#define FEAT_DPB2		ID_AA64ISAR1_EL1, DPB, DPB2
#define FEAT_AMUv1		ID_AA64PFR0_EL1, AMU, IMP
#define FEAT_AMUv1p1		ID_AA64PFR0_EL1, AMU, V1P1
#define FEAT_CMOW		ID_AA64MMFR1_EL1, CMOW, IMP
#define FEAT_D128		ID_AA64MMFR3_EL1, D128, IMP
#define FEAT_DoubleFault2	ID_AA64PFR1_EL1, DF2, IMP
@@ -102,6 +106,31 @@ struct reg_bits_to_feat_map {
102
106
/* (ID register, field, minimum value) aliases for kvm_has_feat() */
#define FEAT_SYSREG128	ID_AA64ISAR2_EL1, SYSREG_128, IMP
#define FEAT_TCR2	ID_AA64MMFR3_EL1, TCRX, IMP
#define FEAT_XS		ID_AA64ISAR1_EL1, XS, IMP
#define FEAT_EVT	ID_AA64MMFR2_EL1, EVT, IMP
#define FEAT_EVT_TTLBxS	ID_AA64MMFR2_EL1, EVT, TTLBxS
#define FEAT_MTE2	ID_AA64PFR1_EL1, MTE, MTE2
#define FEAT_RME	ID_AA64PFR0_EL1, RME, IMP
#define FEAT_S2FWB	ID_AA64MMFR2_EL1, FWB, IMP
#define FEAT_TME	ID_AA64ISAR0_EL1, TME, IMP
#define FEAT_TWED	ID_AA64MMFR1_EL1, TWED, IMP
#define FEAT_E2H0	ID_AA64MMFR4_EL1, E2H0, IMP
+
118
+ static bool not_feat_aa64el3 (struct kvm * kvm )
119
+ {
120
+ return !kvm_has_feat (kvm , FEAT_AA64EL3 );
121
+ }
122
+
123
+ static bool feat_nv2 (struct kvm * kvm )
124
+ {
125
+ return ((kvm_has_feat (kvm , ID_AA64MMFR4_EL1 , NV_frac , NV2_ONLY ) &&
126
+ kvm_has_feat_enum (kvm , ID_AA64MMFR2_EL1 , NV , NI )) ||
127
+ kvm_has_feat (kvm , ID_AA64MMFR2_EL1 , NV , NV2 ));
128
+ }
129
+
130
+ static bool feat_nv2_e2h0_ni (struct kvm * kvm )
131
+ {
132
+ return feat_nv2 (kvm ) && !kvm_has_feat (kvm , FEAT_E2H0 );
133
+ }
105
134
106
135
static bool feat_rasv1p1 (struct kvm * kvm )
107
136
{
@@ -151,6 +180,31 @@ static bool feat_sme_smps(struct kvm *kvm)
151
180
(read_sysreg_s (SYS_SMIDR_EL1 ) & SMIDR_EL1_SMPS ));
152
181
}
153
182
183
+ static bool compute_hcr_rw (struct kvm * kvm , u64 * bits )
184
+ {
185
+ /* This is purely academic: AArch32 and NV are mutually exclusive */
186
+ if (bits ) {
187
+ if (kvm_has_feat (kvm , FEAT_AA32EL1 ))
188
+ * bits &= ~HCR_EL2_RW ;
189
+ else
190
+ * bits |= HCR_EL2_RW ;
191
+ }
192
+
193
+ return true;
194
+ }
195
+
196
+ static bool compute_hcr_e2h (struct kvm * kvm , u64 * bits )
197
+ {
198
+ if (bits ) {
199
+ if (kvm_has_feat (kvm , FEAT_E2H0 ))
200
+ * bits &= ~HCR_EL2_E2H ;
201
+ else
202
+ * bits |= HCR_EL2_E2H ;
203
+ }
204
+
205
+ return true;
206
+ }
207
+
154
208
static const struct reg_bits_to_feat_map hfgrtr_feat_map [] = {
155
209
NEEDS_FEAT (HFGRTR_EL2_nAMAIR2_EL1 |
156
210
HFGRTR_EL2_nMAIR2_EL1 ,
@@ -564,6 +618,77 @@ static const struct reg_bits_to_feat_map hcrx_feat_map[] = {
564
618
NEEDS_FEAT (HCRX_EL2_EnAS0 , FEAT_LS64_ACCDATA ),
565
619
};
566
620
621
+ static const struct reg_bits_to_feat_map hcr_feat_map [] = {
622
+ NEEDS_FEAT (HCR_EL2_TID0 , FEAT_AA32EL0 ),
623
+ NEEDS_FEAT_FIXED (HCR_EL2_RW , compute_hcr_rw ),
624
+ NEEDS_FEAT (HCR_EL2_HCD , not_feat_aa64el3 ),
625
+ NEEDS_FEAT (HCR_EL2_AMO |
626
+ HCR_EL2_BSU |
627
+ HCR_EL2_CD |
628
+ HCR_EL2_DC |
629
+ HCR_EL2_FB |
630
+ HCR_EL2_FMO |
631
+ HCR_EL2_ID |
632
+ HCR_EL2_IMO |
633
+ HCR_EL2_MIOCNCE |
634
+ HCR_EL2_PTW |
635
+ HCR_EL2_SWIO |
636
+ HCR_EL2_TACR |
637
+ HCR_EL2_TDZ |
638
+ HCR_EL2_TGE |
639
+ HCR_EL2_TID1 |
640
+ HCR_EL2_TID2 |
641
+ HCR_EL2_TID3 |
642
+ HCR_EL2_TIDCP |
643
+ HCR_EL2_TPCP |
644
+ HCR_EL2_TPU |
645
+ HCR_EL2_TRVM |
646
+ HCR_EL2_TSC |
647
+ HCR_EL2_TSW |
648
+ HCR_EL2_TTLB |
649
+ HCR_EL2_TVM |
650
+ HCR_EL2_TWE |
651
+ HCR_EL2_TWI |
652
+ HCR_EL2_VF |
653
+ HCR_EL2_VI |
654
+ HCR_EL2_VM |
655
+ HCR_EL2_VSE ,
656
+ FEAT_AA64EL1 ),
657
+ NEEDS_FEAT (HCR_EL2_AMVOFFEN , FEAT_AMUv1p1 ),
658
+ NEEDS_FEAT (HCR_EL2_EnSCXT , feat_csv2_2_csv2_1p2 ),
659
+ NEEDS_FEAT (HCR_EL2_TICAB |
660
+ HCR_EL2_TID4 |
661
+ HCR_EL2_TOCU ,
662
+ FEAT_EVT ),
663
+ NEEDS_FEAT (HCR_EL2_TTLBIS |
664
+ HCR_EL2_TTLBOS ,
665
+ FEAT_EVT_TTLBxS ),
666
+ NEEDS_FEAT (HCR_EL2_TLOR , FEAT_LOR ),
667
+ NEEDS_FEAT (HCR_EL2_ATA |
668
+ HCR_EL2_DCT |
669
+ HCR_EL2_TID5 ,
670
+ FEAT_MTE2 ),
671
+ NEEDS_FEAT (HCR_EL2_AT | /* Ignore the original FEAT_NV */
672
+ HCR_EL2_NV2 |
673
+ HCR_EL2_NV ,
674
+ feat_nv2 ),
675
+ NEEDS_FEAT (HCR_EL2_NV1 , feat_nv2_e2h0_ni ), /* Missing from JSON */
676
+ NEEDS_FEAT (HCR_EL2_API |
677
+ HCR_EL2_APK ,
678
+ feat_pauth ),
679
+ NEEDS_FEAT (HCR_EL2_TEA |
680
+ HCR_EL2_TERR ,
681
+ FEAT_RAS ),
682
+ NEEDS_FEAT (HCR_EL2_FIEN , feat_rasv1p1 ),
683
+ NEEDS_FEAT (HCR_EL2_GPF , FEAT_RME ),
684
+ NEEDS_FEAT (HCR_EL2_FWB , FEAT_S2FWB ),
685
+ NEEDS_FEAT (HCR_EL2_TME , FEAT_TME ),
686
+ NEEDS_FEAT (HCR_EL2_TWEDEL |
687
+ HCR_EL2_TWEDEn ,
688
+ FEAT_TWED ),
689
+ NEEDS_FEAT_FIXED (HCR_EL2_E2H , compute_hcr_e2h ),
690
+ };
691
+
567
692
static void __init check_feat_map (const struct reg_bits_to_feat_map * map ,
568
693
int map_size , u64 res0 , const char * str )
569
694
{
@@ -593,6 +718,8 @@ void __init check_feature_map(void)
593
718
hafgrtr_masks .res0 , hafgrtr_masks .str );
594
719
check_feat_map (hcrx_feat_map , ARRAY_SIZE (hcrx_feat_map ),
595
720
__HCRX_EL2_RES0 , "HCRX_EL2" );
721
+ check_feat_map (hcr_feat_map , ARRAY_SIZE (hcr_feat_map ),
722
+ HCR_EL2_RES0 , "HCR_EL2" );
596
723
}
597
724
598
725
static bool idreg_feat_match (struct kvm * kvm , const struct reg_bits_to_feat_map * map )
@@ -651,6 +778,17 @@ static u64 compute_res0_bits(struct kvm *kvm,
651
778
require , exclude | FIXED_VALUE );
652
779
}
653
780
781
+ static u64 compute_fixed_bits (struct kvm * kvm ,
782
+ const struct reg_bits_to_feat_map * map ,
783
+ int map_size ,
784
+ u64 * fixed_bits ,
785
+ unsigned long require ,
786
+ unsigned long exclude )
787
+ {
788
+ return __compute_fixed_bits (kvm , map , map_size , fixed_bits ,
789
+ require | FIXED_VALUE , exclude );
790
+ }
791
+
654
792
void compute_fgu (struct kvm * kvm , enum fgt_group_id fgt )
655
793
{
656
794
u64 val = 0 ;
@@ -691,6 +829,8 @@ void compute_fgu(struct kvm *kvm, enum fgt_group_id fgt)
691
829
692
830
void get_reg_fixed_bits (struct kvm * kvm , enum vcpu_sysreg reg , u64 * res0 , u64 * res1 )
693
831
{
832
+ u64 fixed = 0 , mask ;
833
+
694
834
switch (reg ) {
695
835
case HFGRTR_EL2 :
696
836
* res0 = compute_res0_bits (kvm , hfgrtr_feat_map ,
@@ -734,6 +874,15 @@ void get_reg_fixed_bits(struct kvm *kvm, enum vcpu_sysreg reg, u64 *res0, u64 *r
734
874
* res0 |= __HCRX_EL2_RES0 ;
735
875
* res1 = __HCRX_EL2_RES1 ;
736
876
break ;
877
+ case HCR_EL2 :
878
+ mask = compute_fixed_bits (kvm , hcr_feat_map ,
879
+ ARRAY_SIZE (hcr_feat_map ), & fixed ,
880
+ 0 , 0 );
881
+ * res0 = compute_res0_bits (kvm , hcr_feat_map ,
882
+ ARRAY_SIZE (hcr_feat_map ), 0 , 0 );
883
+ * res0 |= HCR_EL2_RES0 | (mask & ~fixed );
884
+ * res1 = HCR_EL2_RES1 | (mask & fixed );
885
+ break ;
737
886
default :
738
887
WARN_ON_ONCE (1 );
739
888
* res0 = * res1 = 0 ;
0 commit comments