Browse Source

refactor(cpufeat): add macro to simplify is_feat_xx_present

This patch introduces the wrapper macro CREATE_FEATURE_PRESENT
to provide the following capabilities uniformly across all
features:

-> is_feat_xx_present(): checks whether the hardware implements
   the feature.
-> uniform naming of the function across multiple features.
-> improved readability.

The is_feat_xx_present() is implemented to check if the hardware
implements the feature and does not take into account the
ENABLE_FEAT_XXX flag enabled/disabled in software.

- CREATE_FEATURE_PRESENT(name, idreg, shift, mask, idval)
The wrapper macro reduces the function to a single line and
creates the is_feat_xx_present() function, which reads the
id register, isolates the field using the shift and mask
values, and compares the result against the given idval.

Change-Id: I7b91d2c9c6fbe55f94c693aa1b2c50be54fb9ecc
Signed-off-by: Sona Mathew <sonarebecca.mathew@arm.com>
pull/2000/merge
Sona Mathew 8 months ago
parent
commit
aaaf2cc313
  1. 4
      bl31/bl31_traps.c
  2. 37
      common/feat_detect.c
  3. 2
      drivers/arm/smmu/smmu_v3.c
  4. 213
      include/arch/aarch32/arch_features.h
  5. 451
      include/arch/aarch64/arch_features.h
  6. 2
      lib/el3_runtime/aarch32/context_mgmt.c
  7. 4
      lib/extensions/sme/sme.c
  8. 4
      lib/xlat_mpu/aarch64/xlat_mpu_arch.c
  9. 4
      lib/xlat_tables/aarch64/xlat_tables.c
  10. 4
      lib/xlat_tables_v2/aarch32/xlat_tables_arch.c
  11. 15
      lib/xlat_tables_v2/aarch64/xlat_tables_arch.c
  12. 4
      lib/xlat_tables_v2/xlat_tables_core.c
  13. 2
      plat/arm/common/arm_bl2_setup.c
  14. 2
      plat/qemu/common/qemu_bl2_setup.c
  15. 2
      services/std_svc/rmmd/rmmd_main.c

4
bl31/bl31_traps.c

@ -36,7 +36,7 @@ static bool is_tge_enabled(void)
{
u_register_t hcr_el2 = read_hcr_el2();
return ((read_feat_vhe_id_field() != 0U) && ((hcr_el2 & HCR_TGE_BIT) != 0U));
return ((is_feat_vhe_present()) && ((hcr_el2 & HCR_TGE_BIT) != 0U));
}
/*
@ -116,7 +116,7 @@ u_register_t create_spsr(u_register_t old_spsr, unsigned int target_el)
/* If FEAT_BTI is present, clear BTYPE bits */
new_spsr |= old_spsr & (SPSR_BTYPE_MASK_AARCH64 << SPSR_BTYPE_SHIFT_AARCH64);
if (is_armv8_5_bti_present()) {
if (is_feat_bti_present()) {
new_spsr &= ~(SPSR_BTYPE_MASK_AARCH64 << SPSR_BTYPE_SHIFT_AARCH64);
}

37
common/feat_detect.c

@ -76,7 +76,7 @@ static void read_feat_pauth(void)
static void read_feat_bti(void)
{
#if (ENABLE_BTI == FEAT_STATE_ALWAYS)
feat_detect_panic(is_armv8_5_bti_present(), "BTI");
feat_detect_panic(is_feat_bti_present(), "BTI");
#endif
}
@ -86,8 +86,7 @@ static void read_feat_bti(void)
static void read_feat_rme(void)
{
#if (ENABLE_RME == FEAT_STATE_ALWAYS)
feat_detect_panic((get_armv9_2_feat_rme_support() !=
RME_NOT_IMPLEMENTED), "RME");
feat_detect_panic(is_feat_rme_present(), "RME");
#endif
}
@ -129,10 +128,9 @@ void detect_arch_features(void)
tainted = false;
/* v8.0 features */
check_feature(ENABLE_FEAT_SB, read_feat_sb_id_field(), "SB",
SB_IMPLEMENTED, SB_IMPLEMENTED);
check_feature(ENABLE_FEAT_SB, read_feat_sb_id_field(), "SB", 1, 1);
check_feature(ENABLE_FEAT_CSV2_2, read_feat_csv2_id_field(),
"CSV2_2", CSV2_2_IMPLEMENTED, CSV2_3_IMPLEMENTED);
"CSV2_2", 2, 3);
/*
* Even though the PMUv3 is an OPTIONAL feature, it is always
* implemented and Arm prescribes so. So assume it will be there and do
@ -143,27 +141,25 @@ void detect_arch_features(void)
"PMUv3", 1, ID_AA64DFR0_PMUVER_PMUV3P7);
/* v8.1 features */
check_feature(ENABLE_FEAT_PAN, read_feat_pan_id_field(), "PAN",
PAN_IMPLEMENTED, PAN3_IMPLEMENTED);
check_feature(ENABLE_FEAT_PAN, read_feat_pan_id_field(), "PAN", 1, 3);
check_feature(ENABLE_FEAT_VHE, read_feat_vhe_id_field(), "VHE", 1, 1);
/* v8.2 features */
check_feature(ENABLE_SVE_FOR_NS, read_feat_sve_id_field(),
"SVE", SVE_IMPLEMENTED, SVE_IMPLEMENTED);
"SVE", 1, 1);
check_feature(ENABLE_FEAT_RAS, read_feat_ras_id_field(), "RAS", 1, 2);
/* v8.3 features */
read_feat_pauth();
/* v8.4 features */
check_feature(ENABLE_FEAT_DIT, read_feat_dit_id_field(), "DIT",
DIT_IMPLEMENTED, DIT_IMPLEMENTED);
check_feature(ENABLE_FEAT_DIT, read_feat_dit_id_field(), "DIT", 1, 1);
check_feature(ENABLE_FEAT_AMU, read_feat_amu_id_field(),
"AMUv1", 1, 2);
check_feature(ENABLE_FEAT_MPAM, read_feat_mpam_version(),
"MPAM", 1, 17);
check_feature(CTX_INCLUDE_NEVE_REGS, read_feat_nv_id_field(),
"NV2", NV2_IMPLEMENTED, NV2_IMPLEMENTED);
"NV2", 2, 2);
check_feature(ENABLE_FEAT_SEL2, read_feat_sel2_id_field(),
"SEL2", 1, 1);
check_feature(ENABLE_TRF_FOR_NS, read_feat_trf_id_field(),
@ -180,22 +176,19 @@ void detect_arch_features(void)
check_feature(ENABLE_FEAT_AMUv1p1, read_feat_amu_id_field(),
"AMUv1p1", 2, 2);
check_feature(ENABLE_FEAT_FGT, read_feat_fgt_id_field(), "FGT", 1, 1);
check_feature(ENABLE_FEAT_ECV, read_feat_ecv_id_field(), "ECV",
ECV_IMPLEMENTED, 2);
check_feature(ENABLE_FEAT_ECV, read_feat_ecv_id_field(), "ECV", 1, 2);
check_feature(ENABLE_FEAT_TWED, read_feat_twed_id_field(),
"TWED", TWED_IMPLEMENTED, TWED_IMPLEMENTED);
"TWED", 1, 1);
/*
* even though this is a "DISABLE" it does confusingly perform feature
* enablement duties like all other flags here. Check it against the HW
* feature when we intend to diverge from the default behaviour
*/
check_feature(DISABLE_MTPMU, read_feat_mtpmu_id_field(), "MTPMU",
MTPMU_IMPLEMENTED, MTPMU_IMPLEMENTED);
check_feature(DISABLE_MTPMU, read_feat_mtpmu_id_field(), "MTPMU", 1, 1);
/* v8.7 features */
check_feature(ENABLE_FEAT_HCX, read_feat_hcx_id_field(), "HCX",
HCX_IMPLEMENTED, HCX_IMPLEMENTED);
check_feature(ENABLE_FEAT_HCX, read_feat_hcx_id_field(), "HCX", 1, 1);
/* v8.9 features */
check_feature(ENABLE_FEAT_TCR2, read_feat_tcr2_id_field(),
@ -213,15 +206,15 @@ void detect_arch_features(void)
/* v9.0 features */
check_feature(ENABLE_BRBE_FOR_NS, read_feat_brbe_id_field(),
"BRBE", BRBE_IMPLEMENTED, 2);
"BRBE", 1, 2);
check_feature(ENABLE_TRBE_FOR_NS, read_feat_trbe_id_field(),
"TRBE", 1, 1);
/* v9.2 features */
check_feature(ENABLE_SME_FOR_NS, read_feat_sme_id_field(),
"SME", SME_IMPLEMENTED, SME2_IMPLEMENTED);
"SME", 1, 2);
check_feature(ENABLE_SME2_FOR_NS, read_feat_sme_id_field(),
"SME2", SME2_IMPLEMENTED, SME2_IMPLEMENTED);
"SME2", 2, 2);
/* v9.4 features */
check_feature(ENABLE_FEAT_GCS, read_feat_gcs_id_field(), "GCS", 1, 1);

2
drivers/arm/smmu/smmu_v3.c

@ -97,7 +97,7 @@ int __init smmuv3_init(uintptr_t smmu_base)
#if ENABLE_RME
if (get_armv9_2_feat_rme_support() != 0U) {
if (is_feat_rme_present()) {
if ((mmio_read_32(smmu_base + SMMU_ROOT_IDR0) &
SMMU_ROOT_IDR0_ROOT_IMPL) == 0U) {
WARN("Skip SMMU GPC configuration.\n");

213
include/arch/aarch32/arch_features.h

@ -12,132 +12,112 @@
#include <arch_helpers.h>
#include <common/feat_detect.h>
#define ISOLATE_FIELD(reg, feat) \
((unsigned int)(((reg) >> (feat ## _SHIFT)) & (feat ## _MASK)))
#define ISOLATE_FIELD(reg, feat, mask) \
((unsigned int)(((reg) >> (feat)) & mask))
static inline bool is_armv7_gentimer_present(void)
{
return ISOLATE_FIELD(read_id_pfr1(), ID_PFR1_GENTIMER) != 0U;
}
static inline bool is_armv8_2_ttcnp_present(void)
{
return ISOLATE_FIELD(read_id_mmfr4(), ID_MMFR4_CNP) != 0U;
}
static unsigned int read_feat_amu_id_field(void)
{
return ISOLATE_FIELD(read_id_pfr0(), ID_PFR0_AMU);
}
static inline bool is_feat_amu_supported(void)
{
if (ENABLE_FEAT_AMU == FEAT_STATE_DISABLED) {
return false;
}
if (ENABLE_FEAT_AMU == FEAT_STATE_ALWAYS) {
return true;
}
return read_feat_amu_id_field() >= ID_PFR0_AMU_V1;
#define CREATE_FEATURE_SUPPORTED(name, read_func, guard) \
static inline bool is_ ## name ## _supported(void) \
{ \
if ((guard) == FEAT_STATE_DISABLED) { \
return false; \
} \
if ((guard) == FEAT_STATE_ALWAYS) { \
return true; \
} \
return read_func(); \
}
static inline bool is_feat_amuv1p1_supported(void)
{
if (ENABLE_FEAT_AMUv1p1 == FEAT_STATE_DISABLED) {
return false;
}
if (ENABLE_FEAT_AMUv1p1 == FEAT_STATE_ALWAYS) {
return true;
}
return read_feat_amu_id_field() >= ID_PFR0_AMU_V1P1;
#define CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval) \
static inline bool is_ ## name ## _present(void) \
{ \
return (ISOLATE_FIELD(read_ ## idreg(), idfield, mask) >= idval) \
? true : false; \
}
static inline unsigned int read_feat_trf_id_field(void)
{
return ISOLATE_FIELD(read_id_dfr0(), ID_DFR0_TRACEFILT);
}
static inline bool is_feat_trf_supported(void)
{
if (ENABLE_TRF_FOR_NS == FEAT_STATE_DISABLED) {
return false;
}
#define CREATE_FEATURE_FUNCS(name, idreg, idfield, mask, idval, guard) \
CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval) \
CREATE_FEATURE_SUPPORTED(name, is_ ## name ## _present, guard)
if (ENABLE_TRF_FOR_NS == FEAT_STATE_ALWAYS) {
return true;
}
return read_feat_trf_id_field() != 0U;
}
static inline unsigned int read_feat_coptrc_id_field(void)
{
return ISOLATE_FIELD(read_id_dfr0(), ID_DFR0_COPTRC);
}
/*
* +----------------------------+
* | Features supported |
* +----------------------------+
* | GENTIMER |
* +----------------------------+
* | FEAT_TTCNP |
* +----------------------------+
* | FEAT_AMU |
* +----------------------------+
* | FEAT_AMUV1P1 |
* +----------------------------+
* | FEAT_TRF |
* +----------------------------+
* | FEAT_SYS_REG_TRACE |
* +----------------------------+
* | FEAT_DIT |
* +----------------------------+
* | FEAT_PAN |
* +----------------------------+
* | FEAT_SSBS |
* +----------------------------+
* | FEAT_PMUV3 |
* +----------------------------+
* | FEAT_MTPMU |
* +----------------------------+
*/
static inline bool is_feat_sys_reg_trace_supported(void)
/* GENTIMER */
static inline bool is_armv7_gentimer_present(void)
{
if (ENABLE_SYS_REG_TRACE_FOR_NS == FEAT_STATE_DISABLED) {
return false;
}
if (ENABLE_SYS_REG_TRACE_FOR_NS == FEAT_STATE_ALWAYS) {
return true;
}
return read_feat_coptrc_id_field() != 0U;
return ISOLATE_FIELD(read_id_pfr1(), ID_PFR1_GENTIMER_SHIFT,
ID_PFR1_GENTIMER_MASK) != 0U;
}
static inline unsigned int read_feat_dit_id_field(void)
{
return ISOLATE_FIELD(read_id_pfr0(), ID_PFR0_DIT);
}
/* FEAT_TTCNP: Translation table common not private */
CREATE_FEATURE_PRESENT(feat_ttcnp, id_mmfr4, ID_MMFR4_CNP_SHIFT,
ID_MMFR4_CNP_MASK, 1U)
static inline bool is_feat_dit_supported(void)
{
if (ENABLE_FEAT_DIT == FEAT_STATE_DISABLED) {
return false;
}
/* FEAT_AMU: Activity Monitors Extension */
CREATE_FEATURE_FUNCS(feat_amu, id_pfr0, ID_PFR0_AMU_SHIFT,
ID_PFR0_AMU_MASK, ID_PFR0_AMU_V1, ENABLE_FEAT_AMU)
if (ENABLE_FEAT_DIT == FEAT_STATE_ALWAYS) {
return true;
}
/* FEAT_AMUV1P1: AMU Extension v1.1 */
CREATE_FEATURE_FUNCS(feat_amuv1p1, id_pfr0, ID_PFR0_AMU_SHIFT,
ID_PFR0_AMU_MASK, ID_PFR0_AMU_V1P1, ENABLE_FEAT_AMUv1p1)
return read_feat_dit_id_field() != 0U;
}
/* FEAT_TRF: Tracefilter */
CREATE_FEATURE_FUNCS(feat_trf, id_dfr0, ID_DFR0_TRACEFILT_SHIFT,
ID_DFR0_TRACEFILT_MASK, 1U, ENABLE_TRF_FOR_NS)
static inline unsigned int read_feat_pan_id_field(void)
{
return ISOLATE_FIELD(read_id_mmfr3(), ID_MMFR3_PAN);
}
/* FEAT_SYS_REG_TRACE */
CREATE_FEATURE_FUNCS(feat_sys_reg_trace, id_dfr0, ID_DFR0_COPTRC_SHIFT,
ID_DFR0_COPTRC_MASK, 1U, ENABLE_SYS_REG_TRACE_FOR_NS)
static inline bool is_feat_pan_supported(void)
{
if (ENABLE_FEAT_PAN == FEAT_STATE_DISABLED) {
return false;
}
/* FEAT_DIT: Data independent timing */
CREATE_FEATURE_FUNCS(feat_dit, id_pfr0, ID_PFR0_DIT_SHIFT,
ID_PFR0_DIT_MASK, 1U, ENABLE_FEAT_DIT)
if (ENABLE_FEAT_PAN == FEAT_STATE_ALWAYS) {
return true;
}
/* FEAT_PAN: Privileged access never */
CREATE_FEATURE_FUNCS(feat_pan, id_mmfr3, ID_MMFR3_PAN_SHIFT,
ID_MMFR3_PAN_MASK, 1U, ENABLE_FEAT_PAN)
return read_feat_pan_id_field() != 0U;
}
/* FEAT_SSBS: Speculative store bypass safe */
CREATE_FEATURE_PRESENT(feat_ssbs, id_pfr2, ID_PFR2_SSBS_SHIFT,
ID_PFR2_SSBS_MASK, 1U)
static inline bool is_feat_pan_present(void)
{
return read_feat_pan_id_field() != 0U;
}
/* FEAT_PMUV3 */
CREATE_FEATURE_PRESENT(feat_pmuv3, id_dfr0, ID_DFR0_PERFMON_SHIFT,
ID_DFR0_PERFMON_MASK, 3U)
static inline unsigned int is_feat_ssbs_present(void)
/* FEAT_MTPMU */
static inline bool is_feat_mtpmu_present(void)
{
return ((read_id_pfr2() >> ID_PFR2_SSBS_SHIFT) &
ID_PFR2_SSBS_MASK) != SSBS_NOT_IMPLEMENTED;
unsigned int mtpmu = ISOLATE_FIELD(read_id_dfr1(), ID_DFR1_MTPMU_SHIFT,
ID_DFR1_MTPMU_MASK);
return (mtpmu != 0U) && (mtpmu != MTPMU_NOT_IMPLEMENTED);
}
CREATE_FEATURE_SUPPORTED(feat_mtpmu, is_feat_mtpmu_present, DISABLE_MTPMU)
/*
* TWED, ECV, CSV2, RAS are only used by the AArch64 EL2 context switch
@ -179,29 +159,4 @@ static inline bool is_feat_nmi_present(void) { return false; }
static inline bool is_feat_ebep_present(void) { return false; }
static inline bool is_feat_sebep_present(void) { return false; }
static inline unsigned int read_feat_pmuv3_id_field(void)
{
return ISOLATE_FIELD(read_id_dfr0(), ID_DFR0_PERFMON);
}
static inline unsigned int read_feat_mtpmu_id_field(void)
{
return ISOLATE_FIELD(read_id_dfr1(), ID_DFR1_MTPMU);
}
static inline bool is_feat_mtpmu_supported(void)
{
if (DISABLE_MTPMU == FEAT_STATE_DISABLED) {
return false;
}
if (DISABLE_MTPMU == FEAT_STATE_ALWAYS) {
return true;
}
unsigned int mtpmu = read_feat_mtpmu_id_field();
return ((mtpmu != 0U) && (mtpmu != MTPMU_NOT_IMPLEMENTED));
}
#endif /* ARCH_FEATURES_H */

451
include/arch/aarch64/arch_features.h

@ -12,27 +12,125 @@
#include <arch_helpers.h>
#include <common/feat_detect.h>
#define ISOLATE_FIELD(reg, feat) \
((unsigned int)(((reg) >> (feat)) & ID_REG_FIELD_MASK))
#define CREATE_FEATURE_FUNCS_VER(name, read_func, idvalue, guard) \
static inline bool is_ ## name ## _supported(void) \
{ \
if ((guard) == FEAT_STATE_DISABLED) { \
return false; \
} \
if ((guard) == FEAT_STATE_ALWAYS) { \
return true; \
} \
return read_func() >= (idvalue); \
#define ISOLATE_FIELD(reg, feat, mask) \
((unsigned int)(((reg) >> (feat)) & mask))
#define CREATE_FEATURE_SUPPORTED(name, read_func, guard) \
static inline bool is_ ## name ## _supported(void) \
{ \
if ((guard) == FEAT_STATE_DISABLED) { \
return false; \
} \
if ((guard) == FEAT_STATE_ALWAYS) { \
return true; \
} \
return read_func(); \
}
#define CREATE_FEATURE_FUNCS(name, idreg, idfield, guard) \
static unsigned int read_ ## name ## _id_field(void) \
{ \
return ISOLATE_FIELD(read_ ## idreg(), idfield); \
} \
CREATE_FEATURE_FUNCS_VER(name, read_ ## name ## _id_field, 1U, guard)
#define CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval) \
static inline bool is_ ## name ## _present(void) \
{ \
return (ISOLATE_FIELD(read_ ## idreg(), idfield, mask) >= idval) \
? true : false; \
}
#define CREATE_FEATURE_FUNCS(name, idreg, idfield, mask, idval, guard) \
CREATE_FEATURE_PRESENT(name, idreg, idfield, mask, idval) \
CREATE_FEATURE_SUPPORTED(name, is_ ## name ## _present, guard)
/* +----------------------------+
* | Features supported |
* +----------------------------+
* | GENTIMER |
* +----------------------------+
* | FEAT_PAN |
* +----------------------------+
* | FEAT_VHE |
* +----------------------------+
* | FEAT_TTCNP |
* +----------------------------+
* | FEAT_UAO |
* +----------------------------+
* | FEAT_PACQARMA3 |
* +----------------------------+
* | FEAT_PAUTH |
* +----------------------------+
* | FEAT_TTST |
* +----------------------------+
* | FEAT_BTI |
* +----------------------------+
* | FEAT_MTE2 |
* +----------------------------+
* | FEAT_SSBS |
* +----------------------------+
* | FEAT_NMI |
* +----------------------------+
* | FEAT_GCS |
* +----------------------------+
* | FEAT_EBEP |
* +----------------------------+
* | FEAT_SEBEP |
* +----------------------------+
* | FEAT_SEL2 |
* +----------------------------+
* | FEAT_TWED |
* +----------------------------+
* | FEAT_FGT |
* +----------------------------+
 * |       FEAT_ECV/ECV2        |
* +----------------------------+
* | FEAT_RNG |
* +----------------------------+
* | FEAT_TCR2 |
* +----------------------------+
* | FEAT_S2POE |
* +----------------------------+
* | FEAT_S1POE |
* +----------------------------+
* | FEAT_S2PIE |
* +----------------------------+
* | FEAT_S1PIE |
* +----------------------------+
* | FEAT_AMU/AMUV1P1 |
* +----------------------------+
* | FEAT_MPAM |
* +----------------------------+
* | FEAT_HCX |
* +----------------------------+
* | FEAT_RNG_TRAP |
* +----------------------------+
* | FEAT_RME |
* +----------------------------+
* | FEAT_SB |
* +----------------------------+
* | FEAT_CSV2/CSV3 |
* +----------------------------+
* | FEAT_SPE |
* +----------------------------+
* | FEAT_SVE |
* +----------------------------+
* | FEAT_RAS |
* +----------------------------+
* | FEAT_DIT |
* +----------------------------+
* | FEAT_SYS_REG_TRACE |
* +----------------------------+
* | FEAT_TRF |
* +----------------------------+
* | FEAT_NV/NV2 |
* +----------------------------+
* | FEAT_BRBE |
* +----------------------------+
* | FEAT_TRBE |
* +----------------------------+
* | FEAT_SME/SME2 |
* +----------------------------+
* | FEAT_PMUV3 |
* +----------------------------+
* | FEAT_MTPMU |
* +----------------------------+
*/
static inline bool is_armv7_gentimer_present(void)
{
@ -40,38 +138,28 @@ static inline bool is_armv7_gentimer_present(void)
return true;
}
/* FEAT_PAN: Privileged access never */
CREATE_FEATURE_FUNCS(feat_pan, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_PAN_SHIFT,
ENABLE_FEAT_PAN)
static inline bool is_feat_pan_present(void)
{
return read_feat_pan_id_field() != 0U;
}
ID_AA64MMFR1_EL1_PAN_MASK, 1U, ENABLE_FEAT_PAN)
/* FEAT_VHE: Virtualization Host Extensions */
CREATE_FEATURE_FUNCS(feat_vhe, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_VHE_SHIFT,
ENABLE_FEAT_VHE)
ID_AA64MMFR1_EL1_VHE_MASK, 1U, ENABLE_FEAT_VHE)
static inline bool is_armv8_2_ttcnp_present(void)
{
return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_CNP_SHIFT) &
ID_AA64MMFR2_EL1_CNP_MASK) != 0U;
}
/* FEAT_TTCNP: Translation table common not private */
CREATE_FEATURE_PRESENT(feat_ttcnp, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_CNP_SHIFT,
ID_AA64MMFR2_EL1_CNP_MASK, 1U)
static inline bool is_feat_uao_present(void)
{
return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_UAO_SHIFT) &
ID_AA64MMFR2_EL1_UAO_MASK) != 0U;
}
/* FEAT_UAO: User access override */
CREATE_FEATURE_PRESENT(feat_uao, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_UAO_SHIFT,
ID_AA64MMFR2_EL1_UAO_MASK, 1U)
static inline bool is_feat_pacqarma3_present(void)
{
uint64_t mask_id_aa64isar2 =
(ID_AA64ISAR2_GPA3_MASK << ID_AA64ISAR2_GPA3_SHIFT) |
(ID_AA64ISAR2_APA3_MASK << ID_AA64ISAR2_APA3_SHIFT);
/* If any of the fields is not zero, QARMA3 algorithm is present */
return (read_id_aa64isar2_el1() & mask_id_aa64isar2) != 0U;
}
/* If any of the fields is not zero, QARMA3 algorithm is present */
CREATE_FEATURE_PRESENT(feat_pacqarma3, id_aa64isar2_el1, 0,
((ID_AA64ISAR2_GPA3_MASK << ID_AA64ISAR2_GPA3_SHIFT) |
(ID_AA64ISAR2_APA3_MASK << ID_AA64ISAR2_APA3_SHIFT)), 1U)
/* PAUTH */
static inline bool is_armv8_3_pauth_present(void)
{
uint64_t mask_id_aa64isar1 =
@ -88,89 +176,81 @@ static inline bool is_armv8_3_pauth_present(void)
is_feat_pacqarma3_present());
}
static inline bool is_armv8_4_ttst_present(void)
{
return ((read_id_aa64mmfr2_el1() >> ID_AA64MMFR2_EL1_ST_SHIFT) &
ID_AA64MMFR2_EL1_ST_MASK) == 1U;
}
/* FEAT_TTST: Small translation tables */
CREATE_FEATURE_PRESENT(feat_ttst, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_ST_SHIFT,
ID_AA64MMFR2_EL1_ST_MASK, 1U)
static inline bool is_armv8_5_bti_present(void)
{
return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_BT_SHIFT) &
ID_AA64PFR1_EL1_BT_MASK) == BTI_IMPLEMENTED;
}
/* FEAT_BTI: Branch target identification */
CREATE_FEATURE_PRESENT(feat_bti, id_aa64pfr1_el1, ID_AA64PFR1_EL1_BT_SHIFT,
ID_AA64PFR1_EL1_BT_MASK, BTI_IMPLEMENTED)
static inline unsigned int get_armv8_5_mte_support(void)
{
return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_MTE_SHIFT) &
ID_AA64PFR1_EL1_MTE_MASK);
}
static inline unsigned int is_feat_mte2_present(void)
{
return get_armv8_5_mte_support() >= MTE_IMPLEMENTED_ELX;
}
static inline bool is_feat_ssbs_present(void)
{
return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_SSBS_SHIFT) &
ID_AA64PFR1_EL1_SSBS_MASK) != SSBS_NOT_IMPLEMENTED;
}
/* FEAT_MTE2: Memory tagging extension */
CREATE_FEATURE_FUNCS(feat_mte2, id_aa64pfr1_el1, ID_AA64PFR1_EL1_MTE_SHIFT,
ID_AA64PFR1_EL1_MTE_MASK, MTE_IMPLEMENTED_ELX, ENABLE_FEAT_MTE2)
static inline bool is_feat_nmi_present(void)
{
return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_NMI_SHIFT) &
ID_AA64PFR1_EL1_NMI_MASK) == NMI_IMPLEMENTED;
}
/* FEAT_SSBS: Speculative store bypass safe */
CREATE_FEATURE_PRESENT(feat_ssbs, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SSBS_SHIFT,
ID_AA64PFR1_EL1_SSBS_MASK, 1U)
static inline bool is_feat_gcs_present(void)
{
return ((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_GCS_SHIFT) &
ID_AA64PFR1_EL1_GCS_MASK) == GCS_IMPLEMENTED;
}
/* FEAT_NMI: Non-maskable interrupts */
CREATE_FEATURE_PRESENT(feat_nmi, id_aa64pfr1_el1, ID_AA64PFR1_EL1_NMI_SHIFT,
ID_AA64PFR1_EL1_NMI_MASK, NMI_IMPLEMENTED)
static inline bool is_feat_ebep_present(void)
{
return ((read_id_aa64dfr1_el1() >> ID_AA64DFR1_EBEP_SHIFT) &
ID_AA64DFR1_EBEP_MASK) == EBEP_IMPLEMENTED;
}
/* FEAT_EBEP */
CREATE_FEATURE_PRESENT(feat_ebep, id_aa64dfr1_el1, ID_AA64DFR1_EBEP_SHIFT,
ID_AA64DFR1_EBEP_MASK, EBEP_IMPLEMENTED)
static inline bool is_feat_sebep_present(void)
{
return ((read_id_aa64dfr0_el1() >> ID_AA64DFR0_SEBEP_SHIFT) &
ID_AA64DFR0_SEBEP_MASK) == SEBEP_IMPLEMENTED;
}
/* FEAT_SEBEP */
CREATE_FEATURE_PRESENT(feat_sebep, id_aa64dfr0_el1, ID_AA64DFR0_SEBEP_SHIFT,
ID_AA64DFR0_SEBEP_MASK, SEBEP_IMPLEMENTED)
CREATE_FEATURE_FUNCS_VER(feat_mte2, get_armv8_5_mte_support, MTE_IMPLEMENTED_ELX,
ENABLE_FEAT_MTE2)
/* FEAT_SEL2: Secure EL2 */
CREATE_FEATURE_FUNCS(feat_sel2, id_aa64pfr0_el1, ID_AA64PFR0_SEL2_SHIFT,
ENABLE_FEAT_SEL2)
ID_AA64PFR0_SEL2_MASK, 1U, ENABLE_FEAT_SEL2)
/* FEAT_TWED: Delayed trapping of WFE */
CREATE_FEATURE_FUNCS(feat_twed, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_TWED_SHIFT,
ENABLE_FEAT_TWED)
ID_AA64MMFR1_EL1_TWED_MASK, 1U, ENABLE_FEAT_TWED)
/* FEAT_FGT: Fine-grained traps */
CREATE_FEATURE_FUNCS(feat_fgt, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_FGT_SHIFT,
ENABLE_FEAT_FGT)
ID_AA64MMFR0_EL1_FGT_MASK, 1U, ENABLE_FEAT_FGT)
/* FEAT_ECV: Enhanced Counter Virtualization */
CREATE_FEATURE_FUNCS(feat_ecv, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_ECV_SHIFT,
ENABLE_FEAT_ECV)
CREATE_FEATURE_FUNCS_VER(feat_ecv_v2, read_feat_ecv_id_field,
ID_AA64MMFR0_EL1_ECV_SELF_SYNCH, ENABLE_FEAT_ECV)
ID_AA64MMFR0_EL1_ECV_MASK, 1U, ENABLE_FEAT_ECV)
CREATE_FEATURE_FUNCS(feat_ecv_v2, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_ECV_SHIFT,
ID_AA64MMFR0_EL1_ECV_MASK, ID_AA64MMFR0_EL1_ECV_SELF_SYNCH, ENABLE_FEAT_ECV)
/* FEAT_RNG: Random number generator */
CREATE_FEATURE_FUNCS(feat_rng, id_aa64isar0_el1, ID_AA64ISAR0_RNDR_SHIFT,
ENABLE_FEAT_RNG)
ID_AA64ISAR0_RNDR_MASK, 1U, ENABLE_FEAT_RNG)
/* FEAT_TCR2: Support TCR2_ELx regs */
CREATE_FEATURE_FUNCS(feat_tcr2, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_TCRX_SHIFT,
ENABLE_FEAT_TCR2)
ID_AA64MMFR3_EL1_TCRX_MASK, 1U, ENABLE_FEAT_TCR2)
/* FEAT_S2POE */
CREATE_FEATURE_FUNCS(feat_s2poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2POE_SHIFT,
ENABLE_FEAT_S2POE)
ID_AA64MMFR3_EL1_S2POE_MASK, 1U, ENABLE_FEAT_S2POE)
/* FEAT_S1POE */
CREATE_FEATURE_FUNCS(feat_s1poe, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1POE_SHIFT,
ENABLE_FEAT_S1POE)
ID_AA64MMFR3_EL1_S1POE_MASK, 1U, ENABLE_FEAT_S1POE)
static inline bool is_feat_sxpoe_supported(void)
{
return is_feat_s1poe_supported() || is_feat_s2poe_supported();
}
/* FEAT_S2PIE */
CREATE_FEATURE_FUNCS(feat_s2pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S2PIE_SHIFT,
ENABLE_FEAT_S2PIE)
ID_AA64MMFR3_EL1_S2PIE_MASK, 1U, ENABLE_FEAT_S2PIE)
/* FEAT_S1PIE */
CREATE_FEATURE_FUNCS(feat_s1pie, id_aa64mmfr3_el1, ID_AA64MMFR3_EL1_S1PIE_SHIFT,
ENABLE_FEAT_S1PIE)
ID_AA64MMFR3_EL1_S1PIE_MASK, 1U, ENABLE_FEAT_S1PIE)
static inline bool is_feat_sxpie_supported(void)
{
return is_feat_s1pie_supported() || is_feat_s2pie_supported();
@ -178,13 +258,15 @@ static inline bool is_feat_sxpie_supported(void)
/* FEAT_GCS: Guarded Control Stack */
CREATE_FEATURE_FUNCS(feat_gcs, id_aa64pfr1_el1, ID_AA64PFR1_EL1_GCS_SHIFT,
ENABLE_FEAT_GCS)
ID_AA64PFR1_EL1_GCS_MASK, 1U, ENABLE_FEAT_GCS)
/* FEAT_AMU: Activity Monitors Extension */
CREATE_FEATURE_FUNCS(feat_amu, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
ENABLE_FEAT_AMU)
CREATE_FEATURE_FUNCS_VER(feat_amuv1p1, read_feat_amu_id_field,
ID_AA64PFR0_AMU_V1P1, ENABLE_FEAT_AMUv1p1)
ID_AA64PFR0_AMU_MASK, 1U, ENABLE_FEAT_AMU)
/* FEAT_AMUV1P1: AMU Extension v1.1 */
CREATE_FEATURE_FUNCS(feat_amuv1p1, id_aa64pfr0_el1, ID_AA64PFR0_AMU_SHIFT,
ID_AA64PFR0_AMU_MASK, ID_AA64PFR0_AMU_V1P1, ENABLE_FEAT_AMUv1p1)
/*
* Return MPAM version:
@ -195,46 +277,32 @@ CREATE_FEATURE_FUNCS_VER(feat_amuv1p1, read_feat_amu_id_field,
* 0x11: v1.1 Armv8.4 or later
*
*/
static inline unsigned int read_feat_mpam_version(void)
static inline bool is_feat_mpam_present(void)
{
return (unsigned int)((((read_id_aa64pfr0_el1() >>
unsigned int ret = (unsigned int)((((read_id_aa64pfr0_el1() >>
ID_AA64PFR0_MPAM_SHIFT) & ID_AA64PFR0_MPAM_MASK) << 4) |
((read_id_aa64pfr1_el1() >>
ID_AA64PFR1_MPAM_FRAC_SHIFT) & ID_AA64PFR1_MPAM_FRAC_MASK));
((read_id_aa64pfr1_el1() >> ID_AA64PFR1_MPAM_FRAC_SHIFT)
& ID_AA64PFR1_MPAM_FRAC_MASK));
return ret;
}
CREATE_FEATURE_FUNCS_VER(feat_mpam, read_feat_mpam_version, 1U,
ENABLE_FEAT_MPAM)
CREATE_FEATURE_SUPPORTED(feat_mpam, is_feat_mpam_present, ENABLE_FEAT_MPAM)
/* FEAT_HCX: Extended Hypervisor Configuration Register */
CREATE_FEATURE_FUNCS(feat_hcx, id_aa64mmfr1_el1, ID_AA64MMFR1_EL1_HCX_SHIFT,
ENABLE_FEAT_HCX)
ID_AA64MMFR1_EL1_HCX_MASK, 1U, ENABLE_FEAT_HCX)
static inline bool is_feat_rng_trap_present(void)
{
return (((read_id_aa64pfr1_el1() >> ID_AA64PFR1_EL1_RNDR_TRAP_SHIFT) &
ID_AA64PFR1_EL1_RNDR_TRAP_MASK)
== RNG_TRAP_IMPLEMENTED);
}
/* FEAT_RNG_TRAP: Trapping support */
CREATE_FEATURE_PRESENT(feat_rng_trap, id_aa64pfr1_el1, ID_AA64PFR1_EL1_RNDR_TRAP_SHIFT,
ID_AA64PFR1_EL1_RNDR_TRAP_MASK, RNG_TRAP_IMPLEMENTED)
static inline unsigned int get_armv9_2_feat_rme_support(void)
{
/*
* Return the RME version, zero if not supported. This function can be
* used as both an integer value for the RME version or compared to zero
* to detect RME presence.
*/
return (unsigned int)(read_id_aa64pfr0_el1() >>
ID_AA64PFR0_FEAT_RME_SHIFT) & ID_AA64PFR0_FEAT_RME_MASK;
}
/* Return the RME version, zero if not supported. */
CREATE_FEATURE_FUNCS(feat_rme, id_aa64pfr0_el1, ID_AA64PFR0_FEAT_RME_SHIFT,
ID_AA64PFR0_FEAT_RME_MASK, 1U, ENABLE_RME)
/*********************************************************************************
* Function to identify the presence of FEAT_SB (Speculation Barrier Instruction)
********************************************************************************/
static inline unsigned int read_feat_sb_id_field(void)
{
return ISOLATE_FIELD(read_id_aa64isar1_el1(), ID_AA64ISAR1_SB_SHIFT);
}
/* FEAT_SB: Speculation barrier instruction */
CREATE_FEATURE_PRESENT(feat_sb, id_aa64isar1_el1, ID_AA64ISAR1_SB_SHIFT,
ID_AA64ISAR1_SB_MASK, 1U)
/*
* FEAT_CSV2: Cache Speculation Variant 2. This checks bit fields[56-59]
@ -248,109 +316,94 @@ static inline unsigned int read_feat_sb_id_field(void)
* implemented.
* 0b0011 - Feature FEAT_CSV2_3 is implemented.
*/
static inline unsigned int read_feat_csv2_id_field(void)
{
return (unsigned int)(read_id_aa64pfr0_el1() >>
ID_AA64PFR0_CSV2_SHIFT) & ID_AA64PFR0_CSV2_MASK;
}
CREATE_FEATURE_FUNCS_VER(feat_csv2_2, read_feat_csv2_id_field,
CSV2_2_IMPLEMENTED, ENABLE_FEAT_CSV2_2)
CREATE_FEATURE_FUNCS_VER(feat_csv2_3, read_feat_csv2_id_field,
CSV2_3_IMPLEMENTED, ENABLE_FEAT_CSV2_3)
CREATE_FEATURE_FUNCS(feat_csv2_2, id_aa64pfr0_el1, ID_AA64PFR0_CSV2_SHIFT,
ID_AA64PFR0_CSV2_MASK, CSV2_2_IMPLEMENTED, ENABLE_FEAT_CSV2_2)
CREATE_FEATURE_FUNCS(feat_csv2_3, id_aa64pfr0_el1, ID_AA64PFR0_CSV2_SHIFT,
ID_AA64PFR0_CSV2_MASK, CSV2_3_IMPLEMENTED, ENABLE_FEAT_CSV2_3)
/* FEAT_SPE: Statistical Profiling Extension */
CREATE_FEATURE_FUNCS(feat_spe, id_aa64dfr0_el1, ID_AA64DFR0_PMS_SHIFT,
ENABLE_SPE_FOR_NS)
ID_AA64DFR0_PMS_MASK, 1U, ENABLE_SPE_FOR_NS)
/* FEAT_SVE: Scalable Vector Extension */
CREATE_FEATURE_FUNCS(feat_sve, id_aa64pfr0_el1, ID_AA64PFR0_SVE_SHIFT,
ENABLE_SVE_FOR_NS)
ID_AA64PFR0_SVE_MASK, 1U, ENABLE_SVE_FOR_NS)
/* FEAT_RAS: Reliability, Accessibility, Serviceability */
CREATE_FEATURE_FUNCS(feat_ras, id_aa64pfr0_el1,
ID_AA64PFR0_RAS_SHIFT, ENABLE_FEAT_RAS)
CREATE_FEATURE_FUNCS(feat_ras, id_aa64pfr0_el1, ID_AA64PFR0_RAS_SHIFT,
ID_AA64PFR0_RAS_MASK, 1U, ENABLE_FEAT_RAS)
/* FEAT_DIT: Data Independent Timing instructions */
CREATE_FEATURE_FUNCS(feat_dit, id_aa64pfr0_el1,
ID_AA64PFR0_DIT_SHIFT, ENABLE_FEAT_DIT)
CREATE_FEATURE_FUNCS(feat_dit, id_aa64pfr0_el1, ID_AA64PFR0_DIT_SHIFT,
ID_AA64PFR0_DIT_MASK, 1U, ENABLE_FEAT_DIT)
CREATE_FEATURE_FUNCS(feat_sys_reg_trace, id_aa64dfr0_el1,
ID_AA64DFR0_TRACEVER_SHIFT, ENABLE_SYS_REG_TRACE_FOR_NS)
/* FEAT_SYS_REG_TRACE */
CREATE_FEATURE_FUNCS(feat_sys_reg_trace, id_aa64dfr0_el1, ID_AA64DFR0_TRACEVER_SHIFT,
ID_AA64DFR0_TRACEVER_MASK, 1U, ENABLE_SYS_REG_TRACE_FOR_NS)
/* FEAT_TRF: TraceFilter */
CREATE_FEATURE_FUNCS(feat_trf, id_aa64dfr0_el1, ID_AA64DFR0_TRACEFILT_SHIFT,
ENABLE_TRF_FOR_NS)
ID_AA64DFR0_TRACEFILT_MASK, 1U, ENABLE_TRF_FOR_NS)
/* FEAT_NV2: Enhanced Nested Virtualization */
CREATE_FEATURE_FUNCS(feat_nv, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_NV_SHIFT, 0)
CREATE_FEATURE_FUNCS_VER(feat_nv2, read_feat_nv_id_field,
NV2_IMPLEMENTED, CTX_INCLUDE_NEVE_REGS)
CREATE_FEATURE_FUNCS(feat_nv, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_NV_SHIFT,
ID_AA64MMFR2_EL1_NV_MASK, 1U, 0U)
CREATE_FEATURE_FUNCS(feat_nv2, id_aa64mmfr2_el1, ID_AA64MMFR2_EL1_NV_SHIFT,
ID_AA64MMFR2_EL1_NV_MASK, NV2_IMPLEMENTED, CTX_INCLUDE_NEVE_REGS)
/* FEAT_BRBE: Branch Record Buffer Extension */
CREATE_FEATURE_FUNCS(feat_brbe, id_aa64dfr0_el1, ID_AA64DFR0_BRBE_SHIFT,
ENABLE_BRBE_FOR_NS)
ID_AA64DFR0_BRBE_MASK, 1U, ENABLE_BRBE_FOR_NS)
/* FEAT_TRBE: Trace Buffer Extension */
CREATE_FEATURE_FUNCS(feat_trbe, id_aa64dfr0_el1, ID_AA64DFR0_TRACEBUFFER_SHIFT,
ENABLE_TRBE_FOR_NS)
ID_AA64DFR0_TRACEBUFFER_MASK, 1U, ENABLE_TRBE_FOR_NS)
/* FEAT_SME_FA64: Full A64 Instruction support in streaming SVE mode */
CREATE_FEATURE_PRESENT(feat_sme_fa64, id_aa64smfr0_el1, ID_AA64SMFR0_EL1_SME_FA64_SHIFT,
ID_AA64SMFR0_EL1_SME_FA64_MASK, 1U)
static inline unsigned int read_feat_sme_fa64_id_field(void)
{
return ISOLATE_FIELD(read_id_aa64smfr0_el1(),
ID_AA64SMFR0_EL1_SME_FA64_SHIFT);
}
/* FEAT_SMEx: Scalable Matrix Extension */
CREATE_FEATURE_FUNCS(feat_sme, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SME_SHIFT,
ENABLE_SME_FOR_NS)
CREATE_FEATURE_FUNCS_VER(feat_sme2, read_feat_sme_id_field,
SME2_IMPLEMENTED, ENABLE_SME2_FOR_NS)
ID_AA64PFR1_EL1_SME_MASK, 1U, ENABLE_SME_FOR_NS)
CREATE_FEATURE_FUNCS(feat_sme2, id_aa64pfr1_el1, ID_AA64PFR1_EL1_SME_SHIFT,
ID_AA64PFR1_EL1_SME_MASK, SME2_IMPLEMENTED, ENABLE_SME2_FOR_NS)
/*******************************************************************************
* Function to get hardware granularity support
******************************************************************************/
static inline unsigned int read_id_aa64mmfr0_el0_tgran4_field(void)
{
return ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
ID_AA64MMFR0_EL1_TGRAN4_SHIFT);
}
static inline unsigned int read_id_aa64mmfr0_el0_tgran16_field(void)
static inline bool is_feat_tgran4K_present(void)
{
return ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
ID_AA64MMFR0_EL1_TGRAN16_SHIFT);
unsigned int tgranx = ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
ID_AA64MMFR0_EL1_TGRAN4_SHIFT, ID_REG_FIELD_MASK);
return (tgranx < 8U);
}
static inline unsigned int read_id_aa64mmfr0_el0_tgran64_field(void)
{
return ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
ID_AA64MMFR0_EL1_TGRAN64_SHIFT);
}
CREATE_FEATURE_PRESENT(feat_tgran16K, id_aa64mmfr0_el1, ID_AA64MMFR0_EL1_TGRAN16_SHIFT,
ID_AA64MMFR0_EL1_TGRAN16_MASK, TGRAN16_IMPLEMENTED)
static inline unsigned int read_feat_pmuv3_id_field(void)
static inline bool is_feat_tgran64K_present(void)
{
return ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_PMUVER_SHIFT);
unsigned int tgranx = ISOLATE_FIELD(read_id_aa64mmfr0_el1(),
ID_AA64MMFR0_EL1_TGRAN64_SHIFT, ID_REG_FIELD_MASK);
return (tgranx < 8U);
}
static inline unsigned int read_feat_mtpmu_id_field(void)
{
return ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_MTPMU_SHIFT);
}
/* FEAT_PMUV3 */
CREATE_FEATURE_PRESENT(feat_pmuv3, id_aa64dfr0_el1, ID_AA64DFR0_PMUVER_SHIFT,
ID_AA64DFR0_PMUVER_MASK, 1U)
static inline bool is_feat_mtpmu_supported(void)
/* FEAT_MTPMU */
static inline bool is_feat_mtpmu_present(void)
{
if (DISABLE_MTPMU == FEAT_STATE_DISABLED) {
return false;
}
if (DISABLE_MTPMU == FEAT_STATE_ALWAYS) {
return true;
}
unsigned int mtpmu = read_feat_mtpmu_id_field();
unsigned int mtpmu = ISOLATE_FIELD(read_id_aa64dfr0_el1(), ID_AA64DFR0_MTPMU_SHIFT,
ID_AA64DFR0_MTPMU_MASK);
return (mtpmu != 0U) && (mtpmu != MTPMU_NOT_IMPLEMENTED);
}
CREATE_FEATURE_SUPPORTED(feat_mtpmu, is_feat_mtpmu_present, DISABLE_MTPMU)
#endif /* ARCH_FEATURES_H */

2
lib/el3_runtime/aarch32/context_mgmt.c

@ -149,7 +149,7 @@ static void enable_extensions_nonsecure(bool el2_unused)
trf_init_el3();
}
if (read_feat_pmuv3_id_field() >= 3U) {
if (is_feat_pmuv3_present()) {
pmuv3_init_el3();
}
#endif /* IMAGE_BL32 */

4
lib/extensions/sme/sme.c

@ -1,5 +1,5 @@
/*
* Copyright (c) 2021-2023, Arm Limited and Contributors. All rights reserved.
* Copyright (c) 2021-2024, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -53,7 +53,7 @@ void sme_init_el3(void)
* using SMCR_EL2 and SMCR_EL1.
*/
smcr_el3 = SMCR_ELX_LEN_MAX;
if (read_feat_sme_fa64_id_field() != 0U) {
if (is_feat_sme_fa64_present()) {
VERBOSE("[SME] FA64 enabled\n");
smcr_el3 |= SMCR_ELX_FA64_BIT;
}

4
lib/xlat_mpu/aarch64/xlat_mpu_arch.c

@ -1,5 +1,5 @@
/*
* Copyright (c) 2021, Arm Limited and Contributors. All rights reserved.
* Copyright (c) 2021-2024, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -27,7 +27,7 @@ uintptr_t xlat_get_min_virt_addr_space_size(void)
{
uintptr_t ret;
if (is_armv8_4_ttst_present()) {
if (is_feat_ttst_present()) {
ret = MIN_VIRT_ADDR_SPACE_SIZE_TTST;
} else {
ret = MIN_VIRT_ADDR_SPACE_SIZE;

4
lib/xlat_tables/aarch64/xlat_tables.c

@ -1,5 +1,5 @@
/*
* Copyright (c) 2014-2021, Arm Limited and Contributors. All rights reserved.
* Copyright (c) 2014-2024, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -87,7 +87,7 @@ static uintptr_t xlat_get_min_virt_addr_space_size(void)
{
uintptr_t ret;
if (is_armv8_4_ttst_present())
if (is_feat_ttst_present())
ret = MIN_VIRT_ADDR_SPACE_SIZE_TTST;
else
ret = MIN_VIRT_ADDR_SPACE_SIZE;

4
lib/xlat_tables_v2/aarch32/xlat_tables_arch.c

@ -1,5 +1,5 @@
/*
* Copyright (c) 2017-2021, Arm Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2024, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -248,7 +248,7 @@ void setup_mmu_cfg(uint64_t *params, unsigned int flags,
/* Set TTBR0 bits as well */
ttbr0 = (uint64_t)(uintptr_t) base_table;
if (is_armv8_2_ttcnp_present()) {
if (is_feat_ttcnp_present()) {
/* Enable CnP bit so as to share page tables with all PEs. */
ttbr0 |= TTBR_CNP_BIT;
}

15
lib/xlat_tables_v2/aarch64/xlat_tables_arch.c

@ -22,19 +22,14 @@
*/
bool xlat_arch_is_granule_size_supported(size_t size)
{
unsigned int tgranx;
if (size == PAGE_SIZE_4KB) {
tgranx = read_id_aa64mmfr0_el0_tgran4_field();
/* MSB of TGRAN4 field will be '1' for unsupported feature */
return (tgranx < 8U);
return is_feat_tgran4K_present();
} else if (size == PAGE_SIZE_16KB) {
tgranx = read_id_aa64mmfr0_el0_tgran16_field();
return (tgranx >= TGRAN16_IMPLEMENTED);
return is_feat_tgran16K_present();
} else if (size == PAGE_SIZE_64KB) {
tgranx = read_id_aa64mmfr0_el0_tgran64_field();
/* MSB of TGRAN64 field will be '1' for unsupported feature */
return (tgranx < 8U);
return is_feat_tgran64K_present();
} else {
return false;
}
@ -135,7 +130,7 @@ uintptr_t xlat_get_min_virt_addr_space_size(void)
{
uintptr_t ret;
if (is_armv8_4_ttst_present())
if (is_feat_ttst_present())
ret = MIN_VIRT_ADDR_SPACE_SIZE_TTST;
else
ret = MIN_VIRT_ADDR_SPACE_SIZE;
@ -312,7 +307,7 @@ void setup_mmu_cfg(uint64_t *params, unsigned int flags,
/* Set TTBR bits as well */
ttbr0 = (uint64_t) base_table;
if (is_armv8_2_ttcnp_present()) {
if (is_feat_ttcnp_present()) {
/* Enable CnP bit so as to share page tables with all PEs. */
ttbr0 |= TTBR_CNP_BIT;
}

4
lib/xlat_tables_v2/xlat_tables_core.c

@ -1,5 +1,5 @@
/*
* Copyright (c) 2017-2021, Arm Limited and Contributors. All rights reserved.
* Copyright (c) 2017-2024, Arm Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@ -214,7 +214,7 @@ uint64_t xlat_desc(const xlat_ctx_t *ctx, uint32_t attr,
/* Set GP bit for block and page code entries
* if BTI mechanism is implemented.
*/
if (is_armv8_5_bti_present() &&
if (is_feat_bti_present() &&
((attr & (MT_TYPE_MASK | MT_RW |
MT_EXECUTE_NEVER)) == MT_CODE)) {
desc |= GP;

2
plat/arm/common/arm_bl2_setup.c

@ -217,7 +217,7 @@ void arm_bl2_plat_arch_setup(void)
#ifdef __aarch64__
#if ENABLE_RME
/* BL2 runs in EL3 when RME enabled. */
assert(get_armv9_2_feat_rme_support() != 0U);
assert(is_feat_rme_present());
enable_mmu_el3(0);
/* Initialise and enable granule protection after MMU. */

2
plat/qemu/common/qemu_bl2_setup.c

@ -217,7 +217,7 @@ void bl2_plat_arch_setup(void)
#if ENABLE_RME
/* BL2 runs in EL3 when RME enabled. */
assert(get_armv9_2_feat_rme_support() != 0U);
assert(is_feat_rme_present());
enable_mmu_el3(0);
/* Initialise and enable granule protection after MMU. */

2
services/std_svc/rmmd/rmmd_main.c

@ -202,7 +202,7 @@ int rmmd_setup(void)
int rc;
/* Make sure RME is supported. */
assert(get_armv9_2_feat_rme_support() != 0U);
assert(is_feat_rme_present());
rmm_ep_info = bl31_plat_get_next_image_ep_info(REALM);
if (rmm_ep_info == NULL) {

Loading…
Cancel
Save