refactor(cm): move MPAM3_EL3 reg to per world context

Move MPAM3_EL3 out of the per-CPU EL3 context and into the per-world
context, so the value is no longer duplicated on every CPU and the
context memory footprint shrinks.

Signed-off-by: Arvind Ram Prakash <arvind.ramprakash@arm.com>
Change-Id: Iddf020a5462737e01ac35e4f2b2b204a8759fafb
Author: Arvind Ram Prakash
Committed by: Jayanth Dodderi Chidanand
Ref: pull/2005/head
Commit: ac4f6aaf85
5 files changed:

  include/lib/el3_runtime/aarch64/context.h  | 19
  include/lib/extensions/mpam.h              |  4
  lib/el3_runtime/aarch64/context.S          |  4
  lib/el3_runtime/aarch64/context_mgmt.c     | 22
  lib/extensions/mpam/mpam.c                 |  8
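
For orientation, a minimal sketch of what moves where (simplified field lists and illustrative struct names; the real structures are accessed through the CTX_* offsets shown in the hunks below):

/* Before this patch: every CPU's EL3 state carried its own copy. */
typedef struct el3_state_before {
        uint64_t ctx_mpam3_el3;         /* duplicated once per CPU */
        /* ... other EL3 state ... */
} el3_state_before_t;

/* After this patch: one copy per security world, shared by all CPUs
 * (mirrors the per_world_context_t change in context.h below). */
typedef struct per_world_context_after {
        uint64_t ctx_cptr_el3;
        uint64_t ctx_zcr_el3;
        uint64_t ctx_mpam3_el3;         /* single per-world value */
} per_world_context_after_t;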

include/lib/el3_runtime/aarch64/context.h (19 changed lines)

@@ -62,23 +62,22 @@
 #define CTX_ELR_EL3 U(0x20)
 #define CTX_PMCR_EL0 U(0x28)
 #define CTX_IS_IN_EL3 U(0x30)
-#define CTX_MPAM3_EL3 U(0x38)
 /* Constants required in supporting nested exception in EL3 */
-#define CTX_SAVED_ELR_EL3 U(0x40)
+#define CTX_SAVED_ELR_EL3 U(0x38)
 /*
  * General purpose flag, to save various EL3 states
  * FFH mode : Used to identify if handling nested exception
  * KFH mode : Used as counter value
  */
-#define CTX_NESTED_EA_FLAG U(0x48)
+#define CTX_NESTED_EA_FLAG U(0x40)
 #if FFH_SUPPORT
-#define CTX_SAVED_ESR_EL3 U(0x50)
-#define CTX_SAVED_SPSR_EL3 U(0x58)
-#define CTX_SAVED_GPREG_LR U(0x60)
-#define CTX_EL3STATE_END U(0x70) /* Align to the next 16 byte boundary */
+#define CTX_SAVED_ESR_EL3 U(0x48)
+#define CTX_SAVED_SPSR_EL3 U(0x50)
+#define CTX_SAVED_GPREG_LR U(0x58)
+#define CTX_EL3STATE_END U(0x60) /* Align to the next 16 byte boundary */
 #else
 #define CTX_EL3STATE_END U(0x50) /* Align to the next 16 byte boundary */
-#endif
+#endif /* FFH_SUPPORT */
 /*******************************************************************************
  * Constants that allow assembler code to access members of and the

@@ -343,7 +342,8 @@
 ******************************************************************************/
 #define CTX_CPTR_EL3 U(0x0)
 #define CTX_ZCR_EL3 U(0x8)
-#define CTX_PERWORLD_EL3STATE_END U(0x10)
+#define CTX_MPAM3_EL3 U(0x10)
+#define CTX_PERWORLD_EL3STATE_END U(0x18)
 #ifndef __ASSEMBLER__

@@ -462,6 +462,7 @@ typedef struct cpu_context {
 typedef struct per_world_context {
        uint64_t ctx_cptr_el3;
        uint64_t ctx_zcr_el3;
+       uint64_t ctx_mpam3_el3;
 } per_world_context_t;
 extern per_world_context_t per_world_context[CPU_DATA_CONTEXT_NUM];
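
The new offsets follow directly from the field order above: three consecutive uint64_t members give CTX_CPTR_EL3 = 0x0, CTX_ZCR_EL3 = 0x8, CTX_MPAM3_EL3 = 0x10, and an end marker of 0x18. A compile-time check along these lines (illustrative only; standard C11 _Static_assert is used here rather than a project assertion macro) shows how the assembler-visible constants track the C layout:

#include <stddef.h>
#include <stdint.h>

typedef struct per_world_context {
        uint64_t ctx_cptr_el3;
        uint64_t ctx_zcr_el3;
        uint64_t ctx_mpam3_el3;
} per_world_context_t;

/* Mirror of the assembler constants from the hunk above. */
#define CTX_MPAM3_EL3                   0x10
#define CTX_PERWORLD_EL3STATE_END       0x18

/* Compilation fails if the C layout and the assembler offsets ever diverge. */
_Static_assert(offsetof(per_world_context_t, ctx_mpam3_el3) == CTX_MPAM3_EL3,
               "CTX_MPAM3_EL3 does not match the struct layout");
_Static_assert(sizeof(per_world_context_t) == CTX_PERWORLD_EL3STATE_END,
               "CTX_PERWORLD_EL3STATE_END does not match the struct size");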

include/lib/extensions/mpam.h (4 changed lines)

@@ -12,10 +12,10 @@
 #include <context.h>
 #if ENABLE_FEAT_MPAM
-void mpam_enable(cpu_context_t *context);
+void mpam_enable_per_world(per_world_context_t *per_world_ctx);
 void mpam_init_el2_unused(void);
 #else
-static inline void mpam_enable(cpu_context_t *context)
+static inline void mpam_enable_per_world(per_world_context_t *per_world_ctx)
 {
 }
 static inline void mpam_init_el2_unused(void)

lib/el3_runtime/aarch64/context.S (4 changed lines)

@@ -378,9 +378,11 @@ endfunc fpregs_context_restore
  * Restore MPAM3_EL3 register as per context state
  * Currently we only enable MPAM for NS world and trap to EL3
  * for MPAM access in lower ELs of Secure and Realm world
+ * x9 holds address of the per_world context
  * -----------------------------------------------------------
  */
-       ldr x17, [sp, #CTX_EL3STATE_OFFSET + CTX_MPAM3_EL3]
+       ldr x17, [x9, #CTX_MPAM3_EL3]
        msr S3_6_C10_C5_0, x17 /* mpam3_el3 */
 no_mpam:
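
In C terms, the two instructions above read the staged value out of the per-world context that x9 points at and write it into MPAM3_EL3. A rough equivalent (the write_mpam3_el3() helper name is an assumption for illustration, not code from the patch):

/* Sketch of the restore path above; x9 is assumed to already hold the
 * address of the exiting world's per_world_context entry. */
static inline void restore_mpam3_el3_sketch(const per_world_context_t *per_world_ctx)
{
        uint64_t mpam3_el3 = per_world_ctx->ctx_mpam3_el3; /* ldr x17, [x9, #CTX_MPAM3_EL3] */
        write_mpam3_el3(mpam3_el3); /* msr S3_6_C10_C5_0, x17 (assumed MSR helper) */
}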

lib/el3_runtime/aarch64/context_mgmt.c (22 changed lines)

@@ -483,11 +483,6 @@ static void setup_context_common(cpu_context_t *ctx, const entry_point_info_t *e
        }
 #endif /* (IMAGE_BL31 && defined(SPD_spmd) && SPMD_SPM_AT_SEL2) */
-       if (is_feat_mpam_supported()) {
-               write_ctx_reg(get_el3state_ctx(ctx), CTX_MPAM3_EL3, \
-                               MPAM3_EL3_RESET_VAL);
-       }
        /*
         * Populate EL3 state so that we've the right context
         * before doing ERET

@@ -618,7 +613,17 @@ void cm_el3_arch_init_per_world(per_world_context_t *per_world_ctx)
         * CPTR_EL2,CPACR, or HCPTR do not trap to EL3.
         */
        uint64_t cptr_el3 = CPTR_EL3_RESET_VAL & ~(TCPAC_BIT | TFP_BIT);
        per_world_ctx->ctx_cptr_el3 = cptr_el3;
+       /*
+        * Initialize MPAM3_EL3 to its default reset value
+        *
+        * MPAM3_EL3_RESET_VAL sets the MPAM3_EL3.TRAPLOWER bit that forces
+        * all lower ELn MPAM3_EL3 register access to, trap to EL3
+        */
+       per_world_ctx->ctx_mpam3_el3 = MPAM3_EL3_RESET_VAL;
 }
 #endif /* IMAGE_BL31 */

@@ -647,6 +652,10 @@ void manage_extensions_nonsecure_per_world(void)
        if (is_feat_sys_reg_trace_supported()) {
                sys_reg_trace_enable_per_world(&per_world_context[CPU_CONTEXT_NS]);
        }
+       if (is_feat_mpam_supported()) {
+               mpam_enable_per_world(&per_world_context[CPU_CONTEXT_NS]);
+       }
 }
 #endif /* IMAGE_BL31 */

@@ -715,9 +724,6 @@ static void manage_extensions_nonsecure(cpu_context_t *ctx)
                sme_enable(ctx);
        }
-       if (is_feat_mpam_supported()) {
-               mpam_enable(ctx);
-       }
        pmuv3_enable(ctx);
 #endif /* IMAGE_BL31 */
 }
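
Taken together, the context_mgmt.c hunks split MPAM handling into two phases: cm_el3_arch_init_per_world() stages the trapping reset value for a world, and manage_extensions_nonsecure_per_world() later relaxes it for the non-secure world only. A hypothetical driver (not TF-A's actual call sites) that makes the ordering explicit:

/* Hypothetical sequence only; in TF-A these calls come from BL31 setup
 * code rather than a loop like this. */
static void example_per_world_mpam_setup(void)
{
        for (unsigned int i = 0U; i < CPU_DATA_CONTEXT_NUM; i++) {
                /* Every world starts from MPAM3_EL3_RESET_VAL (TRAPLOWER set). */
                cm_el3_arch_init_per_world(&per_world_context[i]);
        }

        /* Only the non-secure world gets MPAMEN set and TRAPLOWER cleared. */
        manage_extensions_nonsecure_per_world();
}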

lib/extensions/mpam/mpam.c (8 changed lines)

@@ -11,19 +11,19 @@
 #include <arch_helpers.h>
 #include <lib/extensions/mpam.h>
-void mpam_enable(cpu_context_t *context)
+void mpam_enable_per_world(per_world_context_t *per_world_ctx)
 {
        u_register_t mpam3_el3;
-       mpam3_el3 = read_ctx_reg(get_el3state_ctx(context), CTX_MPAM3_EL3);
        /*
         * Enable MPAM, and disable trapping to EL3 when lower ELs access their
         * own MPAM registers
         */
+       mpam3_el3 = per_world_ctx->ctx_mpam3_el3;
        mpam3_el3 = (mpam3_el3 | MPAM3_EL3_MPAMEN_BIT) &
                        ~(MPAM3_EL3_TRAPLOWER_BIT);
-       write_ctx_reg(get_el3state_ctx(context), CTX_MPAM3_EL3, mpam3_el3);
+       per_world_ctx->ctx_mpam3_el3 = mpam3_el3;
 }
 /*
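
As a standalone sanity check of the bit update in mpam_enable_per_world(), the following self-contained program reproduces it on a host machine. The bit positions (MPAMEN = bit 63, TRAPLOWER = bit 62 of MPAM3_EL3) and the stand-in macro names are assumptions taken from the Arm MPAM specification, not from this patch:

#include <inttypes.h>
#include <stdio.h>

/* Stand-ins for MPAM3_EL3_MPAMEN_BIT / MPAM3_EL3_TRAPLOWER_BIT. */
#define MPAMEN_BIT      (UINT64_C(1) << 63)
#define TRAPLOWER_BIT   (UINT64_C(1) << 62)

int main(void)
{
        /* Per-world reset image: MPAM disabled, lower-EL accesses trap to EL3. */
        uint64_t mpam3_el3 = TRAPLOWER_BIT;

        /* The same update mpam_enable_per_world() applies to the NS world's copy. */
        mpam3_el3 = (mpam3_el3 | MPAMEN_BIT) & ~TRAPLOWER_BIT;

        /* Prints 0x8000000000000000: MPAM enabled, lower-EL traps disabled. */
        printf("NS MPAM3_EL3 image: 0x%016" PRIx64 "\n", mpam3_el3);
        return 0;
}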
