
AArch32: Disable Secure Cycle Counter

This patch changes the implementation used to disable the Secure
Cycle Counter. For ARMv8.5 the counter is disabled by setting the
SDCR.SCCD bit on CPU cold/warm boot. For earlier architectures the
PMCR register is saved/restored on secure world entry/exit from/to
Non-secure state, and cycle counting is disabled by setting the
PMCR.DP bit.
New ARMv8.5-PMU related definitions were added to the
'include/arch/aarch32/arch.h' header file.

Change-Id: Ia8845db2ebe8de940d66dff479225a5b879316f8
Signed-off-by: Alexei Fedorov <Alexei.Fedorov@arm.com>
Branch: pull/1935/head
Author: Alexei Fedorov
Committed by: Paul Beesley
Commit: c3e8b0be9b
 bl32/sp_min/aarch32/entrypoint.S          | 20
 include/arch/aarch32/arch.h               |  5
 include/arch/aarch32/el3_common_macros.S  | 30
 include/arch/aarch32/smccc_macros.S       | 54
 lib/el3_runtime/aarch32/context_mgmt.c    | 26
 5 files changed
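
For pre-ARMv8.5 parts, the behaviour this patch adds to the SMC save/restore path can be read as the following C sketch. It is an illustration only, not code from the patch: read_scr(), read_sdcr(), read_pmcr() and write_pmcr() stand in for TF-A's AArch32 coprocessor accessors, and ctx_pmcr stands in for the SMC_CTX_PMCR slot of smc_ctx_t that the assembly macros actually use.

#include <stdint.h>

/* Bit definitions mirroring include/arch/aarch32/arch.h */
#define SCR_NS_BIT      (1U << 0)
#define SDCR_SCCD_BIT   (1U << 23)
#define PMCR_DP_BIT     (1U << 5)

/* Assumed register accessors (TF-A provides equivalents). */
extern uint32_t read_scr(void);
extern uint32_t read_sdcr(void);
extern uint32_t read_pmcr(void);
extern void write_pmcr(uint32_t val);

/* Stand-in for the SMC_CTX_PMCR slot in the monitor's smc_ctx_t. */
static uint32_t ctx_pmcr;

/* On entry to the secure world (cf. smccc_save_gp_mode_regs). */
void save_and_disable_cycle_counter(void)
{
    /*
     * If SDCR.SCCD reads back as 1, ARMv8.5-PMU already prohibits
     * cycle counting in Secure state and PMCR can be left untouched.
     */
    if ((read_sdcr() & SDCR_SCCD_BIT) != 0U)
        return;

    uint32_t pmcr = read_pmcr();

    /* Preserve the Non-secure PMCR only when entered from Non-secure. */
    if ((read_scr() & SCR_NS_BIT) != 0U)
        ctx_pmcr = pmcr;

    /* Prohibit cycle counting while executing in Secure state. */
    write_pmcr(pmcr | PMCR_DP_BIT);
}

/* On exit from the monitor (cf. smccc_restore_gp_mode_regs). */
void restore_cycle_counter(uint32_t scr_being_restored)
{
    /* Restore only when returning to Non-secure state ... */
    if ((scr_being_restored & SCR_NS_BIT) == 0U)
        return;

    /* ... and only if hardware (SDCR.SCCD) is not already in charge. */
    if ((read_sdcr() & SDCR_SCCD_BIT) == 0U)
        write_pmcr(ctx_pmcr);
}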

bl32/sp_min/aarch32/entrypoint.S (20 changes)

@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -183,15 +183,6 @@ func sp_min_handle_smc
stcopr r0, SCR
isb
- /*
- * Set PMCR.DP to 1 to prohibit cycle counting whilst in Secure Mode.
- * Also, the PMCR.LC field has an architecturally UNKNOWN value on reset
- * and so set to 1 as ARM has deprecated use of PMCR.LC=0.
- */
- ldcopr r0, PMCR
- orr r0, r0, #(PMCR_LC_BIT | PMCR_DP_BIT)
- stcopr r0, PMCR
ldr r0, [r2, #SMC_CTX_GPREG_R0] /* smc_fid */
/* Check whether an SMC64 is issued */
tst r0, #(FUNCID_CC_MASK << FUNCID_CC_SHIFT)
@@ -236,15 +227,6 @@ func sp_min_handle_fiq
stcopr r0, SCR
isb
- /*
- * Set PMCR.DP to 1 to prohibit cycle counting whilst in Secure Mode.
- * Also, the PMCR.LC field has an architecturally UNKNOWN value on reset
- * and so set to 1 as ARM has deprecated use of PMCR.LC=0.
- */
- ldcopr r0, PMCR
- orr r0, r0, #(PMCR_LC_BIT | PMCR_DP_BIT)
- stcopr r0, PMCR
push {r2, r3}
bl sp_min_fiq
pop {r0, r3}

include/arch/aarch32/arch.h (5 changes)

@@ -162,6 +162,7 @@
#define SDCR_SPD_DISABLE U(0x2)
#define SDCR_SPD_ENABLE U(0x3)
#define SDCR_SCCD_BIT (U(1) << 23)
+ #define SDCR_SPME_BIT (U(1) << 17)
#define SDCR_RESET_VAL U(0x0)
/* HSCTLR definitions */
@@ -243,6 +244,8 @@
#define VTTBR_BADDR_SHIFT U(0)
/* HDCR definitions */
+ #define HDCR_HLP_BIT (U(1) << 26)
+ #define HDCR_HPME_BIT (U(1) << 7)
#define HDCR_RESET_VAL U(0x0)
/* HSTR definitions */
@@ -419,8 +422,10 @@
#define PMCR_N_SHIFT U(11)
#define PMCR_N_MASK U(0x1f)
#define PMCR_N_BITS (PMCR_N_MASK << PMCR_N_SHIFT)
+ #define PMCR_LP_BIT (U(1) << 7)
#define PMCR_LC_BIT (U(1) << 6)
#define PMCR_DP_BIT (U(1) << 5)
+ #define PMCR_RESET_VAL U(0x0)
/*******************************************************************************
* Definitions of register offsets, fields and macros for CPU system

include/arch/aarch32/el3_common_macros.S (30 changes)

@@ -112,15 +112,41 @@
* SDCR.SPD: Disable AArch32 privileged debug. Debug exceptions from
* Secure EL1 are disabled.
*
- * SDCR: Set to one so that cycle counting by PMCCNTR is prohibited in
- * Secure state. This bit is RES0 in versions of the architecture
+ * SDCR.SCCD: Set to one so that cycle counting by PMCCNTR is prohibited
+ * in Secure state. This bit is RES0 in versions of the architecture
* earlier than ARMv8.5, setting it to 1 doesn't have any effect on
* them.
* ---------------------------------------------------------------------
*/
ldr r0, =(SDCR_RESET_VAL | SDCR_SPD(SDCR_SPD_DISABLE) | SDCR_SCCD_BIT)
stcopr r0, SDCR
+ /* ---------------------------------------------------------------------
+ * Initialise PMCR, setting all fields rather than relying
+ * on hw. Some fields are architecturally UNKNOWN on reset.
+ *
+ * PMCR.LP: Set to one so that event counter overflow, that
+ * is recorded in PMOVSCLR[0-30], occurs on the increment
+ * that changes PMEVCNTR<n>[63] from 1 to 0, when ARMv8.5-PMU
+ * is implemented. This bit is RES0 in versions of the architecture
+ * earlier than ARMv8.5, setting it to 1 doesn't have any effect
+ * on them.
+ * This bit is Reserved, UNK/SBZP in ARMv7.
+ *
+ * PMCR.LC: Set to one so that cycle counter overflow, that
+ * is recorded in PMOVSCLR[31], occurs on the increment
+ * that changes PMCCNTR[63] from 1 to 0.
+ * This bit is Reserved, UNK/SBZP in ARMv7.
+ *
+ * PMCR.DP: Set to one to prohibit cycle counting whilst in Secure mode.
+ * ---------------------------------------------------------------------
+ */
+ #if (ARM_ARCH_MAJOR > 7)
+ ldr r0, =(PMCR_RESET_VAL | PMCR_DP_BIT | PMCR_LC_BIT | \
+ PMCR_LP_BIT)
+ #else
+ ldr r0, =(PMCR_RESET_VAL | PMCR_DP_BIT)
+ #endif
+ stcopr r0, PMCR
/*
* If Data Independent Timing (DIT) functionality is implemented,
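
For reference, the two initial PMCR values chosen above work out to 0xE0 on ARMv8 and later and 0x20 on ARMv7. The small standalone program below (not part of the patch; the constants are repeated locally) just spells out that arithmetic.

#include <stdint.h>
#include <stdio.h>

/* Bit positions as defined in include/arch/aarch32/arch.h */
#define PMCR_LP_BIT     (1U << 7)
#define PMCR_LC_BIT     (1U << 6)
#define PMCR_DP_BIT     (1U << 5)
#define PMCR_RESET_VAL  0U

int main(void)
{
    /* ARMv8 and later: DP, LC and LP are all written (LP is RES0 before ARMv8.5). */
    uint32_t pmcr_v8 = PMCR_RESET_VAL | PMCR_DP_BIT | PMCR_LC_BIT | PMCR_LP_BIT;

    /* ARMv7: LC and LP are UNK/SBZP, so only DP is set. */
    uint32_t pmcr_v7 = PMCR_RESET_VAL | PMCR_DP_BIT;

    /* Prints "PMCR init: ARMv8+ = 0xE0, ARMv7 = 0x20" */
    printf("PMCR init: ARMv8+ = 0x%02X, ARMv7 = 0x%02X\n",
           (unsigned int)pmcr_v8, (unsigned int)pmcr_v7);
    return 0;
}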

include/arch/aarch32/smccc_macros.S (54 changes)

@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -58,7 +58,6 @@
stm r0!, {r2}
stcopr r4, SCR
isb
#else
/* Save the banked registers including the current SPSR and LR */
mrs r4, sp_usr
@@ -85,10 +84,34 @@
/* lr_mon is already saved by caller */
ldcopr r4, SCR
+ #if ARM_ARCH_MAJOR > 7
+ /*
+ * Check if earlier initialization of SDCR.SCCD to 1
+ * failed, meaning that ARMv8.5-PMU is not implemented,
+ * cycle counting is not disabled and PMCR should be
+ * saved in Non-secure context.
+ */
+ ldcopr r5, SDCR
+ tst r5, #SDCR_SCCD_BIT
+ bne 1f
+ #endif
+ /* Secure Cycle Counter is not disabled */
#endif
- str r4, [sp, #SMC_CTX_SCR]
- ldcopr r4, PMCR
- str r4, [sp, #SMC_CTX_PMCR]
+ ldcopr r5, PMCR
+ /* Check caller's security state */
+ tst r4, #SCR_NS_BIT
+ beq 2f
+ /* Save PMCR if called from Non-secure state */
+ str r5, [sp, #SMC_CTX_PMCR]
+ /* Disable cycle counter when event counting is prohibited */
+ 2: orr r5, r5, #PMCR_DP_BIT
+ stcopr r5, PMCR
+ isb
+ 1: str r4, [sp, #SMC_CTX_SCR]
.endm
/*
@@ -113,12 +136,31 @@
stcopr r1, SCR
isb
+ /*
+ * Restore PMCR when returning to Non-secure state
+ */
+ tst r1, #SCR_NS_BIT
+ beq 2f
+ /*
+ * Back to Non-secure state
+ */
+ #if ARM_ARCH_MAJOR > 7
+ /*
+ * Check if earlier initialization of SDCR.SCCD to 1
+ * failed, meaning that ARMv8.5-PMU is not implemented and
+ * PMCR should be restored from Non-secure context.
+ */
+ ldcopr r1, SDCR
+ tst r1, #SDCR_SCCD_BIT
+ bne 2f
+ #endif
/*
* Restore the PMCR register.
*/
ldr r1, [r0, #SMC_CTX_PMCR]
stcopr r1, PMCR
+ 2:
/* Restore the banked registers including the current SPSR */
add r1, r0, #SMC_CTX_SP_USR
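
The reason the macros can test SDCR.SCCD at run time is that the bit is RES0 without ARMv8.5-PMU: the 1 written during the common entrypoint code simply does not stick on older cores. A minimal sketch of that feature check follows; read_sdcr() is assumed to behave like TF-A's coprocessor accessors and the helper name is hypothetical.

#include <stdbool.h>
#include <stdint.h>

#define SDCR_SCCD_BIT   (1U << 23)

/* Assumed accessor for the AArch32 SDCR register. */
extern uint32_t read_sdcr(void);

/*
 * SDCR.SCCD was written to 1 during cold/warm boot. If it still reads
 * back as 1, ARMv8.5-PMU is implemented and the hardware already
 * prohibits cycle counting in Secure state. If it reads back as 0 the
 * write was ignored (RES0 bit), so the PMCR save/disable/restore
 * fallback in the macros above is used instead.
 */
bool secure_cycle_counting_disabled_by_hw(void)
{
    return (read_sdcr() & SDCR_SCCD_BIT) != 0U;
}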

lib/el3_runtime/aarch32/context_mgmt.c (26 changes)

@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
+ * Copyright (c) 2016-2019, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -281,10 +281,28 @@ void cm_prepare_el3_exit(uint32_t security_state)
*
* HDCR.HPMN: Set to value of PMCR.N which is the
* architecturally-defined reset value.
+ *
+ * HDCR.HLP: Set to one so that event counter
+ * overflow, that is recorded in PMOVSCLR[0-30],
+ * occurs on the increment that changes
+ * PMEVCNTR<n>[63] from 1 to 0, when ARMv8.5-PMU is
+ * implemented. This bit is RES0 in versions of the
+ * architecture earlier than ARMv8.5, setting it to 1
+ * doesn't have any effect on them.
+ * This bit is Reserved, UNK/SBZP in ARMv7.
+ *
+ * HDCR.HPME: Set to zero to disable EL2 Event
+ * counters.
*/
- write_hdcr(HDCR_RESET_VAL |
- ((read_pmcr() & PMCR_N_BITS) >> PMCR_N_SHIFT));
+ #if (ARM_ARCH_MAJOR > 7)
+ write_hdcr((HDCR_RESET_VAL | HDCR_HLP_BIT |
+ ((read_pmcr() & PMCR_N_BITS) >>
+ PMCR_N_SHIFT)) & ~HDCR_HPME_BIT);
+ #else
+ write_hdcr((HDCR_RESET_VAL |
+ ((read_pmcr() & PMCR_N_BITS) >>
+ PMCR_N_SHIFT)) & ~HDCR_HPME_BIT);
+ #endif
/*
* Set HSTR to its architectural reset value so that
* access to system registers in the cproc=1111
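
Written out in isolation, the HDCR value programmed above is the architectural reset value with HPMN seeded from PMCR.N, HLP set and HPME cleared. A compilable sketch under the same assumptions follows; read_pmcr() and write_hdcr() are the accessors used by the patch itself, while the wrapper function name is hypothetical.

#include <stdint.h>

/* Field definitions mirroring include/arch/aarch32/arch.h */
#define HDCR_RESET_VAL  0U
#define HDCR_HLP_BIT    (1U << 26)
#define HDCR_HPME_BIT   (1U << 7)
#define PMCR_N_SHIFT    11U
#define PMCR_N_MASK     0x1FU
#define PMCR_N_BITS     (PMCR_N_MASK << PMCR_N_SHIFT)

/* Accessors as used in lib/el3_runtime/aarch32/context_mgmt.c */
extern uint32_t read_pmcr(void);
extern void write_hdcr(uint32_t val);

void init_hdcr_for_ns_entry(void)
{
    /* HDCR.HPMN lives in bits [4:0]; take its value from PMCR.N. */
    uint32_t hpmn = (read_pmcr() & PMCR_N_BITS) >> PMCR_N_SHIFT;

    /*
     * HLP selects 64-bit event counter overflow behaviour (RES0 before
     * ARMv8.5); clearing HPME keeps the EL2 event counters disabled.
     */
    write_hdcr((HDCR_RESET_VAL | HDCR_HLP_BIT | hpmn) & ~HDCR_HPME_BIT);
}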
