
Fix exception handlers in BL31: Use DSB to synchronize pending EA

For SoCs which do not implement RAS, use DSB as a barrier to
synchronize pending external aborts at the entry and exit of
exception handlers. This is needed to isolate SErrors to the
appropriate context.

However, this introduces an unintended side effect, which is discussed
in https://review.trustedfirmware.org/c/TF-A/trusted-firmware-a/+/3440
A quick workaround is provided as part of this patch; the side effect
and the workaround are summarized here:

The explicit DSB at the entry of various exception vectors in BL31
for handling exceptions from lower ELs can inadvertently trigger an
SError exception in EL3 due to pending asynchronous aborts in lower
ELs. This will end up being handled by serror_sp_elx in EL3, which
will ultimately panic and die.

The workaround is to update a flag to indicate whether the exception
truly came from EL3. This flag is allocated in the cpu_context
structure. This is not a bulletproof solution to the problem at hand
because we assume that the instructions following the "isb" which
update the flag (lines 100-102 & 139-141) execute without causing
further exceptions.
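
In outline, the flag is set at exception entry, checked in the EL3
SError vector and cleared at exception exit. A condensed sketch of
that lifecycle, pieced together from the hunks below with the
surrounding save/restore code elided:

    /* Entry from a lower EL, !RAS_EXTENSION: surface pending aborts,
     * unmask SError, then record that execution is now in EL3. */
    dsb	sy
    msr	daifclr, #DAIF_ABT_BIT
    isb
    mov	x30, #1
    str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]

    /* serror_sp_elx: attribute the SError using the flag. */
    ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
    cbnz	x30, 1f				/* set: genuine EL3 error */
    b	handle_lower_el_async_ea	/* clear: lower-EL abort  */
1:	no_ret	plat_handle_el3_ea

    /* el3_exit: clear the flag before returning to the lower EL. */
    str	xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
    exception_return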

Change-Id: I4d345b07d746a727459435ddd6abb37fda24a9bf
Signed-off-by: Madhukar Pappireddy <madhukar.pappireddy@arm.com>
pull/1940/head
Madhukar Pappireddy, 4 years ago
commit c2d32a5f85
bl31/aarch64/ea_delegate.S                | 4
bl31/aarch64/runtime_exceptions.S         | 94
include/lib/el3_runtime/aarch64/context.h | 5
lib/el3_runtime/aarch64/context.S         | 7

bl31/aarch64/ea_delegate.S | 4

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2018-2019, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2018-2021, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -15,6 +15,7 @@
#include <context.h>
.globl handle_lower_el_ea_esb
.globl handle_lower_el_async_ea
.globl enter_lower_el_sync_ea
.globl enter_lower_el_async_ea
@@ -133,6 +134,7 @@ func enter_lower_el_async_ea
*/
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
handle_lower_el_async_ea:
/*
* Save general purpose and ARMv8.3-PAuth registers (if enabled).
* If Secure Cycle Counter is not disabled in MDCR_EL3 when

bl31/aarch64/runtime_exceptions.S | 94

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -45,8 +45,9 @@
* instruction. When an error is thus synchronized, the handling is
* delegated to platform EA handler.
*
* Without RAS_EXTENSION, this macro just saves x30, and unmasks
* Asynchronous External Aborts.
* Without RAS_EXTENSION, this macro synchronizes pending errors using
* a DSB, unmasks Asynchronous External Aborts and saves X30 before
* setting the flag CTX_IS_IN_EL3.
*/
.macro check_and_unmask_ea
#if RAS_EXTENSION
@@ -79,13 +80,89 @@
bl restore_gp_pmcr_pauth_regs
1:
#else
/*
* For SoCs which do not implement RAS, use DSB as a barrier to
* synchronize pending external aborts.
*/
dsb sy
/* Unmask the SError interrupt */
msr daifclr, #DAIF_ABT_BIT
/* Use ISB for the above unmask operation to take effect immediately */
isb
/*
* Refer Note 1. No need to restore X30 as both handle_sync_exception
* and handle_interrupt_exception macro which follow this macro modify
* X30 anyway.
*/
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
mov x30, #1
str x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
dmb sy
#endif
.endm
#if !RAS_EXTENSION
/*
* Note 1: The explicit DSB at the entry of various exception vectors
* for handling exceptions from lower ELs can inadvertently trigger an
* SError exception in EL3 due to pending asynchronous aborts in lower
* ELs. This will end up being handled by serror_sp_elx which will
* ultimately panic and die.
* The way to workaround is to update a flag to indicate if the exception
* truly came from EL3. This flag is allocated in the cpu_context
* structure and located at offset "CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3"
* This is not a bullet proof solution to the problem at hand because
* we assume the instructions following "isb" that help to update the
* flag execute without causing further exceptions.
*/
/* ---------------------------------------------------------------------
* This macro handles Asynchronous External Aborts.
* ---------------------------------------------------------------------
*/
.macro handle_async_ea
/*
* Use a barrier to synchronize pending external aborts.
*/
dsb sy
/* Unmask the SError interrupt */
msr daifclr, #DAIF_ABT_BIT
/* Use ISB for the above unmask operation to take effect immediately */
isb
/* Refer Note 1 */
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
mov x30, #1
str x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
dmb sy
b handle_lower_el_async_ea
.endm
/*
* This macro checks if the exception was taken due to SError in EL3 or
* because of pending asynchronous external aborts from lower EL that got
* triggered due to explicit synchronization in EL3. Refer Note 1.
*/
.macro check_if_serror_from_EL3
/* Assumes SP_EL3 on entry */
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
ldr x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
cbnz x30, exp_from_EL3
/* Handle asynchronous external abort from lower EL */
b handle_lower_el_async_ea
exp_from_EL3:
/* Jump to plat_handle_el3_ea which does not return */
.endm
#endif
/* ---------------------------------------------------------------------
* This macro handles Synchronous exceptions.
* Only SMC exceptions are supported.
@@ -272,6 +349,9 @@ vector_entry fiq_sp_elx
end_vector_entry fiq_sp_elx
vector_entry serror_sp_elx
#if !RAS_EXTENSION
check_if_serror_from_EL3
#endif
no_ret plat_handle_el3_ea
end_vector_entry serror_sp_elx
@@ -305,8 +385,12 @@ end_vector_entry fiq_aarch64
vector_entry serror_aarch64
apply_at_speculative_wa
#if RAS_EXTENSION
msr daifclr, #DAIF_ABT_BIT
b enter_lower_el_async_ea
#else
handle_async_ea
#endif
end_vector_entry serror_aarch64
/* ---------------------------------------------------------------------
@@ -339,8 +423,12 @@ end_vector_entry fiq_aarch32
vector_entry serror_aarch32
apply_at_speculative_wa
#if RAS_EXTENSION
msr daifclr, #DAIF_ABT_BIT
b enter_lower_el_async_ea
#else
handle_async_ea
#endif
end_vector_entry serror_aarch32
#ifdef MONITOR_TRAPS
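
The window Note 1 refers to is the short stretch between the ISB
(after which the SError unmask takes effect) and the store that sets
CTX_IS_IN_EL3. As I read the note, an SError taken inside that window
still finds the flag clear and is therefore routed to
handle_lower_el_async_ea, which is the intended attribution for a
pending lower-EL abort; what the note cautions against is these few
instructions raising a further exception of their own. An annotated
copy of the sequence (the comments are added here for illustration
and are not part of the patch):

    isb						/* unmask takes effect here  */
						/* -- window opens --        */
    str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
    mov	x30, #1
    str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]	/* -- window closes -- */
    dmb	sy					/* order the flag update     */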

include/lib/el3_runtime/aarch64/context.h | 5

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -60,7 +60,8 @@
#define CTX_SPSR_EL3 U(0x18)
#define CTX_ELR_EL3 U(0x20)
#define CTX_PMCR_EL0 U(0x28)
#define CTX_EL3STATE_END U(0x30)
#define CTX_IS_IN_EL3 U(0x30)
#define CTX_EL3STATE_END U(0x40) /* Align to the next 16 byte boundary */
/*******************************************************************************
* Constants that allow assembler code to access members of and the
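
Each entry in the el3state region is an 8-byte slot, so placing
CTX_IS_IN_EL3 at 0x30 makes the last used byte 0x37; the end marker
is therefore rounded up from 0x38 to 0x40, the next 16-byte boundary,
as the comment in the hunk notes. A hypothetical compile-time check
in the style TF-A uses for other context offsets (CASSERT is assumed
to be available from lib/cassert.h; this check is not part of the
patch):

    #include <lib/cassert.h>

    /* Illustration only: the el3state area must end on a 16-byte boundary. */
    CASSERT((CTX_EL3STATE_END % 16U) == 0U, assert_ctx_el3state_end_unaligned);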

lib/el3_runtime/aarch64/context.S | 7

@@ -1,5 +1,5 @@
/*
* Copyright (c) 2013-2020, ARM Limited and Contributors. All rights reserved.
* Copyright (c) 2013-2021, ARM Limited and Contributors. All rights reserved.
*
* SPDX-License-Identifier: BSD-3-Clause
*/
@@ -995,6 +995,11 @@ func el3_exit
* ----------------------------------------------------------
*/
esb
#else
dsb sy
#endif
#ifdef IMAGE_BL31
str xzr, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
#endif
exception_return
