Browse Source

refactor(el3_runtime): unify handle/enter_lower_el_async_ea

handle_lower_el_async_ea and enter_lower_el_async_ea are the same except for
saving the x30 register. With the previous patch, x30 is now freed before
calling these functions, so we don't need both of them.

This patch also unifies the naming convention, now we have 3 handlers
 - handle_lower_el_ea_esb
 - handle_lower_el_sync_ea
 - handle_lower_el_async_ea

Signed-off-by: Manish Pandey <manish.pandey2@arm.com>
Change-Id: I63b584cf059bac80195aa334981d50fa6272cf49
pull/1994/head
Manish Pandey 2 years ago
parent
commit
6f7de9a871
  1. 39
      bl31/aarch64/ea_delegate.S
  2. 10
      bl31/aarch64/runtime_exceptions.S

39
bl31/aarch64/ea_delegate.S

@ -16,9 +16,8 @@
#include <context.h>
.globl handle_lower_el_ea_esb
.globl handle_lower_el_async_ea
.globl enter_lower_el_sync_ea
.globl enter_lower_el_async_ea
.globl handle_lower_el_sync_ea
.globl handle_lower_el_async_ea
/*
@ -42,17 +41,12 @@ endfunc handle_lower_el_ea_esb
* Implementation Defined Exceptions. If any other kind of exception is detected,
* then this function reports unhandled exception.
*
* Since it's part of exception vector, this function doesn't expect any GP
* registers to have been saved. It delegates the handling of the EA to platform
* handler, and upon successfully handling the EA, exits EL3; otherwise panics.
* It delegates the handling of the EA to platform handler, and upon successfully
* handling the EA, exits EL3; otherwise panics.
*
* This function assumes x30 has been saved.
*/
func enter_lower_el_sync_ea
/*
* Explicitly save x30 so as to free up a register and to enable
* branching.
*/
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
func handle_lower_el_sync_ea
mrs x30, esr_el3
ubfx x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
@ -114,24 +108,19 @@ func enter_lower_el_sync_ea
/* Synchronous exceptions other than the above are assumed to be EA */
ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
no_ret report_unhandled_exception
endfunc enter_lower_el_sync_ea
endfunc handle_lower_el_sync_ea
/*
* This function handles SErrors from lower ELs.
*
* Since it's part of exception vector, this function doesn't expect any GP
* registers to have been saved. It delegates the handling of the EA to platform
* handler, and upon successfully handling the EA, exits EL3; otherwise panics.
* It delegates the handling of the EA to platform handler, and upon successfully
* handling the EA, exits EL3; otherwise panics.
*
* This function assumes x30 has been saved.
*/
func enter_lower_el_async_ea
/*
* Explicitly save x30 so as to free up a register and to enable
* branching
*/
str x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
func handle_lower_el_async_ea
handle_lower_el_async_ea:
/*
* Save general purpose and ARMv8.3-PAuth registers (if enabled).
* If Secure Cycle Counter is not disabled in MDCR_EL3 when
@ -153,7 +142,7 @@ handle_lower_el_async_ea:
/* el3_exit assumes SP_EL0 on entry */
msr spsel, #MODE_SP_EL0
b el3_exit
endfunc enter_lower_el_async_ea
endfunc handle_lower_el_async_ea
/*

10
bl31/aarch64/runtime_exceptions.S

@ -151,7 +151,7 @@
/* Synchronous exceptions other than the above are assumed to be EA */
ldr x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
b enter_lower_el_sync_ea
b handle_lower_el_sync_ea
.endm
@ -361,11 +361,11 @@ vector_entry serror_aarch64
apply_at_speculative_wa
#if RAS_EXTENSION
msr daifclr, #DAIF_ABT_BIT
b enter_lower_el_async_ea
#else
check_and_unmask_ea
b handle_lower_el_async_ea
#endif
b handle_lower_el_async_ea
end_vector_entry serror_aarch64
/* ---------------------------------------------------------------------
@ -404,11 +404,11 @@ vector_entry serror_aarch32
apply_at_speculative_wa
#if RAS_EXTENSION
msr daifclr, #DAIF_ABT_BIT
b enter_lower_el_async_ea
#else
check_and_unmask_ea
b handle_lower_el_async_ea
#endif
b handle_lower_el_async_ea
end_vector_entry serror_aarch32
#ifdef MONITOR_TRAPS

Loading…
Cancel
Save