Mirror of https://github.com/ARM-software/arm-trusted-firmware.git
Synced 2025-08-22 06:51:10 +02:00

commit cd8f6af390

Merge changes I63b584cf,I617f3d41 into integration

* changes:
  refactor(el3_runtime): unify handle/enter_lower_el_async_ea
  refactor(el3_runtime): introduce save_x30 macro
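
In short, the two refactors give every EL3 vector entry a single, shared spill of the link register (the new save_x30 macro) and collapse the enter_*/handle_* pairs for lower-EL external aborts into single handle_* functions that assume that spill has already happened. A minimal sketch of the entry shape after the merge (macros abbreviated; the exact code is in the hunks below):

	/* Sketch only */
	vector_entry sync_exception_aarch64
	save_x30			/* x30 -> CTX_GPREG_LR in the context frame */
	apply_at_speculative_wa		/* free to clobber x30 from here on */
	check_and_unmask_ea
	handle_sync_exception		/* may tail-branch to handle_lower_el_sync_ea */
	end_vector_entry sync_exception_aarch64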
@@ -16,9 +16,8 @@
 #include <context.h>
 
 	.globl	handle_lower_el_ea_esb
+	.globl	handle_lower_el_sync_ea
 	.globl	handle_lower_el_async_ea
-	.globl	enter_lower_el_sync_ea
-	.globl	enter_lower_el_async_ea
 
 
 /*
@@ -42,17 +41,12 @@ endfunc handle_lower_el_ea_esb
  * Implementation Defined Exceptions. If any other kind of exception is detected,
  * then this function reports unhandled exception.
  *
- * Since it's part of exception vector, this function doesn't expect any GP
- * registers to have been saved. It delegates the handling of the EA to platform
- * handler, and upon successfully handling the EA, exits EL3; otherwise panics.
+ * It delegates the handling of the EA to platform handler, and upon successfully
+ * handling the EA, exits EL3; otherwise panics.
+ *
+ * This function assumes x30 has been saved.
  */
-func enter_lower_el_sync_ea
-	/*
-	 * Explicitly save x30 so as to free up a register and to enable
-	 * branching.
-	 */
-	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
-
+func handle_lower_el_sync_ea
 	mrs	x30, esr_el3
 	ubfx	x30, x30, #ESR_EC_SHIFT, #ESR_EC_LENGTH
 
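
The comment rewrite above encodes the new calling contract: handle_lower_el_sync_ea no longer spills x30 itself, so any path that branches to it must already have stored x30 into the context frame (in this series, via the save_x30 macro at the vector). A hypothetical call site honoring that contract, for illustration only:

	save_x30			/* required: x30 -> CTX_GPREGS_OFFSET + CTX_GPREG_LR */
	...
	b	handle_lower_el_sync_ea	/* handler may now clobber x30 (esr_el3 scratch) */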
@@ -114,24 +108,19 @@ func enter_lower_el_sync_ea
 	/* Synchronous exceptions other than the above are assumed to be EA */
 	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
 	no_ret	report_unhandled_exception
-endfunc enter_lower_el_sync_ea
+endfunc handle_lower_el_sync_ea
 
 
 /*
  * This function handles SErrors from lower ELs.
  *
- * Since it's part of exception vector, this function doesn't expect any GP
- * registers to have been saved. It delegates the handling of the EA to platform
- * handler, and upon successfully handling the EA, exits EL3; otherwise panics.
+ * It delegates the handling of the EA to platform handler, and upon successfully
+ * handling the EA, exits EL3; otherwise panics.
+ *
+ * This function assumes x30 has been saved.
  */
-func enter_lower_el_async_ea
-	/*
-	 * Explicitly save x30 so as to free up a register and to enable
-	 * branching
-	 */
-	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+func handle_lower_el_async_ea
 
-handle_lower_el_async_ea:
 	/*
 	 * Save general purpose and ARMv8.3-PAuth registers (if enabled).
 	 * If Secure Cycle Counter is not disabled in MDCR_EL3 when
@@ -153,7 +142,7 @@ handle_lower_el_async_ea:
 	/* el3_exit assumes SP_EL0 on entry */
 	msr	spsel, #MODE_SP_EL0
 	b	el3_exit
-endfunc enter_lower_el_async_ea
+endfunc handle_lower_el_async_ea
 
 
 /*
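
Net effect in the delegate file: enter_lower_el_async_ea, which existed only to spill x30 before falling through to the local handle_lower_el_async_ea label, is folded into a single global function. A sketch of the control flow before and after (bodies abbreviated):

	/* Before: two entry points into one body */
	func enter_lower_el_async_ea
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]	/* RAS path spilled here */
	handle_lower_el_async_ea:	/* non-RAS path jumped here, x30 pre-saved */
	...				/* save GP/PAuth regs, delegate EA, el3_exit */

	/* After: one entry point; callers run save_x30 before branching */
	func handle_lower_el_async_ea
	...				/* same body, duplicate spill gone */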
@@ -39,6 +39,14 @@
 	.globl	fiq_aarch32
 	.globl	serror_aarch32
 
+	/*
+	 * Save LR and make x30 available as most of the routines in vector entry
+	 * need a free register
+	 */
+	.macro save_x30
+	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+	.endm
+
 	/*
 	 * Macro that prepares entry to EL3 upon taking an exception.
 	 *
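
The macro is deliberately a one-liner: it expands to the same store the vectors used to open-code. In TF-A's EL3 entry convention SP_EL3 carries the per-CPU context pointer, so the expansion parks x30 in the saved-GP-register area of the frame and frees it as a scratch and branch register:

	/* Each `save_x30` assembles to exactly: */
	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]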
@@ -58,12 +66,6 @@
 	/* Unmask the SError interrupt */
 	msr	daifclr, #DAIF_ABT_BIT
 
-	/*
-	 * Explicitly save x30 so as to free up a register and to enable
-	 * branching
-	 */
-	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
-
 	/* Check for SErrors synchronized by the ESB instruction */
 	mrs	x30, DISR_EL1
 	tbz	x30, #DISR_A_BIT, 1f
@@ -108,11 +110,7 @@
 	/* Use ISB for the above unmask operation to take effect immediately */
 	isb
 
-	/*
-	 * Refer Note 1.
-	 * No need to restore X30 as macros following this modify x30 anyway.
-	 */
-	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+	/* Refer Note 1. */
 	mov	x30, #1
 	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
 	dmb	sy
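
The dropped re-save was redundant once the vector itself runs save_x30: the live x30 is already in CTX_GPREG_LR, so the code can treat x30 as pure scratch for the CTX_IS_IN_EL3 bookkeeping without another store:

	/* x30 is scratch here; its entry value is safe in CTX_GPREG_LR */
	mov	x30, #1
	str	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]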
@@ -153,7 +151,7 @@
 
 	/* Synchronous exceptions other than the above are assumed to be EA */
 	ldr	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
-	b	enter_lower_el_sync_ea
+	b	handle_lower_el_sync_ea
 	.endm
 
 
@@ -316,7 +314,7 @@ vector_entry serror_sp_elx
 	 * triggered due to explicit synchronization in EL3. Refer Note 1.
 	 */
 	/* Assumes SP_EL3 on entry */
-	str	x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_LR]
+	save_x30
 	ldr	x30, [sp, #CTX_EL3STATE_OFFSET + CTX_IS_IN_EL3]
 	cbnz	x30, 1f
 
@@ -338,32 +336,36 @@ vector_entry sync_exception_aarch64
 	 * to a valid cpu context where the general purpose and system register
 	 * state can be saved.
 	 */
+	save_x30
 	apply_at_speculative_wa
 	check_and_unmask_ea
 	handle_sync_exception
 end_vector_entry sync_exception_aarch64
 
 vector_entry irq_aarch64
+	save_x30
 	apply_at_speculative_wa
 	check_and_unmask_ea
 	handle_interrupt_exception irq_aarch64
 end_vector_entry irq_aarch64
 
 vector_entry fiq_aarch64
+	save_x30
 	apply_at_speculative_wa
 	check_and_unmask_ea
 	handle_interrupt_exception fiq_aarch64
 end_vector_entry fiq_aarch64
 
 vector_entry serror_aarch64
+	save_x30
 	apply_at_speculative_wa
 #if RAS_EXTENSION
 	msr	daifclr, #DAIF_ABT_BIT
-	b	enter_lower_el_async_ea
 #else
 	check_and_unmask_ea
-	b	handle_lower_el_async_ea
 #endif
+	b	handle_lower_el_async_ea
+
 end_vector_entry serror_aarch64
 
 /* ---------------------------------------------------------------------
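
Hoisting the branch below #endif deduplicates the tail of the serror vector: the RAS and non-RAS builds now differ only in how they unmask the abort, and both fall into the same handler. The resulting vector, sketched (the aarch32 one in the next hunk takes the identical shape):

	vector_entry serror_aarch64
	save_x30
	apply_at_speculative_wa
	#if RAS_EXTENSION
	msr	daifclr, #DAIF_ABT_BIT		/* RAS build: unmask SError only */
	#else
	check_and_unmask_ea			/* non-RAS build: check, then unmask */
	#endif
	b	handle_lower_el_async_ea	/* single shared tail */
	end_vector_entry serror_aarch64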
@@ -377,32 +379,36 @@ vector_entry sync_exception_aarch32
 	 * to a valid cpu context where the general purpose and system register
 	 * state can be saved.
 	 */
+	save_x30
 	apply_at_speculative_wa
 	check_and_unmask_ea
 	handle_sync_exception
 end_vector_entry sync_exception_aarch32
 
 vector_entry irq_aarch32
+	save_x30
 	apply_at_speculative_wa
 	check_and_unmask_ea
 	handle_interrupt_exception irq_aarch32
 end_vector_entry irq_aarch32
 
 vector_entry fiq_aarch32
+	save_x30
 	apply_at_speculative_wa
 	check_and_unmask_ea
 	handle_interrupt_exception fiq_aarch32
 end_vector_entry fiq_aarch32
 
 vector_entry serror_aarch32
+	save_x30
 	apply_at_speculative_wa
 #if RAS_EXTENSION
 	msr	daifclr, #DAIF_ABT_BIT
-	b	enter_lower_el_async_ea
 #else
 	check_and_unmask_ea
-	b	handle_lower_el_async_ea
 #endif
+	b	handle_lower_el_async_ea
+
 end_vector_entry serror_aarch32
 
 #ifdef MONITOR_TRAPS
@@ -384,13 +384,12 @@
 	.macro apply_at_speculative_wa
 #if ERRATA_SPECULATIVE_AT
 	/*
-	 * Explicitly save x30 so as to free up a register and to enable
-	 * branching and also, save x29 which will be used in the called
-	 * function
+	 * This function expects x30 has been saved.
+	 * Also, save x29 which will be used in the called function.
 	 */
-	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
+	str	x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
 	bl	save_and_update_ptw_el1_sys_regs
-	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
+	ldr	x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
 #endif
 	.endm
 
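
Because x30 now arrives here already spilled by save_x30, the errata path only needs to preserve x29 across the call: the stp/ldp pair narrows to str/ldr. The register protocol, sketched with the vector-entry line included for context:

	save_x30					/* vector entry: x30 -> CTX_GPREG_LR */
	...
	str	x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]	/* macro: only x29 still live */
	bl	save_and_update_ptw_el1_sys_regs	/* bl clobbers x30; saved copy intact */
	ldr	x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]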
@@ -532,13 +532,12 @@
 	.macro apply_at_speculative_wa
 #if ERRATA_SPECULATIVE_AT
 	/*
-	 * Explicitly save x30 so as to free up a register and to enable
-	 * branching and also, save x29 which will be used in the called
-	 * function
+	 * This function expects x30 has been saved.
+	 * Also, save x29 which will be used in the called function.
 	 */
-	stp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
+	str	x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
 	bl	save_and_update_ptw_el1_sys_regs
-	ldp	x29, x30, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
+	ldr	x29, [sp, #CTX_GPREGS_OFFSET + CTX_GPREG_X29]
 #endif
 	.endm
 