Mirror of https://github.com/ARM-software/arm-trusted-firmware.git
(synced 2025-08-15 08:57:02 +02:00)

Commit message:
A per-cpu VBAR is installed that implements the workaround by
invalidating the branch target buffer (BTB) directly in the case of
A9 and A17, and indirectly, by invalidating the icache, in the case of
A15. For Cortex-A57 and Cortex-A72 there is currently no workaround
implemented when EL3 is in AArch32 mode, so it is reported as missing.
For other vulnerable CPUs (e.g. Cortex-A73 and Cortex-A75), there are
no changes, since there is currently no upstream AArch32 EL3 support
for these CPUs.

Change-Id: Ib42c6ef0b3c9ff2878a9e53839de497ff736258f
Signed-off-by: Dimitris Papastamos <dimitris.papastamos@arm.com>

121 lines | 2.4 KiB | ArmAsm
/*
 * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a15.h>
#include <cpu_macros.S>

/*
 * Cortex-A15 supports both LPAE and the Virtualization Extensions,
 * regardless of whether the configuration uses them or not.
 * Therefore, we do not check ARCH_IS_ARMV7_WITH_LPAE/VE here.
 */

.macro assert_cache_enabled
|
|
#if ENABLE_ASSERTIONS
|
|
ldcopr r0, SCTLR
|
|
tst r0, #SCTLR_C_BIT
|
|
ASM_ASSERT(eq)
|
|
#endif
|
|
.endm
|
|
|
|
func cortex_a15_disable_smp
|
|
ldcopr r0, ACTLR
|
|
bic r0, #CORTEX_A15_ACTLR_SMP_BIT
|
|
stcopr r0, ACTLR
|
|
isb
|
|
dsb sy
|
|
bx lr
|
|
endfunc cortex_a15_disable_smp
|
|
|
|
func cortex_a15_enable_smp
|
|
ldcopr r0, ACTLR
|
|
orr r0, #CORTEX_A15_ACTLR_SMP_BIT
|
|
stcopr r0, ACTLR
|
|
isb
|
|
bx lr
|
|
endfunc cortex_a15_enable_smp
|
|
|
|
func check_errata_cve_2017_5715
|
|
#if WORKAROUND_CVE_2017_5715
|
|
mov r0, #ERRATA_APPLIES
|
|
#else
|
|
mov r0, #ERRATA_MISSING
|
|
#endif
|
|
bx lr
|
|
endfunc check_errata_cve_2017_5715
|
|
|
|
#if REPORT_ERRATA
|
|
/*
|
|
* Errata printing function for Cortex A15. Must follow AAPCS.
|
|
*/
|
|
func cortex_a15_errata_report
|
|
push {r12, lr}
|
|
|
|
bl cpu_get_rev_var
|
|
mov r4, r0
|
|
|
|
/*
|
|
* Report all errata. The revision-variant information is passed to
|
|
* checking functions of each errata.
|
|
*/
|
|
report_errata WORKAROUND_CVE_2017_5715, cortex_a15, cve_2017_5715
|
|
|
|
pop {r12, lr}
|
|
bx lr
|
|
endfunc cortex_a15_errata_report
|
|
#endif
|
|
|
|
func cortex_a15_reset_func
|
|
#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
|
|
ldcopr r0, ACTLR
|
|
orr r0, #CORTEX_A15_ACTLR_INV_BTB_BIT
|
|
stcopr r0, ACTLR
|
|
ldr r0, =workaround_icache_inv_runtime_exceptions
|
|
stcopr r0, VBAR
|
|
stcopr r0, MVBAR
|
|
/* isb will be applied in the course of the reset func */
|
|
#endif
|
|
b cortex_a15_enable_smp
|
|
endfunc cortex_a15_reset_func
|
|
|
|
func cortex_a15_core_pwr_dwn
|
|
push {r12, lr}
|
|
|
|
assert_cache_enabled
|
|
|
|
/* Flush L1 cache */
|
|
mov r0, #DC_OP_CISW
|
|
bl dcsw_op_level1
|
|
|
|
/* Exit cluster coherency */
|
|
pop {r12, lr}
|
|
b cortex_a15_disable_smp
|
|
endfunc cortex_a15_core_pwr_dwn
|
|
|
|
func cortex_a15_cluster_pwr_dwn
|
|
push {r12, lr}
|
|
|
|
assert_cache_enabled
|
|
|
|
/* Flush L1 caches */
|
|
mov r0, #DC_OP_CISW
|
|
bl dcsw_op_level1
|
|
|
|
bl plat_disable_acp
|
|
|
|
/* Exit cluster coherency */
|
|
pop {r12, lr}
|
|
b cortex_a15_disable_smp
|
|
endfunc cortex_a15_cluster_pwr_dwn
|
|
|
|
declare_cpu_ops cortex_a15, CORTEX_A15_MIDR, \
|
|
cortex_a15_reset_func, \
|
|
cortex_a15_core_pwr_dwn, \
|
|
cortex_a15_cluster_pwr_dwn
|