arm-trusted-firmware/lib/cpus/aarch32/cortex_a9.S
Dimitris Papastamos e4b34efa18 Workaround for CVE-2017-5715 for Cortex A9, A15 and A17
A per-CPU vector table (VBAR) is installed that implements the workaround
by invalidating the branch target buffer (BTB) directly in the case of the
A9 and A17, and indirectly, by invalidating the instruction cache, in the
case of the A15.

For Cortex A57 and A72 there is currently no workaround implemented when
EL3 is in AArch32 mode, so it is reported as missing.

For other vulnerable CPUs (e.g. Cortex A73 and Cortex A75), there are
no changes since there is currently no upstream AArch32 EL3 support
for these CPUs.

Change-Id: Ib42c6ef0b3c9ff2878a9e53839de497ff736258f
Signed-off-by: Dimitris Papastamos <dimitris.papastamos@arm.com>
2018-01-18 10:36:25 +00:00
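
To make the mechanism above concrete, the stub below is an assumed,
simplified sketch (not the upstream code) of what a workaround vector
entry ultimately does on a BTB-based core such as the A9 or A17: scrub the
branch predictor before any predicted branch can be consumed, then
continue to the real handler. The actual vectors in
workaround_cve_2017_5715_bpiall.S additionally preserve register state and
re-enter through the Monitor vectors, and the A15 variant writes ICIALLU
instead of BPIALL. The labels sketch_irq_vector_stub and real_irq_handler
are hypothetical.

sketch_irq_vector_stub:			/* hypothetical entry stub */
	stcopr	r0, BPIALL	/* invalidate all branch predictor entries;
				 * the value in r0 is ignored by BPIALL */
	isb			/* complete the invalidation before branching */
	b	real_irq_handler	/* hypothetical real IRQ handler */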

/*
 * Copyright (c) 2016-2018, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a9.h>
#include <cpu_macros.S>

	/*
	 * Note that despite its name, this macro asserts that the data
	 * cache is disabled: ASM_ASSERT(eq) passes only when SCTLR.C reads
	 * as 0, i.e. the cache was turned off before the power-down
	 * sequence below runs.
	 */
	.macro assert_cache_enabled
#if ENABLE_ASSERTIONS
		ldcopr	r0, SCTLR
		tst	r0, #SCTLR_C_BIT
		ASM_ASSERT(eq)
#endif
	.endm

/*
 * Take this core out of coherency by clearing the ACTLR.SMP bit.
 */
func cortex_a9_disable_smp
	ldcopr	r0, ACTLR
	bic	r0, #CORTEX_A9_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	dsb	sy
	bx	lr
endfunc cortex_a9_disable_smp

/*
 * Bring this core into coherency by setting the ACTLR.SMP bit.
 */
func cortex_a9_enable_smp
	ldcopr	r0, ACTLR
	orr	r0, #CORTEX_A9_ACTLR_SMP_BIT
	stcopr	r0, ACTLR
	isb
	bx	lr
endfunc cortex_a9_enable_smp

/*
 * Report whether the CVE-2017-5715 workaround is compiled in.
 */
func check_errata_cve_2017_5715
#if WORKAROUND_CVE_2017_5715
	mov	r0, #ERRATA_APPLIES
#else
	mov	r0, #ERRATA_MISSING
#endif
	bx	lr
endfunc check_errata_cve_2017_5715

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex A9. Must follow AAPCS.
 */
func cortex_a9_errata_report
	push	{r12, lr}

	bl	cpu_get_rev_var
	mov	r4, r0

	/*
	 * Report all errata. The revision-variant information is passed to
	 * the checking functions of each erratum.
	 */
	report_errata WORKAROUND_CVE_2017_5715, cortex_a9, cve_2017_5715

	pop	{r12, lr}
	bx	lr
endfunc cortex_a9_errata_report
#endif

/*
 * Reset handler: install the workaround vectors in both VBAR and MVBAR
 * (SP_MIN runs in Monitor mode), then enter coherency.
 */
func cortex_a9_reset_func
#if IMAGE_BL32 && WORKAROUND_CVE_2017_5715
	ldr	r0, =workaround_bpiall_runtime_exceptions
	stcopr	r0, VBAR
	stcopr	r0, MVBAR
	/* isb will be applied in the course of the reset func */
#endif
	b	cortex_a9_enable_smp
endfunc cortex_a9_reset_func

func cortex_a9_core_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 cache */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a9_disable_smp
endfunc cortex_a9_core_pwr_dwn

func cortex_a9_cluster_pwr_dwn
	push	{r12, lr}

	assert_cache_enabled

	/* Flush L1 caches */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	bl	plat_disable_acp

	/* Exit cluster coherency */
	pop	{r12, lr}
	b	cortex_a9_disable_smp
endfunc cortex_a9_cluster_pwr_dwn

declare_cpu_ops cortex_a9, CORTEX_A9_MIDR, \
	cortex_a9_reset_func, \
	cortex_a9_core_pwr_dwn, \
	cortex_a9_cluster_pwr_dwn