path: root/lib/cpus/aarch32/cortex_a32.S
blob: c262276224607ba9a6dd442b45fc82960a601842
/*
 * Copyright (c) 2016-2017, ARM Limited and Contributors. All rights reserved.
 *
 * SPDX-License-Identifier: BSD-3-Clause
 */

#include <arch.h>
#include <asm_macros.S>
#include <assert_macros.S>
#include <cortex_a32.h>
#include <cpu_macros.S>
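
/*
 * CPU specific operations for the Cortex-A32: the reset handler and the
 * core and cluster power down handlers registered with the CPU ops
 * framework through declare_cpu_ops at the bottom of this file.
 */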


	/* ---------------------------------------------
	 * Disable intra-cluster coherency
	 * Clobbers: r0-r1
	 * ---------------------------------------------
	 */
func cortex_a32_disable_smp
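	/*
	 * CPUECTLR is a 64-bit CP15 register on the Cortex-A32; the
	 * ldcopr16/stcopr16 macros access it as a register pair
	 * (MRRC/MCRR).
	 */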
	ldcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	bic	r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
	stcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	isb
	dsb	sy
	bx	lr
endfunc cortex_a32_disable_smp

	/* -------------------------------------------------
	 * The CPU Ops reset function for Cortex-A32.
	 * Clobbers: r0-r1
	 * -------------------------------------------------
	 */
func cortex_a32_reset_func
	/* ---------------------------------------------
	 * Enable the SMP bit.
	 * CPUECTLR.SMPEN must be set before the caches
	 * and MMU are enabled.
	 * ---------------------------------------------
	 */
	ldcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	orr	r0, r0, #CORTEX_A32_CPUECTLR_SMPEN_BIT
	stcopr16	r0, r1, CORTEX_A32_CPUECTLR_EL1
	isb
	bx	lr
endfunc cortex_a32_reset_func

	/* ----------------------------------------------------
	 * The CPU Ops core power down function for Cortex-A32.
	 * Clobbers: r0-r3
	 * ----------------------------------------------------
	 */
func cortex_a32_core_pwr_dwn
	/* r12 is pushed to meet the 8-byte stack alignment requirement */
	push	{r12, lr}

	/* Assert that the data cache has been disabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Flush L1 data cache.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Exit intra-cluster coherency.
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a32_disable_smp
endfunc cortex_a32_core_pwr_dwn

	/* -------------------------------------------------------
	 * The CPU Ops cluster power down function for Cortex-A32.
	 * Clobbers: r0-r3
	 * -------------------------------------------------------
	 */
func cortex_a32_cluster_pwr_dwn
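	/*
	 * The data cache is expected to be disabled already. The sequence
	 * below flushes the L1 data cache, disables the optional ACP,
	 * flushes the L2 cache and finally exits coherency.
	 */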
	/* r12 is pushed to meet the 8-byte stack alignment requirement */
	push	{r12, lr}

	/* Assert that the data cache has been disabled */
#if ENABLE_ASSERTIONS
	ldcopr	r0, SCTLR
	tst	r0, #SCTLR_C_BIT
	ASM_ASSERT(eq)
#endif

	/* ---------------------------------------------
	 * Flush L1 data cache.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level1

	/* ---------------------------------------------
	 * Disable the optional Accelerator Coherency
	 * Port (ACP) via the platform hook.
	 * ---------------------------------------------
	 */
	bl	plat_disable_acp

	/* ---------------------------------------------
	 * Flush L2 cache.
	 * ---------------------------------------------
	 */
	mov	r0, #DC_OP_CISW
	bl	dcsw_op_level2

	/* ---------------------------------------------
	 * Exit intra-cluster coherency.
	 * ---------------------------------------------
	 */
	pop	{r12, lr}
	b	cortex_a32_disable_smp
endfunc cortex_a32_cluster_pwr_dwn

#if REPORT_ERRATA
/*
 * Errata printing function for Cortex-A32. Must follow AAPCS.
 * No errata workarounds are implemented for Cortex-A32, so there is nothing
 * to report.
 */
func cortex_a32_errata_report
	bx	lr
endfunc cortex_a32_errata_report
#endif
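
/*
 * Register the Cortex-A32 cpu_ops. At runtime the CPU ops framework matches
 * the running core's MIDR against CORTEX_A32_MIDR to select these handlers.
 */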

declare_cpu_ops cortex_a32, CORTEX_A32_MIDR, \
	cortex_a32_reset_func, \
	cortex_a32_core_pwr_dwn, \
	cortex_a32_cluster_pwr_dwn