/* SPDX-License-Identifier: GPL-2.0+ */
/*
 * armboot - Startup Code for OMAP3530/ARM Cortex CPU-core
 *
 * Copyright (c) 2004 Texas Instruments <r-woodruff2@ti.com>
 *
 * Copyright (c) 2001 Marius Gröger <mag@sysgo.de>
 * Copyright (c) 2002 Alex Züpke <azu@sysgo.de>
 * Copyright (c) 2002 Gary Jennejohn <garyj@denx.de>
 * Copyright (c) 2003 Richard Woodruff <r-woodruff2@ti.com>
 * Copyright (c) 2003 Kshitij <kshitij@ti.com>
 * Copyright (c) 2006-2008 Syed Mohammed Khasim <x0khasim@ti.com>
 */

#include <asm-offsets.h>
#include <config.h>
#include <asm/system.h>
#include <linux/linkage.h>
#include <asm/armv7.h>
#include <system-constants.h>

/*************************************************************************
 *
 * Startup Code (reset vector)
 *
 * Do important init only if we don't start from memory!
 * Setup memory and board specific bits prior to relocation.
 * Relocate armboot to ram. Setup stack.
 *
 *************************************************************************/
	.globl	reset
	.globl	save_boot_params_ret
	.type	save_boot_params_ret,%function
#ifdef CONFIG_ARMV7_LPAE
	.global	switch_to_hypervisor_ret
#endif

reset:
	/* Allow the board to save important registers */
	b	save_boot_params
save_boot_params_ret:
#ifdef CONFIG_POSITION_INDEPENDENT
	/*
	 * Fix .rela.dyn relocations. This allows U-Boot to be loaded to and
	 * executed at a different address than it was linked at.
	 */
pie_fixup:
	adr	r0, reset	/* r0 <- Runtime value of reset label */
	ldr	r1, =reset	/* r1 <- Linked value of reset label */
	subs	r4, r0, r1	/* r4 <- Runtime-vs-link offset */
	beq	pie_fixup_done

	adr	r0, pie_fixup
	ldr	r1, _rel_dyn_start_ofs
	add	r2, r0, r1	/* r2 <- Runtime &__rel_dyn_start */
	ldr	r1, _rel_dyn_end_ofs
	add	r3, r0, r1	/* r3 <- Runtime &__rel_dyn_end */
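
	/*
	 * Each 8-byte Elf32_Rel entry holds the location to patch followed by
	 * the relocation info word; the addend is stored at the target
	 * location itself, which is why the loop reads it, offsets it and
	 * writes it back in place.
	 */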
pie_fix_loop:
	ldr	r0, [r2]	/* r0 <- Link location */
	ldr	r1, [r2, #4]	/* r1 <- fixup info */
	cmp	r1, #23		/* 23 == R_ARM_RELATIVE fixup? */
	bne	pie_skip_reloc

	/* relative fix: increase location by offset */
	add	r0, r4
	ldr	r1, [r0]
	add	r1, r4
	str	r1, [r0]
	str	r0, [r2]
	add	r2, #8
pie_skip_reloc:
	cmp	r2, r3
	blo	pie_fix_loop
pie_fixup_done:
#endif

#ifdef CONFIG_ARMV7_LPAE
/*
 * check for Hypervisor support
 */
	mrc	p15, 0, r0, c0, c1, 1		@ read ID_PFR1
	and	r0, r0, #CPUID_ARM_VIRT_MASK	@ mask virtualization bits
	cmp	r0, #(1 << CPUID_ARM_VIRT_SHIFT)
	beq	switch_to_hypervisor
switch_to_hypervisor_ret:
#endif

	/*
	 * disable interrupts (FIQ and IRQ), also set the cpu to SVC32 mode,
	 * except if in HYP mode already
	 */
	mrs	r0, cpsr
	and	r1, r0, #0x1f		@ mask mode bits
	teq	r1, #0x1a		@ test for HYP mode
	bicne	r0, r0, #0x1f		@ clear all mode bits
	orrne	r0, r0, #0x13		@ set SVC mode
	orr	r0, r0, #0xc0		@ disable FIQ and IRQ
	msr	cpsr,r0

#if !CONFIG_IS_ENABLED(SYS_NO_VECTOR_TABLE)
	/*
	 * Setup vector:
	 */
	/* Set V=0 in CP15 SCTLR register - for VBAR to point to vector */
	mrc	p15, 0, r0, c1, c0, 0	@ Read CP15 SCTLR Register
	bic	r0, #CR_V		@ V = 0
	mcr	p15, 0, r0, c1, c0, 0	@ Write CP15 SCTLR Register

#ifdef CONFIG_HAS_VBAR
	/* Set vector address in CP15 VBAR register */
	ldr	r0, =_start
	mcr	p15, 0, r0, c12, c0, 0	@ Set VBAR
#endif
#endif

	/* the mask ROM code should have PLL and others stable */
#if !CONFIG_IS_ENABLED(SKIP_LOWLEVEL_INIT)
#ifdef CONFIG_CPU_V7A
	bl	cpu_init_cp15
#endif
#if !CONFIG_IS_ENABLED(SKIP_LOWLEVEL_INIT_ONLY)
	bl	cpu_init_crit
#endif
#endif

	bl	_main

/*------------------------------------------------------------------------------*/

ENTRY(c_runtime_cpu_setup)
/*
 * If I-cache is enabled invalidate it
 */
#if !CONFIG_IS_ENABLED(SYS_ICACHE_OFF)
	mcr	p15, 0, r0, c7, c5, 0	@ invalidate icache
	dsb
	isb
#endif

	bx	lr

ENDPROC(c_runtime_cpu_setup)

/*************************************************************************
 *
 * void save_boot_params(u32 r0, u32 r1, u32 r2, u32 r3)
 *	__attribute__((weak));
 *
 * Stack pointer is not yet initialized at this moment
 * Don't save anything to stack even if compiled with -O0
 *
 *************************************************************************/
WEAK(save_boot_params)
	b	save_boot_params_ret		@ back to my caller
ENDPROC(save_boot_params)
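
/*
 * A board can provide its own (non-weak) save_boot_params to capture the
 * values the boot ROM hands over in r0-r3. A minimal sketch, assuming a
 * hypothetical scratch area named BOOT_PARAMS_SCRATCH and remembering that
 * no stack is available yet:
 *
 *	ENTRY(save_boot_params)
 *		ldr	r8, =BOOT_PARAMS_SCRATCH	@ hypothetical address
 *		stmia	r8, {r0-r3}			@ stash ROM arguments
 *		b	save_boot_params_ret		@ resume the reset path
 *	ENDPROC(save_boot_params)
 */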

#ifdef CONFIG_ARMV7_LPAE
WEAK(switch_to_hypervisor)
	b	switch_to_hypervisor_ret
ENDPROC(switch_to_hypervisor)
#endif

/*************************************************************************
 *
 * cpu_init_cp15
 *
 * Setup CP15 registers (cache, MMU, TLBs). The I-cache is turned on unless
 * CONFIG_SYS_ICACHE_OFF is defined.
 *
 *************************************************************************/
ENTRY(cpu_init_cp15)

#if CONFIG_IS_ENABLED(ARMV7_SET_CORTEX_SMPEN)
	/*
	 * The Arm Cortex-A7 TRM says this bit must be enabled before
	 * "any cache or TLB maintenance operations are performed".
	 */
	mrc	p15, 0, r0, c1, c0, 1	@ read auxiliary control register
	orr	r0, r0, #1 << 6		@ set SMP bit to enable coherency
	mcr	p15, 0, r0, c1, c0, 1	@ write auxiliary control register
#endif

	/*
	 * Invalidate L1 I/D
	 */
	mov	r0, #0			@ set up for MCR
	mcr	p15, 0, r0, c8, c7, 0	@ invalidate TLBs
	mcr	p15, 0, r0, c7, c5, 0	@ invalidate icache
	mcr	p15, 0, r0, c7, c5, 6	@ invalidate BP array
	dsb
	isb

	/*
	 * disable MMU stuff and caches
	 */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, r0, #0x00002000	@ clear bit 13 (--V-)
	bic	r0, r0, #0x00000007	@ clear bits 2:0 (-CAM)
	orr	r0, r0, #0x00000002	@ set bit 1 (--A-) Align
	orr	r0, r0, #0x00000800	@ set bit 11 (Z---) BTB
#if CONFIG_IS_ENABLED(SYS_ICACHE_OFF)
	bic	r0, r0, #0x00001000	@ clear bit 12 (I) I-cache
#else
	orr	r0, r0, #0x00001000	@ set bit 12 (I) I-cache
#endif
	mcr	p15, 0, r0, c1, c0, 0
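
	/*
	 * At this point the MMU and D-cache are disabled, alignment checking
	 * and branch prediction are enabled, and the I-cache state follows
	 * the SYS_ICACHE_OFF choice above.
	 */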

#ifdef CONFIG_ARM_ERRATA_716044
	mrc	p15, 0, r0, c1, c0, 0	@ read system control register
	orr	r0, r0, #1 << 11	@ set bit #11
	mcr	p15, 0, r0, c1, c0, 0	@ write system control register
#endif
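
	/*
	 * The erratum workarounds below each set an implementation-defined
	 * bit in the CP15 diagnostic register of the affected core; the bit
	 * positions come from the corresponding erratum notices.
	 */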

#if (defined(CONFIG_ARM_ERRATA_742230) || defined(CONFIG_ARM_ERRATA_794072))
	mrc	p15, 0, r0, c15, c0, 1	@ read diagnostic register
	orr	r0, r0, #1 << 4		@ set bit #4
	mcr	p15, 0, r0, c15, c0, 1	@ write diagnostic register
#endif

#ifdef CONFIG_ARM_ERRATA_743622
	mrc	p15, 0, r0, c15, c0, 1	@ read diagnostic register
	orr	r0, r0, #1 << 6		@ set bit #6
	mcr	p15, 0, r0, c15, c0, 1	@ write diagnostic register
#endif

#ifdef CONFIG_ARM_ERRATA_751472
	mrc	p15, 0, r0, c15, c0, 1	@ read diagnostic register
	orr	r0, r0, #1 << 11	@ set bit #11
	mcr	p15, 0, r0, c15, c0, 1	@ write diagnostic register
#endif

#ifdef CONFIG_ARM_ERRATA_761320
	mrc	p15, 0, r0, c15, c0, 1	@ read diagnostic register
	orr	r0, r0, #1 << 21	@ set bit #21
	mcr	p15, 0, r0, c15, c0, 1	@ write diagnostic register
#endif

#ifdef CONFIG_ARM_ERRATA_845369
	mrc	p15, 0, r0, c15, c0, 1	@ read diagnostic register
	orr	r0, r0, #1 << 22	@ set bit #22
	mcr	p15, 0, r0, c15, c0, 1	@ write diagnostic register
#endif

	mov	r5, lr			@ Store my Caller
	mrc	p15, 0, r1, c0, c0, 0	@ read Main ID Register (MIDR) into r1
	mov	r3, r1, lsr #20		@ get variant field
	and	r3, r3, #0xf		@ r3 has CPU variant
	and	r4, r1, #0xf		@ r4 has CPU revision
	mov	r2, r3, lsl #4		@ shift variant field for combined value
	orr	r2, r4, r2		@ r2 has combined CPU variant + revision
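	/*
	 * r2 now packs (variant << 4) | revision, so an r2p1 core reads back
	 * as 0x21; the revision checks in the errata blocks below compare
	 * against this value.
	 */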

	/* Early stack for errata workarounds that need to call into C code */
#if defined(CONFIG_SPL_BUILD) && defined(CONFIG_SPL_STACK)
	ldr	r0, =(CONFIG_SPL_STACK)
#else
	ldr	r0, =(SYS_INIT_SP_ADDR)
#endif
	bic	r0, r0, #7	/* 8-byte alignment for ABI compliance */
	mov	sp, r0
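
	/*
	 * Several workarounds below go through the weak C helpers
	 * v7_arch_cp15_set_acr() and v7_arch_cp15_set_l2aux_ctrl(), which a
	 * board may override (for example to issue a secure monitor call when
	 * the register is only writable by secure firmware); that is why a
	 * stack is already needed here.
	 */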

#ifdef CONFIG_ARM_ERRATA_798870
	cmp	r2, #0x30		@ Applies to revisions below r3p0
	bge	skip_errata_798870	@ skip if not affected rev
	cmp	r2, #0x20		@ and to r2p0 and above
	blt	skip_errata_798870	@ skip if not affected rev

	mrc	p15, 1, r0, c15, c0, 0	@ read l2 aux ctrl reg
	orr	r0, r0, #1 << 7		@ Enable hazard-detect timeout
	push	{r1-r5}			@ Save the cpu info registers
	bl	v7_arch_cp15_set_l2aux_ctrl
	isb				@ Recommended ISB after l2actlr update
	pop	{r1-r5}			@ Restore the cpu info - fall through
skip_errata_798870:
#endif

#ifdef CONFIG_ARM_ERRATA_801819
	cmp	r2, #0x24		@ Applies to revisions up to and including r2p4
	bgt	skip_errata_801819	@ skip if not affected rev
	cmp	r2, #0x20		@ and to r2p0 and above
	blt	skip_errata_801819	@ skip if not affected rev
	mrc	p15, 0, r0, c0, c0, 6	@ pick up REVIDR reg
	and	r0, r0, #1 << 3		@ check REVIDR[3]
	cmp	r0, #1 << 3
	beq	skip_errata_801819	@ skip erratum if REVIDR[3] is set

	mrc	p15, 0, r0, c1, c0, 1	@ read auxiliary control register
	orr	r0, r0, #3 << 27	@ Disables streaming. All write-allocate
					@ lines allocate in the L1 or L2 cache.
	orr	r0, r0, #3 << 25	@ Disables streaming. All write-allocate
					@ lines allocate in the L1 cache.
	push	{r1-r5}			@ Save the cpu info registers
	bl	v7_arch_cp15_set_acr
	pop	{r1-r5}			@ Restore the cpu info - fall through
skip_errata_801819:
#endif

#ifdef CONFIG_ARM_CORTEX_A15_CVE_2017_5715
	mrc	p15, 0, r0, c1, c0, 1	@ read auxiliary control register
	orr	r0, r0, #1 << 0		@ Enable invalidates of BTB
	push	{r1-r5}			@ Save the cpu info registers
	bl	v7_arch_cp15_set_acr
	pop	{r1-r5}			@ Restore the cpu info - fall through
#endif

#ifdef CONFIG_ARM_ERRATA_454179
	mrc	p15, 0, r0, c1, c0, 1	@ Read ACR
	cmp	r2, #0x21		@ Only on < r2p1
	orrlt	r0, r0, #(0x3 << 6)	@ Set DBSM(BIT7) and IBE(BIT6) bits
	push	{r1-r5}			@ Save the cpu info registers
	bl	v7_arch_cp15_set_acr
	pop	{r1-r5}			@ Restore the cpu info - fall through
#endif

#if defined(CONFIG_ARM_ERRATA_430973) || defined(CONFIG_ARM_CORTEX_A8_CVE_2017_5715)
	mrc	p15, 0, r0, c1, c0, 1	@ Read ACR
#ifdef CONFIG_ARM_CORTEX_A8_CVE_2017_5715
	orr	r0, r0, #(0x1 << 6)	@ Set IBE bit always to enable OS WA
#else
	cmp	r2, #0x21		@ Only on < r2p1
	orrlt	r0, r0, #(0x1 << 6)	@ Set IBE bit
#endif
	push	{r1-r5}			@ Save the cpu info registers
	bl	v7_arch_cp15_set_acr
	pop	{r1-r5}			@ Restore the cpu info - fall through
#endif

#ifdef CONFIG_ARM_ERRATA_621766
	mrc	p15, 0, r0, c1, c0, 1	@ Read ACR
	cmp	r2, #0x21		@ Only on < r2p1
	orrlt	r0, r0, #(0x1 << 5)	@ Set L1NEON bit
	push	{r1-r5}			@ Save the cpu info registers
	bl	v7_arch_cp15_set_acr
	pop	{r1-r5}			@ Restore the cpu info - fall through
#endif

#ifdef CONFIG_ARM_ERRATA_725233
	mrc	p15, 1, r0, c9, c0, 2	@ Read L2ACR
	cmp	r2, #0x21		@ Only on < r2p1 (Cortex-A8)
	orrlt	r0, r0, #(0x1 << 27)	@ L2 PLD data forwarding disable
	push	{r1-r5}			@ Save the cpu info registers
	bl	v7_arch_cp15_set_l2aux_ctrl
	pop	{r1-r5}			@ Restore the cpu info - fall through
#endif

#ifdef CONFIG_ARM_ERRATA_852421
	mrc	p15, 0, r0, c15, c0, 1	@ read diagnostic register
	orr	r0, r0, #1 << 24	@ set bit #24
	mcr	p15, 0, r0, c15, c0, 1	@ write diagnostic register
#endif

#ifdef CONFIG_ARM_ERRATA_852423
	mrc	p15, 0, r0, c15, c0, 1	@ read diagnostic register
	orr	r0, r0, #1 << 12	@ set bit #12
	mcr	p15, 0, r0, c15, c0, 1	@ write diagnostic register
#endif

	mov	pc, r5			@ back to my caller
ENDPROC(cpu_init_cp15)

#if !CONFIG_IS_ENABLED(SKIP_LOWLEVEL_INIT) && \
	!CONFIG_IS_ENABLED(SKIP_LOWLEVEL_INIT_ONLY)
/*************************************************************************
 *
 * CPU_init_critical registers
 *
 * setup important registers
 * setup memory timing
 *
 *************************************************************************/
ENTRY(cpu_init_crit)
	/*
	 * Jump to board specific initialization...
	 * The Mask ROM will have already initialized
	 * basic memory. Go here to bump up clock rate and handle
	 * wake up conditions.
	 */
	b	lowlevel_init		@ go setup pll,mux,memory
ENDPROC(cpu_init_crit)
#endif
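
/*
 * __rel_dyn_start and __rel_dyn_end are stored as offsets from pie_fixup
 * rather than as absolute addresses, so the values loaded in pie_fixup
 * above stay correct wherever the image is actually loaded.
 */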
#if CONFIG_POSITION_INDEPENDENT
_rel_dyn_start_ofs:
	.word	__rel_dyn_start - pie_fixup
_rel_dyn_end_ofs:
	.word	__rel_dyn_end - pie_fixup
#endif