/*
* arm exception handlers
*/
#include "arm.s"
#undef B /* B is for 'botch' */
/*
* exception vectors, copied by trapinit() to somewhere useful
*/
/*
 * each slot loads its new PC from the corresponding word of vtable:
 * reading R15 yields PC+8, so slot i (at vectors+4*i) fetches from
 * vectors+4*i+8+0x18 = vectors+32+4*i, and vtable begins exactly
 * 32 bytes (8 slots * 4 bytes) after vectors.  trapinit() copies
 * both tables together, so the offset survives relocation.
 */
TEXT vectors(SB), 1, $-4
MOVW 0x18(R15), R15 /* reset */
MOVW 0x18(R15), R15 /* undefined instr. */
MOVW 0x18(R15), R15 /* SWI & SMC */
MOVW 0x18(R15), R15 /* prefetch abort */
MOVW 0x18(R15), R15 /* data abort */
MOVW 0x18(R15), R15 /* hypervisor call */
MOVW 0x18(R15), R15 /* IRQ */
MOVW 0x18(R15), R15 /* FIQ */
/*
 * handler addresses, one per vector slot above, in the architectural
 * exception order.  must stay in lock-step with vectors.
 * _vrst's entry has KZERO subtracted — presumably because reset
 * enters with the MMU off and needs a physical address; confirm
 * against trapinit()/the boot path.
 */
TEXT vtable(SB), 1, $-4
WORD $_vrst-KZERO(SB) /* reset, in svc mode already */
WORD $_vund(SB) /* undefined, switch to svc mode */
WORD $_vsvc(SB) /* swi, in svc mode already */
WORD $_vpabt(SB) /* prefetch abort, switch to svc mode */
WORD $_vdabt(SB) /* data abort, switch to svc mode */
WORD $_vhype(SB) /* hypervisor call */
WORD $_virq(SB) /* IRQ, switch to svc mode */
WORD $_vfiq(SB) /* FIQ, switch to svc mode */
/*
 * reset - start additional cpus
 */
TEXT _vrst(SB), 1, $-4
/* running in the zero segment (pc is lower 256MB) */
CPSMODE(PsrMsvc) /* should be redundant */
CPSID /* NOTE(review): arm.s macro; presumably masks IRQs — confirm */
CPSAE /* NOTE(review): arm.s macro; presumably unmasks async aborts — confirm */
SETEND(0) /* force little-endian */
BARRIERS
SETZSB /* SB for the zero segment; kernel SB comes later */
MOVW $PsrMsvc, SPSR /* sane saved-state: svc mode */
MOVW $0, R14 /* no return address: cpureset must not return */
/* invalidate i-cache and branch-target cache */
MTCP CpSC, 0, PC, C(CpCACHE), C(CpCACHEinvi), CpCACHEall
BARRIERS
BL cpureset(SB)
spin:
B spin /* paranoia: park this cpu if cpureset ever returns */
/*
 * system call.
 * entered in svc mode with interrupts as the SWI left them; R13 is
 * the kernel stack (m->stack).  builds a Ureg on the stack — layout
 * (low to high): r0..r14 (NREGS words), type, psr, pc — then calls
 * syscall(Ureg*) and falls into _rfue to return to user mode.
 */
TEXT _vsvc(SB), 1, $-4 /* SWI */
CLREX
BARRIERS
/* stack is m->stack */
MOVW.W R14, -4(R13) /* ureg->pc = interrupted PC */
MOVW SPSR, R14 /* ureg->psr = SPSR */
MOVW.W R14, -4(R13) /* ... */
MOVW $PsrMsvc, R14 /* ureg->type = PsrMsvc */
MOVW.W R14, -4(R13) /* ... */
/* avoid the ambiguity described in notes/movm.w. */
MOVM.DB.S [R0-R14], (R13) /* save user level registers (.S: user bank) */
SUB $(NREGS*4), R13 /* r13 now points to ureg */
MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
/*
 * set up m and up registers since user registers could contain anything
 */
CPUID(R1)
SLL $2, R1 /* convert to word index */
MOVW $machaddr(SB), R2
ADD R1, R2
MOVW (R2), R(MACH) /* m = machaddr[cpuid] */
CMP $0, R(MACH)
/*
 * BUG FIX: this fallback formerly targeted R0, which is overwritten
 * below before any use, so a zero m was dereferenced anyway.
 */
MOVW.EQ $MACHADDR, R(MACH) /* paranoia: use MACHADDR if 0 */
MOVW 8(R(MACH)), R(USER) /* up = m->proc */
MOVW ((NREGS+1)*4)(R13), R2 /* saved SPSR (user mode); NOTE(review): R2 looks unused after this — vestigial? */
MOVW R13, R0 /* first arg is pointer to ureg */
SUB $8, R13 /* space for argument+link */
BL syscall(SB)
/*
 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
 * may have been trashed when we get here.
 */
MOVW $setR12(SB), R12 /* reload kernel's SB */
ADD $(8+4*NREGS), R13 /* make r13 point to ureg->type */
MOVW 8(R13), R14 /* restore link */
MOVW 4(R13), R0 /* restore SPSR */
/*
 * return from user-mode exception.
 * expects new SPSR in R0. R13 must point to ureg->type.
 */
_rfue:
TEXT rfue(SB), 1, $-4
MOVW R0, SPSR /* SPSR to be restored by the exception return */
/*
 * order on stack is type, psr, pc, but RFEV7 needs pc, psr.
 * step on type and previous word to hold temporary values.
 * we could instead change the order in which psr & pc are pushed.
 */
MOVW 4(R13), R1 /* psr */
MOVW 8(R13), R2 /* pc */
MOVW R2, 4(R13) /* pc */
MOVW R1, 8(R13) /* psr */
MOVM.DB.S (R13), [R0-R14] /* restore user registers (.S: user-bank r13/r14) */
ADD $4, R13 /* pop type, sp -> pc */
RFEV7W(13) /* pop pc then psr from (r13), writeback; back to user mode */
TEXT _vund(SB), 1, $-4 /* undefined */
/* sp is m->sund */
MOVM.IA [R0-R4], (R13) /* free some working space */
MOVW $PsrMund, R0 /* r0 = type */
B _vswitch
TEXT _vpabt(SB), 1, $-4 /* prefetch abort */
/* sp is m->sabt */
MOVM.IA [R0-R4], (R13) /* free some working space */
MOVW $PsrMabt, R0 /* r0 = type */
B _vswitch
TEXT _vdabt(SB), 1, $-4 /* data abort */
/* sp is m->sabt */
MOVM.IA [R0-R4], (R13) /* free some working space */
MOVW $(PsrMabt+1), R0 /* r0 = type; +1 distinguishes data from prefetch abort */
B _vswitch
TEXT _virq(SB), 1, $-4 /* IRQ */
/* sp is m->sirq */
MOVM.IA [R0-R4], (R13) /* free some working space */
MOVW $PsrMirq, R0 /* r0 = type */
B _vswitch
/*
 * come here with type in R0 and R13 pointing above saved [r0-r4].
 * we'll switch to SVC mode and then call trap.
 */
_vswitch:
// TEXT _vswtch(SB), 1, $-4 /* make symbol visible to debuggers */
CLREX
BARRIERS
MOVW SPSR, R1 /* save SPSR for ureg */
/*
 * R12 needs to be set before using PsrMbz, so BIGENDCHECK code has
 * been moved below.
 */
MOVW R14, R2 /* save interrupted pc for ureg */
MOVW R13, R3 /* save pointer to where the original [R0-R4] are */
/*
 * switch processor to svc mode. this switches the banked registers
 * (r13 [sp] and r14 [link]) to those of svc mode (so we must be sure
 * to never get here already in svc mode).
 */
CPSMODE(PsrMsvc) /* switch! */
CPSID
AND.S $0xf, R1, R4 /* interrupted code kernel or user? low nibble of mode: 0 => user (0x10) */
BEQ _userexcep
/*
 * here for trap from SVC mode
 */
/* push ureg->{type, psr, pc} onto Msvc stack.
 * r13 points to ureg->type after.
 * (MOVM stores ascending: R0=type lowest, then R1=psr, R2=pc.)
 */
MOVM.DB.W [R0-R2], (R13)
MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
/*
 * avoid the ambiguity described in notes/movm.w.
 * In order to get a predictable value in R13 after the stores,
 * separate the store-multiple from the stack-pointer adjustment.
 * We'll assume that the old value of R13 should be stored on the stack.
 */
/* save kernel level registers, at end r13 points to ureg */
MOVM.DB [R0-R14], (R13)
SUB $(NREGS*4), R13 /* SP now points to saved R0 */
MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
/* previous mode was svc, so the saved spsr should be sane. */
MOVW ((NREGS+1)*4)(R13), R1 /* NOTE(review): R1 is overwritten by the next MOVM — looks vestigial */
MOVM.IA (R13), [R0-R8] /* reload interrupted kernel context's r0-r8 from the ureg */
MOVW R13, R0 /* first arg is pointer to ureg */
SUB $(4*2), R13 /* space for argument+link (for debugger) */
MOVW $0xdeaddead, R11 /* marker */
BL trap(SB) /* trap(ureg) */
/*
 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
 * may have been trashed when we get here.
 */
MOVW $setR12(SB), R12 /* reload kernel's SB */
ADD $(4*2+4*NREGS), R13 /* make r13 point to ureg->type */
/*
 * if we interrupted a previous trap's handler and are now
 * returning to it, we need to propagate the current R(MACH) (R10)
 * by overriding the saved one on the stack, since we may have
 * been rescheduled and be on a different processor now than
 * at entry.
 */
MOVW R(MACH), (-(NREGS-MACH)*4)(R13) /* restore current cpu's MACH */
MOVW 8(R13), R14 /* restore link */
MOVW 4(R13), R0 /* restore SPSR */
/* return from kernel-mode exception */
MOVW R0, SPSR /* ... */
/*
 * order on stack is type, psr, pc, but RFEV7 needs pc, psr.
 * step on type and previous word to hold temporary values.
 * we could instead change the order in which psr & pc are pushed.
 */
MOVW 4(R13), R1 /* psr */
MOVW 8(R13), R2 /* pc */
MOVW R2, 4(R13) /* pc */
MOVW R1, 8(R13) /* psr */
/* restore kernel regs other than SP; we're using it */
SUB $(NREGS*4), R13
MOVM.IA.W (R13), [R0-R12]
ADD $4, R13 /* skip saved kernel SP */
MOVM.IA.W (R13), [R14]
ADD $4, R13 /* pop type, sp -> pc */
BARRIERS
RFEV7W(13) /* pop pc then psr; resume interrupted kernel code */
/*
 * here for trap from USER mode.
 * R0 = type, R1 = saved SPSR, R2 = interrupted pc, R3 -> the
 * exception mode's saved [r0-r4]; now in svc mode on m->stack.
 */
_userexcep:
MOVM.DB.W [R0-R2], (R13) /* set ureg->{type, psr, pc}; r13 points to ureg->type */
MOVM.IA (R3), [R0-R4] /* restore [R0-R4] from previous mode's stack */
/* avoid the ambiguity described in notes/movm.w. */
MOVM.DB.S [R0-R14], (R13) /* save user level registers (.S: user bank; was mislabelled "kernel") */
SUB $(NREGS*4), R13 /* r13 now points to ureg */
MOVW $setR12(SB), R12 /* Make sure we've got the kernel's SB loaded */
/*
 * set up m and up registers since user registers could contain anything
 */
CPUID(R1)
SLL $2, R1 /* convert to word index */
MOVW $machaddr(SB), R2
ADD R1, R2
MOVW (R2), R(MACH) /* m = machaddr[cpuid] */
CMP $0, R(MACH)
/*
 * BUG FIX: this fallback formerly targeted R0, which is overwritten
 * below before any use, so a zero m was dereferenced anyway.
 */
MOVW.EQ $MACHADDR, R(MACH) /* paranoia: use MACHADDR if 0 */
MOVW 8(R(MACH)), R(USER) /* up = m->proc */
MOVW ((NREGS+1)*4)(R13), R2 /* saved SPSR */
MOVW R13, R0 /* first arg is pointer to ureg */
SUB $(4*2), R13 /* space for argument+link (for debugger) */
BL trap(SB) /* trap(ureg) */
/*
 * caller saves on plan 9, so registers other than 9, 10, 13 & 14
 * may have been trashed when we get here.
 */
ADD $(4*2+4*NREGS), R13 /* make r13 point to ureg->type */
MOVW 8(R13), R14 /* restore link */
MOVW 4(R13), R0 /* restore SPSR (duplicate reload removed) */
B _rfue
/* FIQs are unexpected: complain on the console, then resume */
TEXT _vfiq(SB), 1, $-4 /* FIQ */
PUTC('?')
PUTC('f')
PUTC('i')
PUTC('q')
RFE /* FIQ is special, ignore it for now */
/* hypervisor calls are unexpected: complain on the console, then resume */
TEXT _vhype(SB), 1, $-4
PUTC('?')
PUTC('h')
PUTC('y')
PUTC('p')
RFE
/*
 * set the stack value for the mode passed in R0
 * setr13(int mode, u32int *sp): first arg in R0, second at 4(FP).
 * returns the old banked sp for that mode (in R0).
 * briefly switches CPSR to the target mode (interrupts masked) to
 * reach its banked r13, then switches back.
 */
TEXT setr13(SB), 1, $-4
MOVW 4(FP), R1 /* second arg: new sp */
MOVW CPSR, R2 /* remember current mode to return to */
BIC $(PsrMask|PsrMbz), R2, R3 /* clear mode (and must-be-zero) bits */
ORR $(PsrDirq|PsrDfiq), R3 /* keep IRQ & FIQ masked while mode-hopping */
ORR R0, R3 /* insert target mode */
MOVW R3, CPSR /* switch to new mode */
MOVW R13, R0 /* return old sp */
MOVW R1, R13 /* install new one */
MOVW R2, CPSR /* switch back to old mode */
RET