/*
 *  linux/arch/arm/vfp/vfphw.S
 *
 *  Copyright (C) 2004 ARM Limited.
 *  Written by Deep Blue Solutions Limited.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This code is called from the kernel's undefined instruction trap.
 * r9 holds the return address for successful handling.
 * lr holds the return address for unrecognised instructions.
 * r10 points at the start of the private FP workspace in the thread structure
 * sp points to a struct pt_regs (as defined in include/asm/proc/ptrace.h)
 */
#include <asm/thread_info.h>
#include <asm/vfpmacros.h>
#include "../kernel/entry-header.S"

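@ Debug helpers: each macro embeds its printk format string inline,
@ directly after the "b 1f" that skips over it.  "add r0, pc, #4"
@ relies on pc reading as the current instruction address plus 8, so
@ r0 ends up pointing at the .asciz string when printk is called.
@ All of this compiles away unless DEBUG is defined.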
	.macro	DBGSTR, str
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro  DBGSTR1, str, arg
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r1, \arg
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm

	.macro  DBGSTR3, str, arg1, arg2, arg3
#ifdef DEBUG
	stmfd	sp!, {r0-r3, ip, lr}
	mov	r3, \arg3
	mov	r2, \arg2
	mov	r1, \arg1
	add	r0, pc, #4
	bl	printk
	b	1f
	.asciz  "<7>VFP: \str\n"
	.balign 4
1:	ldmfd	sp!, {r0-r3, ip, lr}
#endif
	.endm


@ VFP hardware support entry point.
@
@  r0  = faulted instruction
@  r2  = faulted PC+4
@  r9  = successful return
@  r10 = vfp_state union
@  r11 = CPU number
@  lr  = failure return

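@ Two cases are handled below.  If the VFP is currently disabled, the
@ trap is normally the first VFP instruction of a newly scheduled
@ thread, so we do the lazy context switch: save the old owner's state
@ (UP only), load this thread's state, and retry the instruction.  If
@ the VFP is already enabled, the trap means either a pending VFP
@ exception that must be bounced to the support code, or an instruction
@ the VFP does not recognise, which is passed on to the next handler.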
	.globl	vfp_support_entry
vfp_support_entry:
	DBGSTR3	"instr %08x pc %08x state %p", r0, r2, r10

	VFPFMRX	r1, FPEXC		@ Is the VFP enabled?
	DBGSTR1	"fpexc %08x", r1
	tst	r1, #FPEXC_ENABLE
	bne	look_for_VFP_exceptions	@ VFP is already enabled

	DBGSTR1 "enable %x", r10
	ldr	r3, last_VFP_context_address
	orr	r1, r1, #FPEXC_ENABLE	@ user FPEXC has the enable bit set
	ldr	r4, [r3, r11, lsl #2]	@ last_VFP_context pointer
	bic	r5, r1, #FPEXC_EXCEPTION @ make sure exceptions are disabled
	cmp	r4, r10
	beq	check_for_exception	@ we are returning to the same
					@ process, so the registers are
					@ still there.  In this case, we do
					@ not want to drop a pending exception.

	VFPFMXR	FPEXC, r5		@ enable VFP, disable any pending
					@ exceptions, so we can get at the
					@ rest of it

#ifndef CONFIG_SMP
	@ Save out the current registers to the old thread state
	@ No need for SMP since this is not done lazily

	DBGSTR1	"save old state %p", r4
	cmp	r4, #0
	beq	no_old_VFP_process
	VFPFMRX	r5, FPSCR		@ current status
	VFPFMRX	r6, FPINST		@ FPINST (always there, rev0 onwards)
	tst	r1, #FPEXC_FPV2		@ is there an FPINST2 to read?
	VFPFMRX	r8, FPINST2, NE		@ FPINST2 if needed - avoids reading
					@ nonexistent reg on rev0
	VFPFSTMIA r4 			@ save the working registers
	stmia	r4, {r1, r5, r6, r8}	@ save FPEXC, FPSCR, FPINST, FPINST2
					@ and point r4 at the word at the
					@ start of the register dump
#endif

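@ last_VFP_context[] is a per-CPU array of pointers (hence the
@ "r11, lsl #2" indexing by CPU number) recording which thread's
@ state currently lives in that CPU's VFP hardware.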
no_old_VFP_process:
	DBGSTR1	"load state %p", r10
	str	r10, [r3, r11, lsl #2]	@ update the last_VFP_context pointer
					@ Load the saved state back into the VFP
	VFPFLDMIA r10	 		@ reload the working registers while
					@ FPEXC is in a safe state
	ldmia	r10, {r1, r5, r6, r8}	@ load FPEXC, FPSCR, FPINST, FPINST2
	tst	r1, #FPEXC_FPV2		@ is there an FPINST2 to write?
	VFPFMXR	FPINST2, r8, NE		@ FPINST2 if needed - avoids writing
					@ nonexistent reg on rev0
	VFPFMXR	FPINST, r6
	VFPFMXR	FPSCR, r5		@ restore status

check_for_exception:
	tst	r1, #FPEXC_EXCEPTION
	bne	process_exception	@ might as well handle the pending
					@ exception before retrying.  Branch
					@ out before setting an FPEXC that
					@ stops us reading the VFP registers.
	VFPFMXR	FPEXC, r1		@ restore FPEXC last
	sub	r2, r2, #4
	str	r2, [sp, #S_PC]		@ retry the instruction
	mov	pc, r9			@ we think we have handled things


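@ The VFP was already enabled when we trapped, so this is not a lazy
@ context switch: either a pending exception needs bouncing to the
@ support code (EX set in FPEXC, or IXE set in FPSCR, which FPEXC does
@ not reflect), or the instruction is simply not one this VFP
@ recognises and is handed to the next undefined-instruction handler.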
look_for_VFP_exceptions:
	tst	r1, #FPEXC_EXCEPTION
	bne	process_exception
	VFPFMRX	r5, FPSCR
	tst	r5, #FPSCR_IXE		@ IXE doesn't set FPEXC_EXCEPTION !
	bne	process_exception

	@ The coprocessor instruction is not one the VFP recognises, so
	@ fall through and hand it on to the next undefined-instruction
	@ handler.

	DBGSTR	"not VFP"
	mov	pc, lr

process_exception:
	DBGSTR	"bounce"
	sub	r2, r2, #4
	str	r2, [sp, #S_PC]		@ retry the instruction on exit from
					@ the imprecise exception handling in
					@ the support code
	mov	r2, sp			@ nothing stacked - regdump is at TOS
	mov	lr, r9			@ set up to return to the user code.

	@ Now call the C code to package up the bounce to the support code
	@   r0 holds the trigger instruction
	@   r1 holds the FPEXC value
	@   r2 pointer to register dump
	b	VFP9_bounce		@ we have handled this - the support
					@ code will raise an exception if
					@ required. If not, the user code will
					@ retry the faulted instruction
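					@ (Note this is a plain branch: lr
					@ was set to r9 above, so when the
					@ C code returns it goes straight
					@ back to the successful-return
					@ address.)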

#ifdef CONFIG_SMP
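@ On SMP the lazy save in vfp_support_entry is compiled out: a thread's
@ VFP state must not be left behind in another CPU's hardware, so it is
@ saved eagerly at context-switch time, and vfp_save_state is exported
@ for that purpose.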
	.globl	vfp_save_state
	.type	vfp_save_state, %function
vfp_save_state:
	@ Save the current VFP state
	@ r0 - save location
	@ r1 - FPEXC
	DBGSTR1	"save VFP state %p", r0
	VFPFMRX	r2, FPSCR		@ current status
	VFPFMRX	r3, FPINST		@ FPINST (always there, rev0 onwards)
	tst	r1, #FPEXC_FPV2		@ is there an FPINST2 to read?
	VFPFMRX	r12, FPINST2, NE	@ FPINST2 if needed - avoids reading
					@ nonexistent reg on rev0
	VFPFSTMIA r0 			@ save the working registers
	stmia	r0, {r1, r2, r3, r12}	@ save FPEXC, FPSCR, FPINST, FPINST2
	mov	pc, lr
#endif

last_VFP_context_address:
	.word	last_VFP_context

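@ Accessors used by the support code to read and write individual VFP
@ registers.  Each starts with a computed jump, "add pc, pc, <index>,
@ lsl #3": pc reads as the address two instructions ahead, i.e. the
@ first table entry, and every entry is two instructions (8 bytes), so
@ the register number lands on its own mrc/mcr + return pair.  The
@ "mov r0, r0" after the add is just padding so entry 0 lines up.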
	.globl	vfp_get_float
vfp_get_float:
	add	pc, pc, r0, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mrc	p10, 0, r0, c\dr, c0, 0	@ fmrs	r0, s0
	mov	pc, lr
	mrc	p10, 0, r0, c\dr, c0, 4	@ fmrs	r0, s1
	mov	pc, lr
	.endr

	.globl	vfp_put_float
vfp_put_float:
	add	pc, pc, r1, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	mcr	p10, 0, r0, c\dr, c0, 0	@ fmsr	r0, s0
	mov	pc, lr
	mcr	p10, 0, r0, c\dr, c0, 4	@ fmsr	r0, s1
	mov	pc, lr
	.endr

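@ The double-precision accessors use the same 8-byte-per-entry layout;
@ an index of 16 to vfp_get_double falls off the end of the table onto
@ the stub after it, which fakes a "virtual" zero register for
@ compare-with-zero.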
	.globl	vfp_get_double
vfp_get_double:
	add	pc, pc, r0, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	fmrrd	r0, r1, d\dr
	mov	pc, lr
	.endr

	@ virtual register 16 for compare with zero
	mov	r0, #0
	mov	r1, #0
	mov	pc, lr

	.globl	vfp_put_double
vfp_put_double:
	add	pc, pc, r2, lsl #3
	mov	r0, r0
	.irp	dr,0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15
	fmdrr	d\dr, r0, r1
	mov	pc, lr
	.endr