Commit | Line | Data |
---|---|---|
14cf11af PM |
1 | /* |
2 | * FPU support code, moved here from head.S so that it can be used | |
3 | * by chips which use other head-whatever.S files. | |
4 | * | |
fea23bfe PM |
5 | * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org) |
6 | * Copyright (C) 1996 Cort Dougan <cort@cs.nmt.edu> | |
7 | * Copyright (C) 1996 Paul Mackerras. | |
8 | * Copyright (C) 1997 Dan Malek (dmalek@jlc.net). | |
9 | * | |
14cf11af PM |
10 | * This program is free software; you can redistribute it and/or |
11 | * modify it under the terms of the GNU General Public License | |
12 | * as published by the Free Software Foundation; either version | |
13 | * 2 of the License, or (at your option) any later version. | |
14 | * | |
15 | */ | |
16 | ||
b3b8dc6c | 17 | #include <asm/reg.h> |
14cf11af PM |
18 | #include <asm/page.h> |
19 | #include <asm/mmu.h> | |
20 | #include <asm/pgtable.h> | |
21 | #include <asm/cputable.h> | |
22 | #include <asm/cache.h> | |
23 | #include <asm/thread_info.h> | |
24 | #include <asm/ppc_asm.h> | |
25 | #include <asm/asm-offsets.h> | |
26 | ||
/*
 * load_up_fpu()
 *
 * This task wants to use the FPU now.
 * On UP, disable FP for the task which had the FPU previously,
 * and save its floating-point registers in its thread_struct.
 * Load up this task's FP registers from its thread_struct,
 * enable the FPU for the current task and return to the task.
 *
 * Entered from the FP-unavailable exception path, so the normal
 * C ABI does not apply here; the registers used for the saved MSR
 * (r9 on 32-bit, r12 on 64-bit) are presumably set up by the
 * exception prolog in head_*.S -- confirm against the caller.
 */
_GLOBAL(load_up_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP			/* we need the FPU on to touch FP regs */
	SYNC
	MTMSRD(r5)				/* enable use of fpu now */
	isync					/* context-synchronize before FP insns */
/*
 * For SMP, we don't do lazy FPU switching because it just gets too
 * horrendously complex, especially when a task switches from one CPU
 * to another.  Instead we call giveup_fpu in switch_to.
 */
#ifndef CONFIG_SMP
	/* Lazy switching: if another task still owns the FPU, save its
	 * state into its thread_struct and take FP away from it. */
	LOAD_REG_ADDRBASE(r3, last_task_used_math)
	toreal(r3)				/* may run translation-off: use real address */
	PPC_LL	r4,ADDROFF(last_task_used_math)(r3)
	PPC_LCMPI	0,r4,0
	beq	1f				/* no previous owner: nothing to save */
	toreal(r4)
	addi	r4,r4,THREAD			/* want last_task_used_math->thread */
	SAVE_32FPRS(0, r4)			/* dump all 32 FPRs into its thread_struct */
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r4)		/* ...and its FPSCR */
	PPC_LL	r5,PT_REGS(r4)
	toreal(r5)
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r10,MSR_FP|MSR_FE0|MSR_FE1
	andc	r4,r4,r10			/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#endif /* CONFIG_SMP */
	/* enable use of FP after return */
#ifdef CONFIG_PPC32
	mfspr	r5,SPRN_SPRG3			/* current task's THREAD (phys) */
	lwz	r4,THREAD_FPEXC_MODE(r5)	/* task's chosen FE0/FE1 exception mode */
	ori	r9,r9,MSR_FP			/* enable FP for current */
	or	r9,r9,r4			/* fold in the FP exception-mode bits */
#else
	ld	r4,PACACURRENT(r13)		/* r13 = PACA on 64-bit */
	addi	r5,r4,THREAD			/* Get THREAD */
	lwz	r4,THREAD_FPEXC_MODE(r5)
	ori	r12,r12,MSR_FP			/* r12 holds the MSR to restore on return */
	or	r12,r12,r4
	std	r12,_MSR(r1)			/* patch saved MSR in the exception frame */
#endif
	/* Load this task's FP state from its thread_struct. */
	lfd	fr0,THREAD_FPSCR(r5)
	MTFSF_L(fr0)				/* restore full FPSCR */
	REST_32FPRS(0, r5)
#ifndef CONFIG_SMP
	subi	r4,r5,THREAD			/* back from THREAD to the task_struct */
	fromreal(r4)
	PPC_STL	r4,ADDROFF(last_task_used_math)(r3)	/* we are the new lazy owner */
#endif /* CONFIG_SMP */
	/* restore registers and return */
	/* we haven't used ctr or xer or lr */
	blr
14cf11af | 89 | |
14cf11af PM |
/*
 * giveup_fpu(tsk)
 * Disable FP for the task given as the argument (r3),
 * and save the floating-point registers in its thread_struct.
 * Enables the FPU for use in the kernel on return.
 *
 * Called with r3 = task_struct pointer (may be NULL on UP when
 * there is no lazy owner). Clobbers r3, r4, r5, fr0, cr0.
 */
_GLOBAL(giveup_fpu)
	mfmsr	r5
	ori	r5,r5,MSR_FP			/* need FP on to read the FPRs */
	SYNC_601				/* 601-only sync, no-op elsewhere */
	ISYNC_601
	MTMSRD(r5)				/* enable use of fpu now */
	SYNC_601
	isync
	PPC_LCMPI	0,r3,0
	beqlr-					/* if no previous owner, done */
	addi	r3,r3,THREAD			/* want THREAD of task */
	PPC_LL	r5,PT_REGS(r3)			/* task's exception frame (may be 0) */
	PPC_LCMPI	0,r5,0			/* test now; branch taken after the saves */
	SAVE_32FPRS(0, r3)			/* save all FPRs into thread_struct */
	mffs	fr0
	stfd	fr0,THREAD_FPSCR(r3)		/* save the FPSCR too */
	beq	1f				/* no pt_regs: skip MSR fixup */
	PPC_LL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
	li	r3,MSR_FP|MSR_FE0|MSR_FE1
	andc	r4,r4,r3			/* disable FP for previous task */
	PPC_STL	r4,_MSR-STACK_FRAME_OVERHEAD(r5)
1:
#ifndef CONFIG_SMP
	li	r5,0
	LOAD_REG_ADDRBASE(r4,last_task_used_math)
	PPC_STL	r5,ADDROFF(last_task_used_math)(r4)	/* nobody owns the FPU now */
#endif /* CONFIG_SMP */
	blr
25c8a78b DG |
124 | |
/*
 * These are used in the alignment trap handler when emulating
 * single-precision loads and stores.
 * We restore and save the fpscr so the task gets the same result
 * and exceptions as if the cpu had performed the load or store.
 */

/*
 * cvt_fd: emulate a single-precision load (float -> double widening).
 * In:  r3 = source (float *), r4 = destination (double *),
 *      r5 = THREAD of the task whose FPSCR should be in effect.
 * Uses fr0 as scratch; caller must have the FPU enabled.
 */
_GLOBAL(cvt_fd)
	lfd	0,THREAD_FPSCR(r5)	/* load up fpscr value */
	MTFSF_L(0)			/* install the task's FPSCR */
	lfs	0,0(r3)			/* lfs widens float -> double in fr0 */
	stfd	0,0(r4)
	mffs	0			/* capture any status bits the load set */
	stfd	0,THREAD_FPSCR(r5)	/* save new fpscr value */
	blr
140 | ||
/*
 * cvt_df: emulate a single-precision store (double -> float narrowing),
 * used by the alignment trap handler. The task's FPSCR is swapped in
 * around the conversion so rounding/exception results match hardware.
 * In:  r3 = source (double *), r4 = destination (float *),
 *      r5 = THREAD of the task whose FPSCR should be in effect.
 * Uses fr0 as scratch; caller must have the FPU enabled.
 */
_GLOBAL(cvt_df)
	lfd	0,THREAD_FPSCR(r5)	/* load up fpscr value */
	MTFSF_L(0)			/* install the task's FPSCR */
	lfd	0,0(r3)
	stfs	0,0(r4)			/* stfs rounds double -> float per FPSCR */
	mffs	0			/* capture status bits from the conversion */
	stfd	0,THREAD_FPSCR(r5)	/* save new fpscr value */
	blr