Commit | Line | Data |
---|---|---|
496203b1 IY |
1 | |
2 | #ifdef CONFIG_VIRT_CPU_ACCOUNTING | |
3 | /* read ar.itc in advance, and use it before leaving bank 0 */ | |
4 | #define XEN_ACCOUNT_GET_STAMP \ | |
5 | MOV_FROM_ITC(pUStk, p6, r20, r2); /* r20 = ar.itc when pUStk; p6/r2 presumably scratch for the paravirt path — confirm against MOV_FROM_ITC */ | |
6 | #else /* !CONFIG_VIRT_CPU_ACCOUNTING */ | |
7 | #define XEN_ACCOUNT_GET_STAMP /* expands to nothing */ | |
8 | #endif | |
9 | ||
21820cce IY |
10 | /* |
11 | * DO_SAVE_MIN switches to the kernel stacks (if necessary) and saves | |
12 | * the minimum state necessary that allows us to turn psr.ic back | |
13 | * on. | |
14 | * | |
15 | * Assumed state upon entry: | |
16 | * psr.ic: off | |
17 | * r31: contains saved predicates (pr) | |
18 | * | |
19 | * Upon exit, the state is as follows: | |
20 | * psr.ic: off | |
21 | * r2 = points to &pt_regs.r16 | |
22 | * r8 = contents of ar.ccv | |
23 | * r9 = contents of ar.csd | |
24 | * r10 = contents of ar.ssd | |
25 | * r11 = FPSR_DEFAULT | |
26 | * r12 = kernel sp (kernel virtual address) | |
27 | * r13 = points to current task_struct (kernel virtual address) | |
28 | * p15 = TRUE if psr.i is set in cr.ipsr | |
29 | * predicate registers (other than p2, p3, and p15), b6, r3, r14, r15: | |
30 | * preserved | |
31 | * CONFIG_XEN note: p6/p7 are not preserved | |
32 | * | |
33 | * Note that psr.ic is NOT turned on by this macro. This is so that | |
34 | * we can pass interruption state as arguments to a handler. | |
35 | */ | |
36 | #define XEN_DO_SAVE_MIN(__COVER,SAVE_IFS,EXTRA,WORKAROUND) \ | |
37 | mov r16=IA64_KR(CURRENT); /* M: r16 = current task_struct */ \ | |
38 | mov r27=ar.rsc; /* M: save RSE config, stored below */ \ | |
39 | mov r20=r1; /* A: save original r1 (gp) */ \ | |
40 | mov r25=ar.unat; /* M */ \ | |
41 | MOV_FROM_IPSR(p0,r29); /* M: r29 = cr.ipsr */ \ | |
42 | MOV_FROM_IIP(r28); /* M: r28 = cr.iip */ \ | |
43 | mov r21=ar.fpsr; /* M */ \ | |
44 | mov r26=ar.pfs; /* I */ \ | |
45 | __COVER; /* B;; (or nothing) */ \ | |
46 | adds r16=IA64_TASK_THREAD_ON_USTACK_OFFSET,r16; /* r16 = &current->thread.on_ustack */ \ | |
47 | ;; \ | |
48 | ld1 r17=[r16]; /* load current->thread.on_ustack flag */ \ | |
49 | st1 [r16]=r0; /* clear current->thread.on_ustack flag */ \ | |
50 | adds r1=-IA64_TASK_THREAD_ON_USTACK_OFFSET,r16 /* r1 = current */ \ | |
51 | /* switch from user to kernel RBS: */ \ | |
52 | ;; \ | |
53 | invala; /* M */ \ | |
54 | /* SAVE_IFS;*/ /* see xen special handling below */ \ | |
55 | cmp.eq pKStk,pUStk=r0,r17; /* are we in kernel mode already? */ \ | |
56 | ;; \ | |
57 | (pUStk) mov ar.rsc=0; /* set enforced lazy mode, pl 0, little-endian, loadrs=0 */ \ | |
58 | ;; \ | |
59 | (pUStk) mov.m r24=ar.rnat; \ | |
60 | (pUStk) addl r22=IA64_RBS_OFFSET,r1; /* compute base of RBS */ \ | |
61 | (pKStk) mov r1=sp; /* get sp */ \ | |
62 | ;; \ | |
63 | (pUStk) lfetch.fault.excl.nt1 [r22]; \ | |
64 | (pUStk) addl r1=IA64_STK_OFFSET-IA64_PT_REGS_SIZE,r1; /* compute base of memory stack */ \ | |
65 | (pUStk) mov r23=ar.bspstore; /* save ar.bspstore */ \ | |
66 | ;; \ | |
67 | (pUStk) mov ar.bspstore=r22; /* switch to kernel RBS */ \ | |
68 | (pKStk) addl r1=-IA64_PT_REGS_SIZE,r1; /* if in kernel mode, use sp (r12) */ \ | |
69 | ;; \ | |
70 | (pUStk) mov r18=ar.bsp; \ | |
71 | (pUStk) mov ar.rsc=0x3; /* set eager mode, pl 0, little-endian, loadrs=0 */ \ | |
72 | adds r17=2*L1_CACHE_BYTES,r1; /* really: biggest cache-line size */ \ | |
73 | adds r16=PT(CR_IPSR),r1; \ | |
74 | ;; \ | |
75 | lfetch.fault.excl.nt1 [r17],L1_CACHE_BYTES; \ | |
76 | st8 [r16]=r29; /* save cr.ipsr */ \ | |
77 | ;; \ | |
78 | lfetch.fault.excl.nt1 [r17]; \ | |
79 | tbit.nz p15,p0=r29,IA64_PSR_I_BIT; /* p15 = psr.i set in cr.ipsr (see exit state above) */ \ | |
80 | mov r29=b0 /* reuse r29 for b0: cr.ipsr already stored above */ \ | |
81 | ;; \ | |
82 | WORKAROUND; \ | |
83 | adds r16=PT(R8),r1; /* initialize first base pointer */ \ | |
84 | adds r17=PT(R9),r1; /* initialize second base pointer */ \ | |
85 | (pKStk) mov r18=r0; /* make sure r18 isn't NaT */ \ | |
86 | ;; \ | |
87 | .mem.offset 0,0; st8.spill [r16]=r8,16; \ | |
88 | .mem.offset 8,0; st8.spill [r17]=r9,16; \ | |
89 | ;; \ | |
90 | .mem.offset 0,0; st8.spill [r16]=r10,24; \ | |
91 | movl r8=XSI_PRECOVER_IFS; /* r8 = address of Xen pre-cover ifs (dereferenced below) */ \ | |
92 | .mem.offset 8,0; st8.spill [r17]=r11,24; \ | |
93 | ;; \ | |
94 | /* xen special handling for possibly lazy cover */ \ | |
95 | /* SAVE_MIN case in dispatch_ia32_handler: mov r30=r0 */ \ | |
96 | ld8 r30=[r8]; \ | |
97 | (pUStk) sub r18=r18,r22; /* r18=RSE.ndirty*8 */ \ | |
98 | st8 [r16]=r28,16; /* save cr.iip */ \ | |
99 | ;; \ | |
100 | st8 [r17]=r30,16; /* save cr.ifs */ \ | |
101 | mov r8=ar.ccv; \ | |
102 | mov r9=ar.csd; \ | |
103 | mov r10=ar.ssd; \ | |
104 | movl r11=FPSR_DEFAULT; /* L-unit */ \ | |
105 | ;; \ | |
106 | st8 [r16]=r25,16; /* save ar.unat */ \ | |
107 | st8 [r17]=r26,16; /* save ar.pfs */ \ | |
108 | shl r18=r18,16; /* compute ar.rsc to be used for "loadrs" */ \ | |
109 | ;; \ | |
110 | st8 [r16]=r27,16; /* save ar.rsc */ \ | |
111 | (pUStk) st8 [r17]=r24,16; /* save ar.rnat */ \ | |
112 | (pKStk) adds r17=16,r17; /* skip over ar_rnat field */ \ | |
113 | ;; /* avoid RAW on r16 & r17 */ \ | |
114 | (pUStk) st8 [r16]=r23,16; /* save ar.bspstore */ \ | |
115 | st8 [r17]=r31,16; /* save predicates */ \ | |
116 | (pKStk) adds r16=16,r16; /* skip over ar_bspstore field */ \ | |
117 | ;; \ | |
118 | st8 [r16]=r29,16; /* save b0 */ \ | |
119 | st8 [r17]=r18,16; /* save ar.rsc value for "loadrs" */ \ | |
120 | cmp.eq pNonSys,pSys=r0,r0 /* initialize pSys=0, pNonSys=1 */ \ | |
121 | ;; \ | |
122 | .mem.offset 0,0; st8.spill [r16]=r20,16; /* save original r1 */ \ | |
123 | .mem.offset 8,0; st8.spill [r17]=r12,16; \ | |
124 | adds r12=-16,r1; /* switch to kernel memory stack (with 16 bytes of scratch) */ \ | |
125 | ;; \ | |
126 | .mem.offset 0,0; st8.spill [r16]=r13,16; \ | |
127 | .mem.offset 8,0; st8.spill [r17]=r21,16; /* save ar.fpsr */ \ | |
128 | mov r13=IA64_KR(CURRENT); /* establish `current' */ \ | |
129 | ;; \ | |
130 | .mem.offset 0,0; st8.spill [r16]=r15,16; \ | |
131 | .mem.offset 8,0; st8.spill [r17]=r14,16; \ | |
132 | ;; \ | |
133 | .mem.offset 0,0; st8.spill [r16]=r2,16; \ | |
134 | .mem.offset 8,0; st8.spill [r17]=r3,16; \ | |
496203b1 | 135 | XEN_ACCOUNT_GET_STAMP /* r20 is free again: original r1 was spilled above */ \ |
21820cce IY |
136 | adds r2=IA64_PT_REGS_R16_OFFSET,r1; /* r2 = &pt_regs.r16 per the exit contract above */ \ |
137 | ;; \ | |
138 | EXTRA; \ | |
139 | movl r1=__gp; /* establish kernel global pointer */ \ | |
140 | ;; \ | |
141 | ACCOUNT_SYS_ENTER \ | |
142 | BSW_1(r3,r14); /* switch back to bank 1 (must be last in insn group) */ \ | |
143 | ;; |