Lines matching +full:11 +full:n
All hits below fall in the s390 kernel FPU save/restore code (apparently arch/s390/kernel/fpu.c): __kernel_fpu_begin(), __kernel_fpu_end(), __load_fpu_regs() and save_fpu_regs().
41 asm volatile("std 11,%0" : "=Q" (state->fprs[11])); in __kernel_fpu_begin()
56 " la 1,%[vxrs]\n" /* load save area */ in __kernel_fpu_begin()
57 " tmll %[m],30\n" /* KERNEL_VXR */ in __kernel_fpu_begin()
58 " jz 7f\n" /* no work -> done */ in __kernel_fpu_begin()
59 " jo 5f\n" /* -> save V0..V31 */ in __kernel_fpu_begin()
64 " chi %[m],12\n" /* KERNEL_VXR_MID */ in __kernel_fpu_begin()
65 " jne 0f\n" /* -> save V8..V23 */ in __kernel_fpu_begin()
66 " VSTM 8,23,128,1\n" /* vstm %v8,%v23,128(%r1) */ in __kernel_fpu_begin()
67 " j 7f\n" in __kernel_fpu_begin()
69 "0: tmll %[m],6\n" /* KERNEL_VXR_LOW */ in __kernel_fpu_begin()
70 " jz 3f\n" /* -> KERNEL_VXR_HIGH */ in __kernel_fpu_begin()
71 " jo 2f\n" /* 11 -> save V0..V15 */ in __kernel_fpu_begin()
72 " brc 2,1f\n" /* 10 -> save V8..V15 */ in __kernel_fpu_begin()
73 " VSTM 0,7,0,1\n" /* vstm %v0,%v7,0(%r1) */ in __kernel_fpu_begin()
74 " j 3f\n" in __kernel_fpu_begin()
75 "1: VSTM 8,15,128,1\n" /* vstm %v8,%v15,128(%r1) */ in __kernel_fpu_begin()
76 " j 3f\n" in __kernel_fpu_begin()
77 "2: VSTM 0,15,0,1\n" /* vstm %v0,%v15,0(%r1) */ in __kernel_fpu_begin()
79 "3: tmll %[m],24\n" /* KERNEL_VXR_HIGH */ in __kernel_fpu_begin()
80 " jz 7f\n" in __kernel_fpu_begin()
81 " jo 6f\n" /* 11 -> save V16..V31 */ in __kernel_fpu_begin()
82 " brc 2,4f\n" /* 10 -> save V24..V31 */ in __kernel_fpu_begin()
83 " VSTM 16,23,256,1\n" /* vstm %v16,%v23,256(%r1) */ in __kernel_fpu_begin()
84 " j 7f\n" in __kernel_fpu_begin()
85 "4: VSTM 24,31,384,1\n" /* vstm %v24,%v31,384(%r1) */ in __kernel_fpu_begin()
86 " j 7f\n" in __kernel_fpu_begin()
87 "5: VSTM 0,15,0,1\n" /* vstm %v0,%v15,0(%r1) */ in __kernel_fpu_begin()
88 "6: VSTM 16,31,256,1\n" /* vstm %v16,%v31,256(%r1) */ in __kernel_fpu_begin()
123 asm volatile("ld 11,%0" : : "Q" (state->fprs[11])); in __kernel_fpu_end()
138 " la 1,%[vxrs]\n" /* load restore area */ in __kernel_fpu_end()
139 " tmll %[m],30\n" /* KERNEL_VXR */ in __kernel_fpu_end()
140 " jz 7f\n" /* no work -> done */ in __kernel_fpu_end()
141 " jo 5f\n" /* -> restore V0..V31 */ in __kernel_fpu_end()
146 " chi %[m],12\n" /* KERNEL_VXR_MID */ in __kernel_fpu_end()
147 " jne 0f\n" /* -> restore V8..V23 */ in __kernel_fpu_end()
148 " VLM 8,23,128,1\n" /* vlm %v8,%v23,128(%r1) */ in __kernel_fpu_end()
149 " j 7f\n" in __kernel_fpu_end()
151 "0: tmll %[m],6\n" /* KERNEL_VXR_LOW */ in __kernel_fpu_end()
152 " jz 3f\n" /* -> KERNEL_VXR_HIGH */ in __kernel_fpu_end()
153 " jo 2f\n" /* 11 -> restore V0..V15 */ in __kernel_fpu_end()
154 " brc 2,1f\n" /* 10 -> restore V8..V15 */ in __kernel_fpu_end()
155 " VLM 0,7,0,1\n" /* vlm %v0,%v7,0(%r1) */ in __kernel_fpu_end()
156 " j 3f\n" in __kernel_fpu_end()
157 "1: VLM 8,15,128,1\n" /* vlm %v8,%v15,128(%r1) */ in __kernel_fpu_end()
158 " j 3f\n" in __kernel_fpu_end()
159 "2: VLM 0,15,0,1\n" /* vlm %v0,%v15,0(%r1) */ in __kernel_fpu_end()
161 "3: tmll %[m],24\n" /* KERNEL_VXR_HIGH */ in __kernel_fpu_end()
162 " jz 7f\n" in __kernel_fpu_end()
163 " jo 6f\n" /* 11 -> restore V16..V31 */ in __kernel_fpu_end()
164 " brc 2,4f\n" /* 10 -> restore V24..V31 */ in __kernel_fpu_end()
165 " VLM 16,23,256,1\n" /* vlm %v16,%v23,256(%r1) */ in __kernel_fpu_end()
166 " j 7f\n" in __kernel_fpu_end()
167 "4: VLM 24,31,384,1\n" /* vlm %v24,%v31,384(%r1) */ in __kernel_fpu_end()
168 " j 7f\n" in __kernel_fpu_end()
169 "5: VLM 0,15,0,1\n" /* vlm %v0,%v15,0(%r1) */ in __kernel_fpu_end()
170 "6: VLM 16,31,256,1\n" /* vlm %v16,%v31,256(%r1) */ in __kernel_fpu_end()
185 asm volatile("lgr 1,%0\n" in __load_fpu_regs()
186 "VLM 0,15,0,1\n" in __load_fpu_regs()
187 "VLM 16,31,256,1\n" in __load_fpu_regs()
203 asm volatile("ld 11,%0" : : "Q" (regs[11])); in __load_fpu_regs()
236 asm volatile("lgr 1,%0\n" in save_fpu_regs()
237 "VSTM 0,15,0,1\n" in save_fpu_regs()
238 "VSTM 16,31,256,1\n" in save_fpu_regs()
254 asm volatile("std 11,%0" : "=Q" (regs[11])); in save_fpu_regs()