/* clear_page.S: UltraSparc optimized copy page.
 *
 * NOTE(review): the header names clear_page.S but this file implements
 * copy_user_page — presumably inherited from the file this code was
 * derived from; confirm against the build's actual filename.
 *
 * Copyright (C) 1996, 1998, 1999, 2000, 2004 David S. Miller (davem@redhat.com)
 * Copyright (C) 1997 Jakub Jelinek (jakub@redhat.com)
 */

#include <asm/visasm.h>
#include <asm/thread_info.h>
#include <asm/page.h>
#include <asm/pgtable.h>
#include <asm/spitfire.h>
#include <asm/head.h>
#include <asm/export.h>

	/* What we used to do was lock a TLB entry into a specific
	 * TLB slot, clear the page with interrupts disabled, then
	 * restore the original TLB entry.  This was great for
	 * disturbing the TLB as little as possible, but it meant
	 * we had to keep interrupts disabled for a long time.
	 *
	 * Now, we simply use the normal TLB loading mechanism,
	 * and this makes the cpu choose a slot all by itself.
	 * Then we do a normal TLB flush on exit.  We need only
	 * disable preemption during the clear.
	 */

/* Two page-sized windows inside the TLBTEMP area: dest mapping at
 * TLBTEMP_BASE + alias-offset, src mapping DCACHE_SIZE above it.
 */
#define	DCACHE_SIZE	(PAGE_SIZE * 2)

/* Loop-termination remainder for the generic VIS copy loop below:
 * the loop stops when (PAGE_SIZE - bytes_copied) == PAGE_SIZE_REM,
 * leaving the final blocks to the tail code.
 */
#if (PAGE_SHIFT == 13)
#define PAGE_SIZE_REM	0x80
#elif (PAGE_SHIFT == 16)
#define PAGE_SIZE_REM	0x100
#else
#error Wrong PAGE_SHIFT specified
#endif

/* Copy eight 64-bit FP registers into the %f48-%f62 staging bank,
 * which is then written out with a single 64-byte stda block store.
 */
#define	TOUCH(reg0, reg1, reg2, reg3, reg4, reg5, reg6, reg7)	\
	fsrc2	%reg0, %f48;	fsrc2	%reg1, %f50;		\
	fsrc2	%reg2, %f52;	fsrc2	%reg3, %f54;		\
	fsrc2	%reg4, %f56;	fsrc2	%reg5, %f58;		\
	fsrc2	%reg6, %f60;	fsrc2	%reg7, %f62;

	.text

	.align		32
	.globl		copy_user_page
	.type		copy_user_page,#function
	EXPORT_SYMBOL(copy_user_page)
copy_user_page:	/* %o0=dest, %o1=src, %o2=vaddr */
	/* Compute TTE data words for dest and src pages and pick
	 * TLBTEMP virtual addresses that preserve the user vaddr's
	 * D-cache alias bit (vaddr & PAGE_SIZE), so the copy hits
	 * the same cache color as the user mapping.
	 */
	lduw		[%g6 + TI_PRE_COUNT], %o4
	sethi		%hi(PAGE_OFFSET), %g2
	sethi		%hi(PAGE_SIZE), %o3

	ldx		[%g2 + %lo(PAGE_OFFSET)], %g2
	sethi		%hi(PAGE_KERNEL_LOCKED), %g3

	ldx		[%g3 + %lo(PAGE_KERNEL_LOCKED)], %g3
	sub		%o0, %g2, %g1		! dest paddr

	sub		%o1, %g2, %g2		! src paddr

	and		%o2, %o3, %o0		! vaddr D-cache alias bit
	or		%g1, %g3, %g1		! dest TTE data

	or		%g2, %g3, %g2		! src TTE data
	sethi		%hi(TLBTEMP_BASE), %o3

	sethi		%hi(DCACHE_SIZE), %o1
	add		%o0, %o3, %o0		! dest TTE vaddr

	add		%o4, 1, %o2
	add		%o0, %o1, %o1		! src TTE vaddr

	/* Disable preemption.  */
	mov		TLB_TAG_ACCESS, %g3
	stw		%o2, [%g6 + TI_PRE_COUNT]

	/* Load TLB entries.  Interrupts are disabled only across the
	 * two tag-access/data-in pairs so the entries go in atomically;
	 * the copy itself runs with interrupts re-enabled.
	 */
	rdpr		%pstate, %o2
	wrpr		%o2, PSTATE_IE, %pstate
	stxa		%o0, [%g3] ASI_DMMU
	stxa		%g1, [%g0] ASI_DTLB_DATA_IN
	membar		#Sync
	stxa		%o1, [%g3] ASI_DMMU
	stxa		%g2, [%g0] ASI_DTLB_DATA_IN
	membar		#Sync
	wrpr		%o2, 0x0, %pstate

	/* This branch is patched to a NOP by cheetah_patch_copy_page
	 * (below) so Cheetah CPUs fall through into the prefetching
	 * path at 1:; unpatched CPUs take the generic path at 9:.
	 */
cheetah_copy_page_insn:
	ba,pt		%xcc, 9f
	 nop

	/* Cheetah path: software-pipelined ldd/fsrc2/stda with
	 * #one_read prefetches, 64 bytes per iteration.  %g1 keeps
	 * the dest TTE vaddr for the demap at 5:.
	 */
1:
	VISEntryHalf
	membar		#StoreLoad | #StoreStore | #LoadStore
	sethi		%hi((PAGE_SIZE/64)-2), %o2
	mov		%o0, %g1
	prefetch	[%o1 + 0x000], #one_read
	or		%o2, %lo((PAGE_SIZE/64)-2), %o2	! block count minus prologue/epilogue
	prefetch	[%o1 + 0x040], #one_read
	prefetch	[%o1 + 0x080], #one_read
	prefetch	[%o1 + 0x0c0], #one_read
	ldd		[%o1 + 0x000], %f0
	prefetch	[%o1 + 0x100], #one_read
	ldd		[%o1 + 0x008], %f2
	prefetch	[%o1 + 0x140], #one_read
	ldd		[%o1 + 0x010], %f4
	prefetch	[%o1 + 0x180], #one_read
	fsrc2		%f0, %f16
	ldd		[%o1 + 0x018], %f6
	fsrc2		%f2, %f18
	ldd		[%o1 + 0x020], %f8
	fsrc2		%f4, %f20
	ldd		[%o1 + 0x028], %f10
	fsrc2		%f6, %f22
	ldd		[%o1 + 0x030], %f12
	fsrc2		%f8, %f24
	ldd		[%o1 + 0x038], %f14
	fsrc2		%f10, %f26
	ldd		[%o1 + 0x040], %f0
1:	ldd		[%o1 + 0x048], %f2	! steady state: load next block while
	fsrc2		%f12, %f28		! storing the previous one from %f16-%f30
	ldd		[%o1 + 0x050], %f4
	fsrc2		%f14, %f30
	stda		%f16, [%o0] ASI_BLK_P
	ldd		[%o1 + 0x058], %f6
	fsrc2		%f0, %f16
	ldd		[%o1 + 0x060], %f8
	fsrc2		%f2, %f18
	ldd		[%o1 + 0x068], %f10
	fsrc2		%f4, %f20
	ldd		[%o1 + 0x070], %f12
	fsrc2		%f6, %f22
	ldd		[%o1 + 0x078], %f14
	fsrc2		%f8, %f24
	ldd		[%o1 + 0x080], %f0
	prefetch	[%o1 + 0x180], #one_read
	fsrc2		%f10, %f26
	subcc		%o2, 1, %o2
	add		%o0, 0x40, %o0
	bne,pt		%xcc, 1b
	 add		%o1, 0x40, %o1		! delay slot: advance src

	/* Pipeline drain: last full block plus the final staged block. */
	ldd		[%o1 + 0x048], %f2
	fsrc2		%f12, %f28
	ldd		[%o1 + 0x050], %f4
	fsrc2		%f14, %f30
	stda		%f16, [%o0] ASI_BLK_P
	ldd		[%o1 + 0x058], %f6
	fsrc2		%f0, %f16
	ldd		[%o1 + 0x060], %f8
	fsrc2		%f2, %f18
	ldd		[%o1 + 0x068], %f10
	fsrc2		%f4, %f20
	ldd		[%o1 + 0x070], %f12
	fsrc2		%f6, %f22
	add		%o0, 0x40, %o0
	ldd		[%o1 + 0x078], %f14
	fsrc2		%f8, %f24
	fsrc2		%f10, %f26
	fsrc2		%f12, %f28
	fsrc2		%f14, %f30
	stda		%f16, [%o0] ASI_BLK_P
	membar		#Sync
	VISExitHalf
	ba,pt		%xcc, 5f
	 nop

	/* Generic path: block loads through ASI_BLK_P; stores go
	 * through ASI_BLK_COMMIT_P instead of ASI_BLK_P when
	 * TI_FAULT_CODE is non-zero.  %g3 holds the caller's %asi
	 * for restoration on exit.
	 */
9:
	VISEntry
	ldub		[%g6 + TI_FAULT_CODE], %g3
	mov		%o0, %g1
	cmp		%g3, 0
	rd		%asi, %g3
	be,a,pt		%icc, 1f
	 wr		%g0, ASI_BLK_P, %asi	! annulled unless branch taken
	wr		%g0, ASI_BLK_COMMIT_P, %asi
1:	ldda		[%o1] ASI_BLK_P, %f0	! prime two blocks into %f0/%f16
	add		%o1, 0x40, %o1
	ldda		[%o1] ASI_BLK_P, %f16
	add		%o1, 0x40, %o1
	sethi		%hi(PAGE_SIZE), %o2
	/* Three-way rotation: banks %f0, %f16, %f32 are loaded round-robin
	 * while TOUCH() stages the oldest bank into %f48 for the store.
	 * %o2 counts down 0x40 per block until PAGE_SIZE_REM remains.
	 */
1:	TOUCH(f0, f2, f4, f6, f8, f10, f12, f14)
	ldda		[%o1] ASI_BLK_P, %f32
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	TOUCH(f16, f18, f20, f22, f24, f26, f28, f30)
	ldda		[%o1] ASI_BLK_P, %f0
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	TOUCH(f32, f34, f36, f38, f40, f42, f44, f46)
	ldda		[%o1] ASI_BLK_P, %f16
	stda		%f48, [%o0] %asi
	sub		%o2, 0x40, %o2
	add		%o1, 0x40, %o1
	cmp		%o2, PAGE_SIZE_REM
	bne,pt		%xcc, 1b
	 add		%o0, 0x40, %o0		! delay slot: advance dest
	/* Tail: the remainder differs by page size (see PAGE_SIZE_REM),
	 * so the drain sequence is conditional on PAGE_SHIFT.
	 */
#if (PAGE_SHIFT == 16)
	TOUCH(f0, f2, f4, f6, f8, f10, f12, f14)
	ldda		[%o1] ASI_BLK_P, %f32
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	TOUCH(f16, f18, f20, f22, f24, f26, f28, f30)
	ldda		[%o1] ASI_BLK_P, %f0
	stda		%f48, [%o0] %asi
	add		%o1, 0x40, %o1
	sub		%o2, 0x40, %o2
	add		%o0, 0x40, %o0
	membar		#Sync
	stda		%f32, [%o0] %asi
	add		%o0, 0x40, %o0
	stda		%f0, [%o0] %asi
#else
	membar		#Sync
	stda		%f0, [%o0] %asi
	add		%o0, 0x40, %o0
	stda		%f16, [%o0] %asi
#endif
	membar		#Sync
	wr		%g3, 0x0, %asi		! restore caller's %asi
	VISExit

	/* Common exit: demap both TLBTEMP entries (dest at %g1, src
	 * DCACHE_SIZE above it), then restore the saved preempt count
	 * in the retl delay slot.
	 */
5:
	stxa		%g0, [%g1] ASI_DMMU_DEMAP
	membar		#Sync

	sethi		%hi(DCACHE_SIZE), %g2
	stxa		%g0, [%g1 + %g2] ASI_DMMU_DEMAP
	membar		#Sync

	retl
	 stw		%o4, [%g6 + TI_PRE_COUNT]

	.size		copy_user_page, .-copy_user_page

	/* Boot-time patch: overwrite the branch at cheetah_copy_page_insn
	 * with a NOP (opcode 0x01000000) so copy_user_page falls through
	 * into the Cheetah prefetching path.  The membar + flush make the
	 * patched instruction visible to the I-cache before return.
	 */
	.globl		cheetah_patch_copy_page
cheetah_patch_copy_page:
	sethi		%hi(0x01000000), %o1	! NOP
	sethi		%hi(cheetah_copy_page_insn), %o0
	or		%o0, %lo(cheetah_copy_page_insn), %o0
	stw		%o1, [%o0]
	membar		#StoreStore
	flush		%o0
	retl
	 nop