#include <linux/config.h>
#include <linux/threads.h>
#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>


/*
 * Structure for storing CPU registers on the save area.
 * Offsets into swsusp_save_area below; SL_SIZE = SL_R12 + 20 words
 * (room for r12..r31).
 */
#define SL_SP		0
#define SL_PC		4
#define SL_MSR		8
#define SL_SDR1		0xc
#define SL_SPRG0	0x10	/* 4 sprg's */
#define SL_DBAT0	0x20
#define SL_IBAT0	0x28
#define SL_DBAT1	0x30
#define SL_IBAT1	0x38
#define SL_DBAT2	0x40
#define SL_IBAT2	0x48
#define SL_DBAT3	0x50
#define SL_IBAT3	0x58
#define SL_TB		0x60
#define SL_R2		0x68
#define SL_CR		0x6c
#define SL_LR		0x70
#define SL_R12		0x74	/* r12 to r31 */
#define SL_SIZE		(SL_R12 + 80)

	.section .data
	.align	5

_GLOBAL(swsusp_save_area)
	.space	SL_SIZE


	.section .text
	.align	5

/*
 * swsusp_arch_suspend:
 * Save the callee-saved registers (r1, r2, r12-r31, CR, LR), MSR, SDR1,
 * SPRG0-3, timebase and the eight BAT pairs into swsusp_save_area, then
 * call swsusp_save to snapshot memory.  Returns swsusp_save's return
 * value (left in r3).
 */
_GLOBAL(swsusp_arch_suspend)

	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l

	mflr	r0
	stw	r0,SL_LR(r11)
	mfcr	r0
	stw	r0,SL_CR(r11)
	stw	r1,SL_SP(r11)
	stw	r2,SL_R2(r11)
	stmw	r12,SL_R12(r11)

	/* Save MSR & SDR1 */
	mfmsr	r4
	stw	r4,SL_MSR(r11)
	mfsdr1	r4
	stw	r4,SL_SDR1(r11)

	/* Get a stable timebase and save it: re-read TBU until it is
	 * unchanged across the TBL read (guards against carry into the
	 * upper half mid-sequence). */
1:	mftbu	r4
	stw	r4,SL_TB(r11)
	mftb	r5
	stw	r5,SL_TB+4(r11)
	mftbu	r3
	cmpw	r3,r4
	bne	1b

	/* Save SPRGs */
	mfsprg	r4,0
	stw	r4,SL_SPRG0(r11)
	mfsprg	r4,1
	stw	r4,SL_SPRG0+4(r11)
	mfsprg	r4,2
	stw	r4,SL_SPRG0+8(r11)
	mfsprg	r4,3
	stw	r4,SL_SPRG0+12(r11)

	/* Save BATs (upper/lower word of each DBAT/IBAT pair) */
	mfdbatu	r4,0
	stw	r4,SL_DBAT0(r11)
	mfdbatl	r4,0
	stw	r4,SL_DBAT0+4(r11)
	mfdbatu	r4,1
	stw	r4,SL_DBAT1(r11)
	mfdbatl	r4,1
	stw	r4,SL_DBAT1+4(r11)
	mfdbatu	r4,2
	stw	r4,SL_DBAT2(r11)
	mfdbatl	r4,2
	stw	r4,SL_DBAT2+4(r11)
	mfdbatu	r4,3
	stw	r4,SL_DBAT3(r11)
	mfdbatl	r4,3
	stw	r4,SL_DBAT3+4(r11)
	mfibatu	r4,0
	stw	r4,SL_IBAT0(r11)
	mfibatl	r4,0
	stw	r4,SL_IBAT0+4(r11)
	mfibatu	r4,1
	stw	r4,SL_IBAT1(r11)
	mfibatl	r4,1
	stw	r4,SL_IBAT1+4(r11)
	mfibatu	r4,2
	stw	r4,SL_IBAT2(r11)
	mfibatl	r4,2
	stw	r4,SL_IBAT2+4(r11)
	mfibatu	r4,3
	stw	r4,SL_IBAT3(r11)
	mfibatl	r4,3
	stw	r4,SL_IBAT3+4(r11)

#if 0
	/* Backup various CPU config stuffs */
	bl	__save_cpu_setup
#endif
	/* Call the low level suspend stuff (we should probably have made
	 * a stackframe...
	 */
	bl	swsusp_save

	/* Restore LR from the save area (swsusp_save clobbered it) */
	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l
	lwz	r0,SL_LR(r11)
	mtlr	r0

	blr


/* Resume code:
 * Copy every page on the pagedir_nosave pbe list back to its original
 * location with data translation off, flush caches and TLBs, restore
 * the CPU state saved by swsusp_arch_suspend above, and return 0 to
 * the (restored) caller of swsusp_arch_suspend.
 */
_GLOBAL(swsusp_arch_resume)

	/* Stop pending altivec streams and memory accesses */
BEGIN_FTR_SECTION
	DSSALL
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
	sync

	/* Disable MSR:DR to make sure we don't take a TLB or
	 * hash miss during the copy, as our hash table will
	 * for a while be unusable. For .text, we assume we are
	 * covered by a BAT. This works only for non-G5 at this
	 * point. G5 will need a better approach, possibly using
	 * a small temporary hash table filled with large mappings,
	 * disabling the MMU completely isn't a good option for
	 * performance reasons.
	 * (Note that 750's may have the same performance issue as
	 * the G5 in this case, we should investigate using moving
	 * BATs for these CPUs)
	 */
	mfmsr	r0
	sync
	rlwinm	r0,r0,0,28,26		/* clear MSR_DR */
	mtmsr	r0
	sync
	isync

	/* Load ptr to the list of pages to copy in r10 */
	lis	r11,(pagedir_nosave - KERNELBASE)@h
	ori	r11,r11,pagedir_nosave@l
	lwz	r10,0(r11)

	/* Copy the pages. This is a very basic implementation, to
	 * be replaced by something more cache efficient.
	 * Outer loop (1:) walks the pbe list; inner loop (2:) copies
	 * one 4K page, 16 bytes per iteration (256 iterations). */
1:
	tophys(r3,r10)
	li	r0,256
	mtctr	r0
	lwz	r11,pbe_address(r3)	/* source */
	tophys(r5,r11)
	lwz	r10,pbe_orig_address(r3)	/* destination */
	tophys(r6,r10)
2:
	lwz	r8,0(r5)
	lwz	r9,4(r5)
	lwz	r10,8(r5)
	lwz	r11,12(r5)
	addi	r5,r5,16
	stw	r8,0(r6)
	stw	r9,4(r6)
	stw	r10,8(r6)
	stw	r11,12(r6)
	addi	r6,r6,16
	bdnz	2b
	lwz	r10,pbe_next(r3)
	cmpwi	0,r10,0
	bne	1b

	/* Do a very simple cache flush/inval of the L1 to ensure
	 * coherency of the icache: touch 0x2000 lines of 32 bytes
	 * starting at physical 0 to refill the data cache...
	 */
	lis	r3,0x0002
	mtctr	r3
	li	r3, 0
1:
	lwz	r0,0(r3)
	addi	r3,r3,0x0020
	bdnz	1b
	isync
	sync

	/* ...now flush those cache lines back out */
	lis	r3,0x0002
	mtctr	r3
	li	r3, 0
1:
	dcbf	0,r3
	addi	r3,r3,0x0020
	bdnz	1b
	sync

	/* Ok, we are now running with the kernel data of the old
	 * kernel fully restored. We can get to the save area
	 * easily now. As for the rest of the code, it assumes the
	 * loader kernel and the booted one are exactly identical
	 */
	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l
	tophys(r11,r11)

#if 0
	/* Restore various CPU config stuffs */
	bl	__restore_cpu_setup
#endif
	/* Restore the BATs, and SDR1. Then we can turn on the MMU.
	 * This is a bit hairy as we are running out of those BATs,
	 * but first, our code is probably in the icache, and we are
	 * writing the same value to the BAT, so that should be fine,
	 * though a better solution will have to be found long-term
	 */
	lwz	r4,SL_SDR1(r11)
	mtsdr1	r4
	lwz	r4,SL_SPRG0(r11)
	mtsprg	0,r4
	lwz	r4,SL_SPRG0+4(r11)
	mtsprg	1,r4
	lwz	r4,SL_SPRG0+8(r11)
	mtsprg	2,r4
	lwz	r4,SL_SPRG0+12(r11)
	mtsprg	3,r4

#if 0
	lwz	r4,SL_DBAT0(r11)
	mtdbatu	0,r4
	lwz	r4,SL_DBAT0+4(r11)
	mtdbatl	0,r4
	lwz	r4,SL_DBAT1(r11)
	mtdbatu	1,r4
	lwz	r4,SL_DBAT1+4(r11)
	mtdbatl	1,r4
	lwz	r4,SL_DBAT2(r11)
	mtdbatu	2,r4
	lwz	r4,SL_DBAT2+4(r11)
	mtdbatl	2,r4
	lwz	r4,SL_DBAT3(r11)
	mtdbatu	3,r4
	lwz	r4,SL_DBAT3+4(r11)
	mtdbatl	3,r4
	lwz	r4,SL_IBAT0(r11)
	mtibatu	0,r4
	lwz	r4,SL_IBAT0+4(r11)
	mtibatl	0,r4
	lwz	r4,SL_IBAT1(r11)
	mtibatu	1,r4
	lwz	r4,SL_IBAT1+4(r11)
	mtibatl	1,r4
	lwz	r4,SL_IBAT2(r11)
	mtibatu	2,r4
	lwz	r4,SL_IBAT2+4(r11)
	mtibatl	2,r4
	lwz	r4,SL_IBAT3(r11)
	mtibatu	3,r4
	lwz	r4,SL_IBAT3+4(r11)
	mtibatl	3,r4
#endif

	/* Clear the high BATs (4-7) on CPUs that have them */
BEGIN_FTR_SECTION
	li	r4,0
	mtspr	SPRN_DBAT4U,r4
	mtspr	SPRN_DBAT4L,r4
	mtspr	SPRN_DBAT5U,r4
	mtspr	SPRN_DBAT5L,r4
	mtspr	SPRN_DBAT6U,r4
	mtspr	SPRN_DBAT6L,r4
	mtspr	SPRN_DBAT7U,r4
	mtspr	SPRN_DBAT7L,r4
	mtspr	SPRN_IBAT4U,r4
	mtspr	SPRN_IBAT4L,r4
	mtspr	SPRN_IBAT5U,r4
	mtspr	SPRN_IBAT5L,r4
	mtspr	SPRN_IBAT6U,r4
	mtspr	SPRN_IBAT6L,r4
	mtspr	SPRN_IBAT7U,r4
	mtspr	SPRN_IBAT7L,r4
END_FTR_SECTION_IFSET(CPU_FTR_HAS_HIGH_BATS)

	/* Flush all TLBs, one entry per 4K page from 0x0FFFF000 down
	 * to 0.  Was "blt 1b", which falls through after the first
	 * iteration (the subtraction result is positive, so CR0[LT]
	 * is clear) and flushed only one entry; "bgt" loops over the
	 * whole range, matching the later swsusp_32.S code. */
	lis	r4,0x1000
1:	addic.	r4,r4,-0x1000
	tlbie	r4
	bgt	1b
	sync

	/* restore the MSR and turn on the MMU */
	lwz	r3,SL_MSR(r11)
	bl	turn_on_mmu
	tovirt(r11,r11)

	/* Restore TB: zero TBL first so TBU can't carry mid-restore */
	li	r3,0
	mttbl	r3
	lwz	r3,SL_TB(r11)
	lwz	r4,SL_TB+4(r11)
	mttbu	r3
	mttbl	r4

	/* Kick decrementer */
	li	r0,1
	mtdec	r0

	/* Restore the callee-saved registers and return */
	lwz	r0,SL_CR(r11)
	mtcr	r0
	lwz	r2,SL_R2(r11)
	lmw	r12,SL_R12(r11)
	lwz	r1,SL_SP(r11)
	lwz	r0,SL_LR(r11)
	mtlr	r0

	// XXX Note: we don't really need to call swsusp_resume

	li	r3,0
	blr

/* turn_on_mmu: rfi back to the caller (LR) with MSR set from r3.
 * FIXME: This construct is actually not useful since we don't shut
 * down the instruction MMU, we could just flip back MSR:DR on.
 */
turn_on_mmu:
	mflr	r4
	mtsrr0	r4
	mtsrr1	r3
	sync
	isync
	rfi
