/*
 * swsusp (software suspend / hibernation) low-level support for 32-bit
 * PowerPC: save the CPU state on suspend, and restore the saved page
 * set plus CPU state on resume.
 */
#include <linux/config.h>
#include <linux/threads.h>
#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>


/*
 * Byte offsets of each saved register within the save area below.
 */
#define SL_SP		0
#define SL_PC		4
#define SL_MSR		8
#define SL_SDR1		0xc
#define SL_SPRG0	0x10	/* 4 sprg's */
#define SL_DBAT0	0x20
#define SL_IBAT0	0x28
#define SL_DBAT1	0x30
#define SL_IBAT1	0x38
#define SL_DBAT2	0x40
#define SL_IBAT2	0x48
#define SL_DBAT3	0x50
#define SL_IBAT3	0x58
#define SL_TB		0x60
#define SL_R2		0x68
#define SL_CR		0x6c
#define SL_LR		0x70
#define SL_R12		0x74	/* r12 to r31 */
#define SL_SIZE		(SL_R12 + 80)	/* 20 GPRs * 4 bytes after SL_R12 */

	.section .data
	.align	5

_GLOBAL(swsusp_save_area)
	.space	SL_SIZE


	.section .text
	.align	5

/*
 * Save the callee-saved GPRs, CR, LR, MSR, SDR1, SPRGs, BATs and the
 * timebase into swsusp_save_area, then call the generic swsusp_save()
 * to snapshot memory.  Returns swsusp_save()'s return value in r3.
 */
_GLOBAL(swsusp_arch_suspend)

	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l

	mflr	r0
	stw	r0,SL_LR(r11)
	mfcr	r0
	stw	r0,SL_CR(r11)
	stw	r1,SL_SP(r11)
	stw	r2,SL_R2(r11)
	stmw	r12,SL_R12(r11)

	/* Save MSR & SDR1 */
	mfmsr	r4
	stw	r4,SL_MSR(r11)
	mfsdr1	r4
	stw	r4,SL_SDR1(r11)

	/* Get a stable timebase and save it: re-read TBU after TBL and
	 * retry if it changed (i.e. TBL wrapped between the reads). */
1:	mftbu	r4
	stw	r4,SL_TB(r11)
	mftb	r5
	stw	r5,SL_TB+4(r11)
	mftbu	r3
	cmpw	r3,r4
	bne	1b

	/* Save SPRGs */
	mfsprg	r4,0
	stw	r4,SL_SPRG0(r11)
	mfsprg	r4,1
	stw	r4,SL_SPRG0+4(r11)
	mfsprg	r4,2
	stw	r4,SL_SPRG0+8(r11)
	mfsprg	r4,3
	stw	r4,SL_SPRG0+12(r11)

	/* Save BATs (upper and lower halves of each pair) */
	mfdbatu	r4,0
	stw	r4,SL_DBAT0(r11)
	mfdbatl	r4,0
	stw	r4,SL_DBAT0+4(r11)
	mfdbatu	r4,1
	stw	r4,SL_DBAT1(r11)
	mfdbatl	r4,1
	stw	r4,SL_DBAT1+4(r11)
	mfdbatu	r4,2
	stw	r4,SL_DBAT2(r11)
	mfdbatl	r4,2
	stw	r4,SL_DBAT2+4(r11)
	mfdbatu	r4,3
	stw	r4,SL_DBAT3(r11)
	mfdbatl	r4,3
	stw	r4,SL_DBAT3+4(r11)
	mfibatu	r4,0
	stw	r4,SL_IBAT0(r11)
	mfibatl	r4,0
	stw	r4,SL_IBAT0+4(r11)
	mfibatu	r4,1
	stw	r4,SL_IBAT1(r11)
	mfibatl	r4,1
	stw	r4,SL_IBAT1+4(r11)
	mfibatu	r4,2
	stw	r4,SL_IBAT2(r11)
	mfibatl	r4,2
	stw	r4,SL_IBAT2+4(r11)
	mfibatu	r4,3
	stw	r4,SL_IBAT3(r11)
	mfibatl	r4,3
	stw	r4,SL_IBAT3+4(r11)

#if 0
	/* Backup various CPU config stuffs */
	bl	__save_cpu_setup
#endif
	/* Call the low level suspend stuff (we should probably have made
	 * a stackframe...
	 */
	bl	swsusp_save

	/* Restore LR from the save area */
	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l
	lwz	r0,SL_LR(r11)
	mtlr	r0

	blr


/* Resume code */
_GLOBAL(swsusp_arch_resume)

	/* Stop pending AltiVec streams and memory accesses */
BEGIN_FTR_SECTION
	DSSALL
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
	sync

	/* Disable MSR:DR to make sure we don't take a TLB or
	 * hash miss during the copy, as our hash table will
	 * for a while be unusable. For .text, we assume we are
	 * covered by a BAT. This works only for non-G5 at this
	 * point. G5 will need a better approach, possibly using
	 * a small temporary hash table filled with large mappings,
	 * disabling the MMU completely isn't a good option for
	 * performance reasons.
	 * (Note that 750's may have the same performance issue as
	 * the G5 in this case, we should investigate using moving
	 * BATs for these CPUs)
	 */
	mfmsr	r0
	sync
	rlwinm	r0,r0,0,28,26		/* clear MSR_DR (wrapped mask drops bit 27 only) */
	mtmsr	r0
	sync
	isync

	/* Load ptr the list of pages to copy in r3 */
	lis	r11,(pagedir_nosave - KERNELBASE)@h
	ori	r11,r11,pagedir_nosave@l
	lwz	r10,0(r11)

	/* Copy the pages. This is a very basic implementation, to
	 * be replaced by something more cache efficient */
1:
	tophys(r3,r10)
	li	r0,256			/* 256 * 16 bytes = one 4K page */
	mtctr	r0
	lwz	r11,pbe_address(r3)	/* source */
	tophys(r5,r11)
	lwz	r10,pbe_orig_address(r3)	/* destination */
	tophys(r6,r10)
2:
	lwz	r8,0(r5)
	lwz	r9,4(r5)
	lwz	r10,8(r5)
	lwz	r11,12(r5)
	addi	r5,r5,16
	stw	r8,0(r6)
	stw	r9,4(r6)
	stw	r10,8(r6)
	stw	r11,12(r6)
	addi	r6,r6,16
	bdnz	2b
	lwz	r10,pbe_next(r3)
	cmpwi	0,r10,0
	bne	1b

	/* Do a very simple cache flush/inval of the L1 to ensure
	 * coherency of the icache
	 */
	lis	r3,0x0002		/* 0x20000 iterations below */
	mtctr	r3
	li	r3, 0
1:
	lwz	r0,0(r3)		/* touch: load every 32-byte line */
	addi	r3,r3,0x0020
	bdnz	1b
	isync
	sync

	/* Now flush those cache lines */
	lis	r3,0x0002
	mtctr	r3
	li	r3, 0
1:
	dcbf	0,r3
	addi	r3,r3,0x0020
	bdnz	1b
	sync

	/* Ok, we are now running with the kernel data of the old
	 * kernel fully restored. We can get to the save area
	 * easily now. As for the rest of the code, it assumes the
	 * loader kernel and the booted one are exactly identical
	 */
	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l
	tophys(r11,r11)			/* MSR:DR is off, use the physical address */

#if 0
	/* Restore various CPU config stuffs */
	bl	__restore_cpu_setup
#endif
	/* Restore the BATs, and SDR1.  Then we can turn on the MMU.
	 * This is a bit hairy as we are running out of those BATs,
	 * but first, our code is probably in the icache, and we are
	 * writing the same value to the BAT, so that should be fine,
	 * though a better solution will have to be found long-term
	 */
	lwz	r4,SL_SDR1(r11)
	mtsdr1	r4
	lwz	r4,SL_SPRG0(r11)
	mtsprg	0,r4
	lwz	r4,SL_SPRG0+4(r11)
	mtsprg	1,r4
	lwz	r4,SL_SPRG0+8(r11)
	mtsprg	2,r4
	lwz	r4,SL_SPRG0+12(r11)
	mtsprg	3,r4

#if 0
	lwz	r4,SL_DBAT0(r11)
	mtdbatu	0,r4
	lwz	r4,SL_DBAT0+4(r11)
	mtdbatl	0,r4
	lwz	r4,SL_DBAT1(r11)
	mtdbatu	1,r4
	lwz	r4,SL_DBAT1+4(r11)
	mtdbatl	1,r4
	lwz	r4,SL_DBAT2(r11)
	mtdbatu	2,r4
	lwz	r4,SL_DBAT2+4(r11)
	mtdbatl	2,r4
	lwz	r4,SL_DBAT3(r11)
	mtdbatu	3,r4
	lwz	r4,SL_DBAT3+4(r11)
	mtdbatl	3,r4
	lwz	r4,SL_IBAT0(r11)
	mtibatu	0,r4
	lwz	r4,SL_IBAT0+4(r11)
	mtibatl	0,r4
	lwz	r4,SL_IBAT1(r11)
	mtibatu	1,r4
	lwz	r4,SL_IBAT1+4(r11)
	mtibatl	1,r4
	lwz	r4,SL_IBAT2(r11)
	mtibatu	2,r4
	lwz	r4,SL_IBAT2+4(r11)
	mtibatl	2,r4
	lwz	r4,SL_IBAT3(r11)
	mtibatu	3,r4
	lwz	r4,SL_IBAT3+4(r11)
	mtibatl	3,r4
#endif

	/* Clear the high BATs (4-7) on CPUs that have them, so no
	 * stale translations survive the resume. */
BEGIN_FTR_SECTION
	li	r4,0
	mtspr	SPRN_DBAT4U,r4
	mtspr	SPRN_DBAT4L,r4
	mtspr	SPRN_DBAT5U,r4
	mtspr	SPRN_DBAT5L,r4
	mtspr	SPRN_DBAT6U,r4
	mtspr	SPRN_DBAT6L,r4
	mtspr	SPRN_DBAT7U,r4
	mtspr	SPRN_DBAT7L,r4
	mtspr	SPRN_IBAT4U,r4
	mtspr	SPRN_IBAT4L,r4
	mtspr	SPRN_IBAT5U,r4
	mtspr	SPRN_IBAT5L,r4
	mtspr	SPRN_IBAT6U,r4
	mtspr	SPRN_IBAT6L,r4
	mtspr	SPRN_IBAT7U,r4
	mtspr	SPRN_IBAT7L,r4
END_FTR_SECTION_IFSET(CPU_FTR_HAS_HIGH_BATS)

	/* Flush all TLBs */
	lis	r4,0x1000
1:	addic.	r4,r4,-0x1000
	tlbie	r4
	blt	1b
	sync

	/* restore the MSR and turn on the MMU */
	lwz	r3,SL_MSR(r11)
	bl	turn_on_mmu
	tovirt(r11,r11)			/* translation is back on; use virtual address */

	/* Restore TB: zero TBL first so TBU can't carry mid-write */
	li	r3,0
	mttbl	r3
	lwz	r3,SL_TB(r11)
	lwz	r4,SL_TB+4(r11)
	mttbu	r3
	mttbl	r4

	/* Kick decrementer so a timer interrupt fires soon */
	li	r0,1
	mtdec	r0

	/* Restore the callee-saved registers and return */
	lwz	r0,SL_CR(r11)
	mtcr	r0
	lwz	r2,SL_R2(r11)
	lmw	r12,SL_R12(r11)
	lwz	r1,SL_SP(r11)
	lwz	r0,SL_LR(r11)
	mtlr	r0

	// XXX Note: we don't really need to call swsusp_resume

	li	r3,0
	blr

/* Re-enable translation by rfi'ing back to our caller with the saved
 * MSR in SRR1.
 * FIXME: This construct is actually not useful since we don't shut
 * down the instruction MMU, we could just flip back MSR:DR on.
 */
turn_on_mmu:
	mflr	r4
	mtsrr0	r4
	mtsrr1	r3
	sync
	isync
	rfi
