#include <linux/threads.h>
#include <asm/processor.h>
#include <asm/page.h>
#include <asm/cputable.h>
#include <asm/thread_info.h>
#include <asm/ppc_asm.h>
#include <asm/asm-offsets.h>
#include <asm/mmu.h>

/*
 * Structure for storing CPU registers in the save area.
 */
#define SL_SP		0
#define SL_PC		4
#define SL_MSR		8
#define SL_SDR1		0xc
#define SL_SPRG0	0x10	/* 4 sprg's */
#define SL_DBAT0	0x20
#define SL_IBAT0	0x28
#define SL_DBAT1	0x30
#define SL_IBAT1	0x38
#define SL_DBAT2	0x40
#define SL_IBAT2	0x48
#define SL_DBAT3	0x50
#define SL_IBAT3	0x58
#define SL_TB		0x60
#define SL_R2		0x68
#define SL_CR		0x6c
#define SL_LR		0x70
#define SL_R12		0x74	/* r12 to r31 */
#define SL_SIZE		(SL_R12 + 80)

	.section .data
	.align	5

_GLOBAL(swsusp_save_area)
	.space	SL_SIZE


	.section .text
	.align	5

_GLOBAL(swsusp_arch_suspend)

	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l

	mflr	r0
	stw	r0,SL_LR(r11)
	mfcr	r0
	stw	r0,SL_CR(r11)
	stw	r1,SL_SP(r11)
	stw	r2,SL_R2(r11)
	stmw	r12,SL_R12(r11)

	/* Save MSR & SDR1 */
	mfmsr	r4
	stw	r4,SL_MSR(r11)
	mfsdr1	r4
	stw	r4,SL_SDR1(r11)

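	/* The 64-bit timebase has to be read as two 32-bit halves
	 * (mftbu/mftb); TBU is re-read afterwards and the sequence is
	 * retried if it changed, in case TBL wrapped between the reads.
	 */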
	/* Get a stable timebase and save it */
1:	mftbu	r4
	stw	r4,SL_TB(r11)
	mftb	r5
	stw	r5,SL_TB+4(r11)
	mftbu	r3
	cmpw	r3,r4
	bne	1b

	/* Save SPRGs */
	mfsprg	r4,0
	stw	r4,SL_SPRG0(r11)
	mfsprg	r4,1
	stw	r4,SL_SPRG0+4(r11)
	mfsprg	r4,2
	stw	r4,SL_SPRG0+8(r11)
	mfsprg	r4,3
	stw	r4,SL_SPRG0+12(r11)

	/* Save BATs */
	mfdbatu	r4,0
	stw	r4,SL_DBAT0(r11)
	mfdbatl	r4,0
	stw	r4,SL_DBAT0+4(r11)
	mfdbatu	r4,1
	stw	r4,SL_DBAT1(r11)
	mfdbatl	r4,1
	stw	r4,SL_DBAT1+4(r11)
	mfdbatu	r4,2
	stw	r4,SL_DBAT2(r11)
	mfdbatl	r4,2
	stw	r4,SL_DBAT2+4(r11)
	mfdbatu	r4,3
	stw	r4,SL_DBAT3(r11)
	mfdbatl	r4,3
	stw	r4,SL_DBAT3+4(r11)
	mfibatu	r4,0
	stw	r4,SL_IBAT0(r11)
	mfibatl	r4,0
	stw	r4,SL_IBAT0+4(r11)
	mfibatu	r4,1
	stw	r4,SL_IBAT1(r11)
	mfibatl	r4,1
	stw	r4,SL_IBAT1+4(r11)
	mfibatu	r4,2
	stw	r4,SL_IBAT2(r11)
	mfibatl	r4,2
	stw	r4,SL_IBAT2+4(r11)
	mfibatu	r4,3
	stw	r4,SL_IBAT3(r11)
	mfibatl	r4,3
	stw	r4,SL_IBAT3+4(r11)

#if 0
	/* Backup various CPU config stuffs */
	bl	__save_cpu_setup
#endif
	/* Call the low level suspend stuff (we should probably have made
	 * a stackframe...)
	 */
	bl	swsusp_save

	/* Restore LR from the save area */
	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l
	lwz	r0,SL_LR(r11)
	mtlr	r0

	blr


/* Resume code */
_GLOBAL(swsusp_arch_resume)

#ifdef CONFIG_ALTIVEC
	/* Stop pending altivec streams and memory accesses */
BEGIN_FTR_SECTION
	DSSALL
END_FTR_SECTION_IFSET(CPU_FTR_ALTIVEC)
#endif
	sync

	/* Disable MSR:DR to make sure we don't take a TLB or
	 * hash miss during the copy, as our hash table will
	 * for a while be unusable. For .text, we assume we are
	 * covered by a BAT. This works only for non-G5 at this
	 * point. G5 will need a better approach, possibly using
	 * a small temporary hash table filled with large mappings;
	 * disabling the MMU completely isn't a good option for
	 * performance reasons.
	 * (Note that 750s may have the same performance issue as
	 * the G5 in this case; we should investigate using moving
	 * BATs for these CPUs)
	 */
	mfmsr	r0
	sync
	rlwinm	r0,r0,0,28,26		/* clear MSR_DR */
	mtmsr	r0
	sync
	isync

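	/* The image is restored by walking restore_pblist: for each pbe,
	 * pbe_address points at the copy of a page and pbe_orig_address
	 * at the frame it originally came from. The copy runs with data
	 * translation off, so all pointers are converted with tophys().
	 */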
	/* Load the pointer to the list of pages to copy into r10 */
	lis	r11,(restore_pblist - KERNELBASE)@h
	ori	r11,r11,restore_pblist@l
	lwz	r10,0(r11)

	/* Copy the pages. This is a very basic implementation, to
	 * be replaced by something more cache efficient */
1:
	tophys(r3,r10)
	li	r0,256
	mtctr	r0
	lwz	r11,pbe_address(r3)	/* source */
	tophys(r5,r11)
	lwz	r10,pbe_orig_address(r3)	/* destination */
	tophys(r6,r10)
2:
	lwz	r8,0(r5)
	lwz	r9,4(r5)
	lwz	r10,8(r5)
	lwz	r11,12(r5)
	addi	r5,r5,16
	stw	r8,0(r6)
	stw	r9,4(r6)
	stw	r10,8(r6)
	stw	r11,12(r6)
	addi	r6,r6,16
	bdnz	2b
	lwz	r10,pbe_next(r3)
	cmpwi	0,r10,0
	bne	1b

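	/* The two loops below first touch and then dcbf 4MB of low memory
	 * (0x20000 iterations, 32 bytes apart), presumably enough to cover
	 * the whole L1 on these CPUs, so every line that may still be dirty
	 * is written back before instructions are fetched from the restored
	 * image. A 32-byte cache line size is assumed.
	 */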
	/* Do a very simple cache flush/inval of the L1 to ensure
	 * coherency of the icache
	 */
	lis	r3,0x0002
	mtctr	r3
	li	r3, 0
1:
	lwz	r0,0(r3)
	addi	r3,r3,0x0020
	bdnz	1b
	isync
	sync

	/* Now flush those cache lines */
	lis	r3,0x0002
	mtctr	r3
	li	r3, 0
1:
	dcbf	0,r3
	addi	r3,r3,0x0020
	bdnz	1b
	sync

	/* Ok, we are now running with the kernel data of the old
	 * kernel fully restored. We can get to the save area
	 * easily now.  As for the rest of the code, it assumes the
	 * loader kernel and the booted one are exactly identical.
	 */
	lis	r11,swsusp_save_area@h
	ori	r11,r11,swsusp_save_area@l
	tophys(r11,r11)

#if 0
	/* Restore various CPU config stuffs */
	bl	__restore_cpu_setup
#endif
	/* Restore the BATs and SDR1.  Then we can turn on the MMU.
	 * This is a bit hairy as we are running from those BATs,
	 * but our code is probably in the icache, and we are
	 * writing the same values back to the BATs, so that should
	 * be fine, though a better solution will have to be found
	 * long-term.
	 */
	lwz	r4,SL_SDR1(r11)
	mtsdr1	r4
	lwz	r4,SL_SPRG0(r11)
	mtsprg	0,r4
	lwz	r4,SL_SPRG0+4(r11)
	mtsprg	1,r4
	lwz	r4,SL_SPRG0+8(r11)
	mtsprg	2,r4
	lwz	r4,SL_SPRG0+12(r11)
	mtsprg	3,r4

#if 0
	lwz	r4,SL_DBAT0(r11)
	mtdbatu	0,r4
	lwz	r4,SL_DBAT0+4(r11)
	mtdbatl	0,r4
	lwz	r4,SL_DBAT1(r11)
	mtdbatu	1,r4
	lwz	r4,SL_DBAT1+4(r11)
	mtdbatl	1,r4
	lwz	r4,SL_DBAT2(r11)
	mtdbatu	2,r4
	lwz	r4,SL_DBAT2+4(r11)
	mtdbatl	2,r4
	lwz	r4,SL_DBAT3(r11)
	mtdbatu	3,r4
	lwz	r4,SL_DBAT3+4(r11)
	mtdbatl	3,r4
	lwz	r4,SL_IBAT0(r11)
	mtibatu	0,r4
	lwz	r4,SL_IBAT0+4(r11)
	mtibatl	0,r4
	lwz	r4,SL_IBAT1(r11)
	mtibatu	1,r4
	lwz	r4,SL_IBAT1+4(r11)
	mtibatl	1,r4
	lwz	r4,SL_IBAT2(r11)
	mtibatu	2,r4
	lwz	r4,SL_IBAT2+4(r11)
	mtibatl	2,r4
	lwz	r4,SL_IBAT3(r11)
	mtibatu	3,r4
	lwz	r4,SL_IBAT3+4(r11)
	mtibatl	3,r4
#endif

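	/* CPUs that implement the extra BAT registers 4-7
	 * (MMU_FTR_USE_HIGH_BATS) get them cleared here so that no stale
	 * mapping left over from the boot kernel survives the resume.
	 */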
BEGIN_MMU_FTR_SECTION
	li	r4,0
	mtspr	SPRN_DBAT4U,r4
	mtspr	SPRN_DBAT4L,r4
	mtspr	SPRN_DBAT5U,r4
	mtspr	SPRN_DBAT5L,r4
	mtspr	SPRN_DBAT6U,r4
	mtspr	SPRN_DBAT6L,r4
	mtspr	SPRN_DBAT7U,r4
	mtspr	SPRN_DBAT7L,r4
	mtspr	SPRN_IBAT4U,r4
	mtspr	SPRN_IBAT4L,r4
	mtspr	SPRN_IBAT5U,r4
	mtspr	SPRN_IBAT5L,r4
	mtspr	SPRN_IBAT6U,r4
	mtspr	SPRN_IBAT6L,r4
	mtspr	SPRN_IBAT7U,r4
	mtspr	SPRN_IBAT7L,r4
END_MMU_FTR_SECTION_IFSET(MMU_FTR_USE_HIGH_BATS)

	/* Flush all TLBs */
	lis	r4,0x1000
1:	addic.	r4,r4,-0x1000
	tlbie	r4
	bgt	1b
	sync

	/* Restore the MSR and turn on the MMU */
	lwz	r3,SL_MSR(r11)
	bl	turn_on_mmu
	tovirt(r11,r11)

	/* Restore TB */
	li	r3,0
	mttbl	r3
	lwz	r3,SL_TB(r11)
	lwz	r4,SL_TB+4(r11)
	mttbu	r3
	mttbl	r4

	/* Kick decrementer */
	li	r0,1
	mtdec	r0

	/* Restore the callee-saved registers and return */
	lwz	r0,SL_CR(r11)
	mtcr	r0
	lwz	r2,SL_R2(r11)
	lmw	r12,SL_R12(r11)
	lwz	r1,SL_SP(r11)
	lwz	r0,SL_LR(r11)
	mtlr	r0

	// XXX Note: we don't really need to call swsusp_resume

	li	r3,0
	blr

/* FIXME: This construct is not really useful since we don't shut
 * down the instruction MMU; we could just flip MSR:DR back on.
 */
turn_on_mmu:
	mflr	r4
	mtsrr0	r4
	mtsrr1	r3
	sync
	isync
	rfi