/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __HEAD_32_H__
#define __HEAD_32_H__

#include <asm/ptrace.h>	/* for STACK_FRAME_REGS_MARKER */

/*
 * Exception entry code.  This code runs with address translation
 * turned off, i.e. using physical addresses.
 * We assume sprg3 has the physical address of the current
 * task's thread_struct.
 */
.macro EXCEPTION_PROLOG handle_dar_dsisr=0
	EXCEPTION_PROLOG_0	handle_dar_dsisr=\handle_dar_dsisr
	EXCEPTION_PROLOG_1
	EXCEPTION_PROLOG_2	handle_dar_dsisr=\handle_dar_dsisr
.endm

.macro EXCEPTION_PROLOG_0 handle_dar_dsisr=0
	mtspr	SPRN_SPRG_SCRATCH0,r10
	mtspr	SPRN_SPRG_SCRATCH1,r11
#ifdef CONFIG_VMAP_STACK
	mfspr	r10, SPRN_SPRG_THREAD
	.if	\handle_dar_dsisr
	mfspr	r11, SPRN_DAR
	stw	r11, DAR(r10)
	mfspr	r11, SPRN_DSISR
	stw	r11, DSISR(r10)
	.endif
	mfspr	r11, SPRN_SRR0
	stw	r11, SRR0(r10)
#endif
	mfspr	r11, SPRN_SRR1		/* check whether user or kernel */
#ifdef CONFIG_VMAP_STACK
	stw	r11, SRR1(r10)
#endif
	mfcr	r10
	andi.	r11, r11, MSR_PR
.endm

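/*
 * The prologue is split in three stages: EXCEPTION_PROLOG_0 saves the
 * scratch registers (and, with CONFIG_VMAP_STACK, stashes SRR0/SRR1 and
 * optionally DAR/DSISR in the thread_struct), EXCEPTION_PROLOG_1 selects
 * the kernel stack to use (the previous kernel SP or the top of the
 * task stack when coming from user mode), and EXCEPTION_PROLOG_2 builds
 * the pt_regs frame on that stack.
 */
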
.macro EXCEPTION_PROLOG_1 for_rtas=0
#ifdef CONFIG_VMAP_STACK
	mr	r11, r1
	subi	r1, r1, INT_FRAME_SIZE		/* use r1 if kernel */
	beq	1f
	mfspr	r1,SPRN_SPRG_THREAD
	lwz	r1,TASK_STACK-THREAD(r1)
	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
#else
	subi	r11, r1, INT_FRAME_SIZE		/* use r1 if kernel */
	beq	1f
	mfspr	r11,SPRN_SPRG_THREAD
	lwz	r11,TASK_STACK-THREAD(r11)
	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
#endif
1:
	tophys_novmstack r11, r11
#ifdef CONFIG_VMAP_STACK
	mtcrf	0x7f, r1
	bt	32 - THREAD_ALIGN_SHIFT, stack_overflow
#endif
.endm

.macro EXCEPTION_PROLOG_2 handle_dar_dsisr=0
#ifdef CONFIG_VMAP_STACK
	mtcr	r10
	li	r10, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
	mtmsr	r10
	isync
#else
	stw	r10,_CCR(r11)		/* save registers */
#endif
	mfspr	r10, SPRN_SPRG_SCRATCH0
#ifdef CONFIG_VMAP_STACK
	stw	r11,GPR1(r1)
	stw	r11,0(r1)
	mr	r11, r1
#else
	stw	r1,GPR1(r11)
	stw	r1,0(r11)
	tovirt(r1, r11)		/* set new kernel sp */
#endif
	stw	r12,GPR12(r11)
	stw	r9,GPR9(r11)
	stw	r10,GPR10(r11)
#ifdef CONFIG_VMAP_STACK
	mfcr	r10
	stw	r10, _CCR(r11)
#endif
	mfspr	r12,SPRN_SPRG_SCRATCH1
	stw	r12,GPR11(r11)
	mflr	r10
	stw	r10,_LINK(r11)
#ifdef CONFIG_VMAP_STACK
	mfspr	r12, SPRN_SPRG_THREAD
	tovirt(r12, r12)
	.if	\handle_dar_dsisr
	lwz	r10, DAR(r12)
	stw	r10, _DAR(r11)
	lwz	r10, DSISR(r12)
	stw	r10, _DSISR(r11)
	.endif
	lwz	r9, SRR1(r12)
	andi.	r10, r9, MSR_PR
	lwz	r12, SRR0(r12)
#else
	mfspr	r12,SPRN_SRR0
	mfspr	r9,SPRN_SRR1
#endif
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
#ifdef CONFIG_VMAP_STACK
	li	r10, MSR_KERNEL & ~MSR_IR /* can take exceptions */
#else
	li	r10,MSR_KERNEL & ~(MSR_IR|MSR_DR) /* can take exceptions */
#endif
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	stw	r0,GPR0(r11)
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r10,8(r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
.endm

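/*
 * SYSCALL_ENTRY builds a similar exception frame for the system call
 * vector.  Syscalls issued from kernel mode are not handled here and
 * branch straight to ret_from_kernel_syscall (label 99:); otherwise the
 * macro switches to the task's kernel stack, saves the volatile state
 * and RFIs to transfer_to_syscall with the MMU re-enabled.
 */
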
.macro SYSCALL_ENTRY trapno
	mfspr	r12,SPRN_SPRG_THREAD
	mfspr	r9, SPRN_SRR1
#ifdef CONFIG_VMAP_STACK
	mfspr	r11, SPRN_SRR0
	mtctr	r11
#endif
	andi.	r11, r9, MSR_PR
	lwz	r11,TASK_STACK-THREAD(r12)
	beq-	99f
	addi	r11, r11, THREAD_SIZE - INT_FRAME_SIZE
#ifdef CONFIG_VMAP_STACK
	li	r10, MSR_KERNEL & ~(MSR_IR | MSR_RI) /* can take DTLB miss */
	mtmsr	r10
	isync
#endif
	tovirt_vmstack r12, r12
	tophys_novmstack r11, r11
	mflr	r10
	stw	r10, _LINK(r11)
#ifdef CONFIG_VMAP_STACK
	mfctr	r10
#else
	mfspr	r10,SPRN_SRR0
#endif
	stw	r1,GPR1(r11)
	stw	r1,0(r11)
	tovirt_novmstack r1, r11	/* set new kernel sp */
	stw	r10,_NIP(r11)
	mfcr	r10
	rlwinm	r10,r10,0,4,2		/* Clear SO bit in CR */
	stw	r10,_CCR(r11)		/* save registers */
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
#ifdef CONFIG_VMAP_STACK
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~MSR_IR) /* can take exceptions */
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR|MSR_DR)) /* can take exceptions */
#endif
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	stw	r2,GPR2(r11)
	addi	r10,r10,STACK_FRAME_REGS_MARKER@l
	stw	r9,_MSR(r11)
	li	r2, \trapno + 1
	stw	r10,8(r11)
	stw	r2,_TRAP(r11)
	SAVE_GPR(0, r11)
	SAVE_4GPRS(3, r11)
	SAVE_2GPRS(7, r11)
	addi	r11,r1,STACK_FRAME_OVERHEAD
	addi	r2,r12,-THREAD
	stw	r11,PT_REGS(r12)
#if defined(CONFIG_40x)
	/* Check to see if the dbcr0 register is set up to debug.  Use the
	   internal debug mode bit to do this. */
	lwz	r12,THREAD_DBCR0(r12)
	andis.	r12,r12,DBCR0_IDM@h
#endif
	ACCOUNT_CPU_USER_ENTRY(r2, r11, r12)
#if defined(CONFIG_40x)
	beq+	3f
	/* From user and task is ptraced - load up global dbcr0 */
	li	r12,-1			/* clear all pending debug events */
	mtspr	SPRN_DBSR,r12
	lis	r11,global_dbcr0@ha
	tophys(r11,r11)
	addi	r11,r11,global_dbcr0@l
	lwz	r12,0(r11)
	mtspr	SPRN_DBCR0,r12
	lwz	r12,4(r11)
	addi	r12,r12,-1
	stw	r12,4(r11)
#endif

3:
	tovirt_novmstack r2, r2		/* set r2 to current */
	lis	r11, transfer_to_syscall@h
	ori	r11, r11, transfer_to_syscall@l
#ifdef CONFIG_TRACE_IRQFLAGS
	/*
	 * If MSR is changing we need to keep interrupts disabled at this point
	 * otherwise we might risk taking an interrupt before we tell lockdep
	 * they are enabled.
	 */
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)
	rlwimi	r10, r9, 0, MSR_EE
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
#endif
#if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
	mtspr	SPRN_NRI, r0
#endif
	mtspr	SPRN_SRR1,r10
	mtspr	SPRN_SRR0,r11
	SYNC
	RFI				/* jump to handler, enable MMU */
99:	b	ret_from_kernel_syscall
.endm

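/*
 * The helpers below hide the CONFIG_VMAP_STACK differences: with
 * vmapped stacks DAR/DSISR have already been copied into the frame by
 * the prologue and the stack is addressed virtually, so the
 * *_vmstack / *_novmstack variants only perform the address conversion
 * in the configuration that needs it and degrade to a plain mr (or
 * nothing) otherwise.
 */
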
.macro save_dar_dsisr_on_stack reg1, reg2, sp
#ifndef CONFIG_VMAP_STACK
	mfspr	\reg1, SPRN_DAR
	mfspr	\reg2, SPRN_DSISR
	stw	\reg1, _DAR(\sp)
	stw	\reg2, _DSISR(\sp)
#endif
.endm

.macro get_and_save_dar_dsisr_on_stack reg1, reg2, sp
#ifdef CONFIG_VMAP_STACK
	lwz	\reg1, _DAR(\sp)
	lwz	\reg2, _DSISR(\sp)
#else
	save_dar_dsisr_on_stack \reg1, \reg2, \sp
#endif
.endm

.macro tovirt_vmstack dst, src
#ifdef CONFIG_VMAP_STACK
	tovirt(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

.macro tovirt_novmstack dst, src
#ifndef CONFIG_VMAP_STACK
	tovirt(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

.macro tophys_novmstack dst, src
#ifndef CONFIG_VMAP_STACK
	tophys(\dst, \src)
#else
	.ifnc	\dst, \src
	mr	\dst, \src
	.endif
#endif
.endm

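/*
 * Illustrative use from a DSI-style handler (sketch only, the register
 * choice is hypothetical): after EXCEPTION_PROLOG handle_dar_dsisr=1,
 *
 *	get_and_save_dar_dsisr_on_stack r4, r5, r11
 *
 * leaves DAR in r4 and DSISR in r5, with both values saved in the
 * exception frame whether or not CONFIG_VMAP_STACK is enabled.
 */
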
/*
 * Note: code which follows this uses cr0.eq (set if from kernel),
 * r11, r12 (SRR0), and r9 (SRR1).
 *
 * Note2: once we have set r1 we are in a position to take exceptions
 * again, and we could thus set MSR:RI at that point.
 */

/*
 * Exception vectors.
 */
#ifdef CONFIG_PPC_BOOK3S
#define	START_EXCEPTION(n, label)		\
	. = n;					\
	DO_KVM n;				\
label:

#else
#define	START_EXCEPTION(n, label)		\
	. = n;					\
label:

#endif

#define EXCEPTION(n, label, hdlr, xfer)		\
	START_EXCEPTION(n, label)		\
	EXCEPTION_PROLOG;			\
	addi	r3,r1,STACK_FRAME_OVERHEAD;	\
	xfer(n, hdlr)

#define EXC_XFER_TEMPLATE(hdlr, trap, msr, tfer, ret)		\
	li	r10,trap;					\
	stw	r10,_TRAP(r11);					\
	LOAD_REG_IMMEDIATE(r10, msr);				\
	bl	tfer;						\
	.long	hdlr;						\
	.long	ret

#define EXC_XFER_STD(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n, MSR_KERNEL, transfer_to_handler_full,	\
			  ret_from_except_full)

#define EXC_XFER_LITE(n, hdlr)		\
	EXC_XFER_TEMPLATE(hdlr, n+1, MSR_KERNEL, transfer_to_handler, \
			  ret_from_except)

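/*
 * Illustrative use of the vector macros (sketch, as typically found in
 * head_32.S):
 *
 *	EXCEPTION(0x500, HardwareInterrupt, do_IRQ, EXC_XFER_LITE)
 *
 * places the vector at 0x500, runs EXCEPTION_PROLOG and transfers to
 * do_IRQ through transfer_to_handler / ret_from_except.
 */
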
.macro vmap_stack_overflow_exception
#ifdef CONFIG_VMAP_STACK
#ifdef CONFIG_SMP
	mfspr	r1, SPRN_SPRG_THREAD
	lwz	r1, TASK_CPU - THREAD(r1)
	slwi	r1, r1, 3
	addis	r1, r1, emergency_ctx@ha
#else
	lis	r1, emergency_ctx@ha
#endif
	lwz	r1, emergency_ctx@l(r1)
	cmpwi	cr1, r1, 0
	bne	cr1, 1f
	lis	r1, init_thread_union@ha
	addi	r1, r1, init_thread_union@l
1:	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
	EXCEPTION_PROLOG_2
	SAVE_NVGPRS(r11)
	addi	r3, r1, STACK_FRAME_OVERHEAD
	EXC_XFER_STD(0, stack_overflow_exception)
#endif
.endm

#endif /* __HEAD_32_H__ */