/*
 * include/asm-alpha/processor.h
 *
 * Copyright (C) 1994 Linus Torvalds
 */

#ifndef __ASM_ALPHA_PROCESSOR_H
#define __ASM_ALPHA_PROCESSOR_H

#include <linux/personality.h>	/* for ADDR_LIMIT_32BIT */

/*
 * Returns current instruction pointer ("program counter").
 */
#define current_text_addr() \
	({ void *__pc; __asm__ ("br %0,.+4" : "=r"(__pc)); __pc; })

/*
 * We have a 42-bit user address space: 4TB user VM...
 */
#define TASK_SIZE (0x40000000000UL)

#define STACK_TOP \
	(current->personality & ADDR_LIMIT_32BIT ? 0x80000000 : 0x00120000000UL)

#define STACK_TOP_MAX	0x00120000000UL

/* This decides where the kernel will search for a free chunk of vm
 * space during mmap's.
 */
#define TASK_UNMAPPED_BASE \
	((current->personality & ADDR_LIMIT_32BIT) ? 0x40000000 : TASK_SIZE / 2)

typedef struct {
	unsigned long seg;
} mm_segment_t;

/* This is dead. Everything has been moved to thread_info. */
struct thread_struct { };
#define INIT_THREAD  { }

/* Return saved PC of a blocked thread. */
struct task_struct;
extern unsigned long thread_saved_pc(struct task_struct *);

/* Do necessary setup to start up a newly executed thread. */
extern void start_thread(struct pt_regs *, unsigned long, unsigned long);

/* Free all resources held by a thread. */
extern void release_thread(struct task_struct *);

/* Prepare to copy thread state - unlazy all lazy status */
#define prepare_to_copy(tsk)	do { } while (0)

/* Create a kernel thread without removing it from tasklists. */
extern long kernel_thread(int (*fn)(void *), void *arg, unsigned long flags);

unsigned long get_wchan(struct task_struct *p);

#define KSTK_EIP(tsk)	(task_pt_regs(tsk)->pc)

#define KSTK_ESP(tsk) \
	((tsk) == current ? rdusp() : task_thread_info(tsk)->pcb.usp)

#define cpu_relax()	barrier()

#define ARCH_HAS_PREFETCH
#define ARCH_HAS_PREFETCHW
#define ARCH_HAS_SPINLOCK_PREFETCH

#ifndef CONFIG_SMP
/* Nothing to prefetch. */
#define spin_lock_prefetch(lock)	do { } while (0)
#endif

extern inline void prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 0, 3);
}

extern inline void prefetchw(const void *ptr)
{
	__builtin_prefetch(ptr, 1, 3);
}

#ifdef CONFIG_SMP
extern inline void spin_lock_prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 1, 3);
}
#endif

#endif /* __ASM_ALPHA_PROCESSOR_H */