/* SPDX-License-Identifier: GPL-2.0 */
/*
 * include/asm-alpha/processor.h
 *
 * Copyright (C) 1994 Linus Torvalds
 */

#ifndef __ASM_ALPHA_PROCESSOR_H
#define __ASM_ALPHA_PROCESSOR_H

#include <linux/personality.h>	/* for ADDR_LIMIT_32BIT */

/*
 * Returns current instruction pointer ("program counter").
 */
#define current_text_addr() \
  ({ void *__pc; __asm__ ("br %0,.+4" : "=r"(__pc)); __pc; })

/*
 * We have a 42-bit user address space: 4TB user VM...
 */
#define TASK_SIZE (0x40000000000UL)

#define STACK_TOP \
  (current->personality & ADDR_LIMIT_32BIT ? 0x80000000 : 0x00120000000UL)

#define STACK_TOP_MAX	0x00120000000UL

/* This decides where the kernel will search for a free chunk of vm
 * space during mmap's.
 */
#define TASK_UNMAPPED_BASE \
  ((current->personality & ADDR_LIMIT_32BIT) ? 0x40000000 : TASK_SIZE / 2)

typedef struct {
	unsigned long seg;
} mm_segment_t;

/* This is dead.  Everything has been moved to thread_info. */
struct thread_struct { };
#define INIT_THREAD  { }

/* Return saved PC of a blocked thread. */
struct task_struct;
extern unsigned long thread_saved_pc(struct task_struct *);

/* Do necessary setup to start up a newly executed thread. */
struct pt_regs;
extern void start_thread(struct pt_regs *, unsigned long, unsigned long);

/* Free all resources held by a thread. */
extern void release_thread(struct task_struct *);

unsigned long get_wchan(struct task_struct *p);

#define KSTK_EIP(tsk) (task_pt_regs(tsk)->pc)

#define KSTK_ESP(tsk) \
  ((tsk) == current ? rdusp() : task_thread_info(tsk)->pcb.usp)

#define cpu_relax()	barrier()

#define ARCH_HAS_PREFETCH
#define ARCH_HAS_PREFETCHW
#define ARCH_HAS_SPINLOCK_PREFETCH

#ifndef CONFIG_SMP
/* Nothing to prefetch. */
#define spin_lock_prefetch(lock)	do { } while (0)
#endif

extern inline void prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 0, 3);
}

extern inline void prefetchw(const void *ptr)
{
	__builtin_prefetch(ptr, 1, 3);
}

#ifdef CONFIG_SMP
extern inline void spin_lock_prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 1, 3);
}
#endif

#endif /* __ASM_ALPHA_PROCESSOR_H */
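
/*
 * Illustrative sketch: prefetch() and prefetchw() above are thin wrappers
 * around GCC's __builtin_prefetch(addr, rw, locality), where rw is 0 for a
 * read hint and 1 for a write hint, and locality 3 requests maximum temporal
 * locality.  A common calling pattern is to prefetch the next element of a
 * linked structure while processing the current one.  The "node" struct and
 * sum_nodes() below are hypothetical and only show that pattern; they are
 * not part of this header:
 *
 *	struct node { struct node *next; long data; };
 *
 *	static long sum_nodes(struct node *p)
 *	{
 *		long sum = 0;
 *		while (p) {
 *			prefetch(p->next);	// hint the next node into cache
 *			sum += p->data;
 *			p = p->next;
 *		}
 *		return sum;
 *	}
 */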