/* SPDX-License-Identifier: GPL-2.0 */
/*
 * include/asm-alpha/processor.h
 *
 * Copyright (C) 1994 Linus Torvalds
 */

#ifndef __ASM_ALPHA_PROCESSOR_H
#define __ASM_ALPHA_PROCESSOR_H

#include <linux/personality.h>	/* for ADDR_LIMIT_32BIT */

/*
 * Returns current instruction pointer ("program counter").
 */
#define current_text_addr() \
  ({ void *__pc; __asm__ ("br %0,.+4" : "=r"(__pc)); __pc; })
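/*
 * A note on the asm above: the Alpha "br" instruction writes the updated
 * PC (the address of the instruction following the branch) into its
 * destination register before branching, so branching to ".+4" -- the
 * very next instruction -- is effectively a no-op that leaves the
 * current text address in __pc.  A hypothetical debug user might do:
 *
 *	printk(KERN_DEBUG "running near %p\n", current_text_addr());
 */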

/*
 * We have a 42-bit user address space: 4TB user VM...
 */
#define TASK_SIZE (0x40000000000UL)
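/* For reference: 0x40000000000 == 1UL << 42, i.e. 4 TiB of user VM. */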

#define STACK_TOP \
  (current->personality & ADDR_LIMIT_32BIT ? 0x80000000 : 0x00120000000UL)

#define STACK_TOP_MAX	0x00120000000UL
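/*
 * That is: a task running with the ADDR_LIMIT_32BIT personality gets its
 * stack top at 0x80000000 (2 GiB), a native 64-bit task at 0x120000000
 * (4.5 GiB).  STACK_TOP_MAX is the native-case limit.
 */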

/* This decides where the kernel will search for a free chunk of VM
 * space during mmap().
 */
#define TASK_UNMAPPED_BASE \
  ((current->personality & ADDR_LIMIT_32BIT) ? 0x40000000 : TASK_SIZE / 2)
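/*
 * For reference: 0x40000000 is 1 GiB for 32-bit personalities; for
 * native tasks TASK_SIZE / 2 puts the mmap search base at 2 TiB.
 */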

typedef struct {
	unsigned long seg;
} mm_segment_t;
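/*
 * 'seg' holds the per-task address-space limit consulted by the
 * get_fs()/set_fs() uaccess checks.
 */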

/* This is dead.  Everything has been moved to thread_info.  */
struct thread_struct { };
#define INIT_THREAD  { }

/* Do necessary setup to start up a newly executed thread.  */
struct pt_regs;
extern void start_thread(struct pt_regs *, unsigned long, unsigned long);

/* Free all resources held by a thread. */
struct task_struct;
extern void release_thread(struct task_struct *);

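/*
 * Report the kernel text address at which a sleeping task is blocked
 * (its "wait channel"), e.g. for /proc/<pid>/wchan.
 */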
unsigned long get_wchan(struct task_struct *p);

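/*
 * Saved user PC and stack pointer of a task.  For the running task the
 * stack pointer is read live via rdusp(); for any other task it is the
 * usp value saved in that task's PCB in thread_info.
 */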
#define KSTK_EIP(tsk) (task_pt_regs(tsk)->pc)

#define KSTK_ESP(tsk) \
  ((tsk) == current ? rdusp() : task_thread_info(tsk)->pcb.usp)

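/*
 * Alpha has no spin-wait hint instruction, so cpu_relax() is just a
 * compiler barrier.
 */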
#define cpu_relax()	barrier()

#define ARCH_HAS_PREFETCH
#define ARCH_HAS_PREFETCHW
#define ARCH_HAS_SPINLOCK_PREFETCH

#ifndef CONFIG_SMP
/* Nothing to prefetch. */
#define spin_lock_prefetch(lock)	do { } while (0)
#endif

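/*
 * These map straight onto GCC's __builtin_prefetch(addr, rw, locality):
 * rw is 0 for a read and 1 for a write prefetch, and locality 3 asks for
 * maximum temporal locality (keep the line in all cache levels).
 */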
extern inline void prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 0, 3);
}

extern inline void prefetchw(const void *ptr)
{
	__builtin_prefetch(ptr, 1, 3);
}

#ifdef CONFIG_SMP
extern inline void spin_lock_prefetch(const void *ptr)
{
	__builtin_prefetch(ptr, 1, 3);
}
#endif

#endif /* __ASM_ALPHA_PROCESSOR_H */