/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_IRQFLAGS_H
#define __ASM_IRQFLAGS_H

#include <asm/alternative.h>
#include <asm/ptrace.h>
#include <asm/sysreg.h>

/*
 * AArch64 has flags for masking: Debug, Asynchronous (serror), Interrupts and
 * FIQ exceptions, in the 'daif' register. We mask and unmask them in 'dai'
 * order:
 * Masking debug exceptions causes all other exceptions to be masked too.
 * Masking SError masks irq, but not debug exceptions. Masking irqs has no
 * side effects for other flags. Keeping to this order makes it easier for
 * entry.S to know which exceptions should be unmasked.
 *
 * FIQ is never expected, but we mask it when we disable debug exceptions, and
 * unmask it at all other times.
 */
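
/*
 * Note on the immediates used below: 'msr daifset, #imm' and
 * 'msr daifclr, #imm' take a 4-bit mask with D=8, A=4, I=2, F=1, so the
 * '#2' in the functions below touches only the IRQ (PSTATE.I) bit.
 */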

/*
 * CPU interrupt mask handling.
 */
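/*
 * Unmask IRQs. Without GIC priority masking this clears PSTATE.I; with
 * ARM64_HAS_IRQ_PRIO_MASKING it instead writes GIC_PRIO_IRQON to
 * ICC_PMR_EL1, and the dsb makes the new priority visible to the GIC so a
 * pending interrupt can be delivered. The WARN_ON_ONCE() checks that PMR
 * only ever holds one of the two values used for masking.
 */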
static inline void arch_local_irq_enable(void)
{
	if (system_has_prio_mask_debugging()) {
		u32 pmr = read_sysreg_s(SYS_ICC_PMR_EL1);

		WARN_ON_ONCE(pmr != GIC_PRIO_IRQON && pmr != GIC_PRIO_IRQOFF);
	}

	asm volatile(ALTERNATIVE(
		"msr	daifclr, #2		// arch_local_irq_enable\n"
		"nop",
		__msr_s(SYS_ICC_PMR_EL1, "%0")
		"dsb	sy",
		ARM64_HAS_IRQ_PRIO_MASKING)
		:
		: "r" ((unsigned long) GIC_PRIO_IRQON)
		: "memory");
}

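/*
 * Mask IRQs: the mirror of arch_local_irq_enable(), either setting PSTATE.I
 * or writing GIC_PRIO_IRQOFF to ICC_PMR_EL1.
 */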
static inline void arch_local_irq_disable(void)
{
	if (system_has_prio_mask_debugging()) {
		u32 pmr = read_sysreg_s(SYS_ICC_PMR_EL1);

		WARN_ON_ONCE(pmr != GIC_PRIO_IRQON && pmr != GIC_PRIO_IRQOFF);
	}

	asm volatile(ALTERNATIVE(
		"msr	daifset, #2		// arch_local_irq_disable",
		__msr_s(SYS_ICC_PMR_EL1, "%0"),
		ARM64_HAS_IRQ_PRIO_MASKING)
		:
		: "r" ((unsigned long) GIC_PRIO_IRQOFF)
		: "memory");
}

/*
 * Save the current interrupt enable state.
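 * Depending on ARM64_HAS_IRQ_PRIO_MASKING, the value returned is either the
 * DAIF bits or the current ICC_PMR_EL1 priority.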
 */
static inline unsigned long arch_local_save_flags(void)
{
	unsigned long flags;

	asm volatile(ALTERNATIVE(
		"mrs	%0, daif",
		__mrs_s("%0", SYS_ICC_PMR_EL1),
		ARM64_HAS_IRQ_PRIO_MASKING)
		: "=&r" (flags)
		:
		: "memory");

	return flags;
}

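/*
 * Flags count as "IRQs disabled" when the I bit is set (DAIF case), or when
 * the saved PMR value is anything other than GIC_PRIO_IRQON (priority
 * masking case, hence the eor).
 */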
static inline int arch_irqs_disabled_flags(unsigned long flags)
{
	int res;

	asm volatile(ALTERNATIVE(
		"and	%w0, %w1, #" __stringify(PSR_I_BIT),
		"eor	%w0, %w1, #" __stringify(GIC_PRIO_IRQON),
		ARM64_HAS_IRQ_PRIO_MASKING)
		: "=&r" (res)
		: "r" ((int) flags)
		: "memory");

	return res;
}

static inline unsigned long arch_local_irq_save(void)
{
	unsigned long flags;

	flags = arch_local_save_flags();

	/*
	 * There are too many states with IRQs disabled, just keep the current
	 * state if interrupts are already disabled/masked.
	 */
	if (!arch_irqs_disabled_flags(flags))
		arch_local_irq_disable();

	return flags;
}

/*
 * Restore the saved IRQ state.
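 * 'flags' must come from arch_local_save_flags() or arch_local_irq_save(),
 * so it is either a DAIF value or a PMR priority, matching whichever
 * alternative is patched in.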
 */
static inline void arch_local_irq_restore(unsigned long flags)
{
	asm volatile(ALTERNATIVE(
			"msr	daif, %0\n"
			"nop",
			__msr_s(SYS_ICC_PMR_EL1, "%0")
			"dsb	sy",
			ARM64_HAS_IRQ_PRIO_MASKING)
		:
		: "r" (flags)
		: "memory");
}

#endif /* __ASM_IRQFLAGS_H */