1 /* SPDX-License-Identifier: MIT */
2 /*
3  * Copyright © 2022 Intel Corporation
4  */
5 
6 #ifndef __I915_REG_DEFS__
7 #define __I915_REG_DEFS__
8 
9 #include <linux/bitfield.h>
10 #include <linux/bits.h>
11 
/**
 * REG_BIT() - Prepare a u32 bit value
 * @__n: 0-based bit number
 *
 * Local wrapper for BIT() to force u32, with compile time checks.
 *
 * If @__n is an integer constant expression outside the range 0..31,
 * BUILD_BUG_ON_ZERO() fails the build; non-constant arguments cannot be
 * checked at compile time and are passed through unvalidated.
 *
 * @return: Value with bit @__n set.
 */
#define REG_BIT(__n)							\
	((u32)(BIT(__n) +						\
	       BUILD_BUG_ON_ZERO(__is_constexpr(__n) &&		\
				 ((__n) < 0 || (__n) > 31))))
24 
/**
 * REG_GENMASK() - Prepare a continuous u32 bitmask
 * @__high: 0-based high bit
 * @__low: 0-based low bit
 *
 * Local wrapper for GENMASK() to force u32, with compile time checks.
 *
 * If both bounds are integer constant expressions, BUILD_BUG_ON_ZERO()
 * fails the build when @__low is negative, @__high exceeds 31, or the
 * bounds are swapped (@__low > @__high).
 *
 * @return: Continuous bitmask from @__high to @__low, inclusive.
 */
#define REG_GENMASK(__high, __low)					\
	((u32)(GENMASK(__high, __low) +					\
	       BUILD_BUG_ON_ZERO(__is_constexpr(__high) &&		\
				 __is_constexpr(__low) &&		\
				 ((__low) < 0 || (__high) > 31 || (__low) > (__high)))))
39 
/**
 * REG_GENMASK64() - Prepare a continuous u64 bitmask
 * @__high: 0-based high bit
 * @__low: 0-based low bit
 *
 * Local wrapper for GENMASK_ULL() to force u64, with compile time checks.
 *
 * If both bounds are integer constant expressions, BUILD_BUG_ON_ZERO()
 * fails the build when @__low is negative, @__high exceeds 63, or the
 * bounds are swapped (@__low > @__high).
 *
 * @return: Continuous bitmask from @__high to @__low, inclusive.
 */
#define REG_GENMASK64(__high, __low)					\
	((u64)(GENMASK_ULL(__high, __low) +				\
	       BUILD_BUG_ON_ZERO(__is_constexpr(__high) &&		\
				 __is_constexpr(__low) &&		\
				 ((__low) < 0 || (__high) > 63 || (__low) > (__high)))))
54 
/*
 * Local integer constant expression version of is_power_of_2(): true for a
 * non-zero value with exactly one bit set. Usable where an integer constant
 * expression is required (unlike the inline function in log2.h).
 */
#define IS_POWER_OF_2(__x)		(((__x) != 0) && (((__x) & ((__x) - 1)) == 0))
59 
/**
 * REG_FIELD_PREP() - Prepare a u32 bitfield value
 * @__mask: shifted mask defining the field's length and position
 * @__val: value to put in the field
 *
 * Local copy of FIELD_PREP() to generate an integer constant expression, force
 * u32 and for consistency with REG_FIELD_GET(), REG_BIT() and REG_GENMASK().
 *
 * Compile-time checks, one per BUILD_BUG_ON_ZERO() term below:
 *  - @__mask must be an integer constant expression;
 *  - @__mask must be non-zero and fit in 32 bits;
 *  - @__mask must be contiguous (adding its lowest set bit must yield a
 *    power of 2);
 *  - a constant @__val must fit entirely within @__mask (non-constant
 *    values are passed through unchecked, per __builtin_choose_expr).
 *
 * @return: @__val masked and shifted into the field defined by @__mask.
 */
#define REG_FIELD_PREP(__mask, __val)						\
	((u32)((((typeof(__mask))(__val) << __bf_shf(__mask)) & (__mask)) +	\
	       BUILD_BUG_ON_ZERO(!__is_constexpr(__mask)) +		\
	       BUILD_BUG_ON_ZERO((__mask) == 0 || (__mask) > U32_MAX) +		\
	       BUILD_BUG_ON_ZERO(!IS_POWER_OF_2((__mask) + (1ULL << __bf_shf(__mask)))) + \
	       BUILD_BUG_ON_ZERO(__builtin_choose_expr(__is_constexpr(__val), (~((__mask) >> __bf_shf(__mask)) & (__val)), 0))))
76 
/**
 * REG_FIELD_GET() - Extract a u32 bitfield value
 * @__mask: shifted mask defining the field's length and position
 * @__val: value to extract the bitfield value from
 *
 * Thin wrapper around FIELD_GET() that forces the result to u32, matching
 * the conventions of REG_FIELD_PREP(), REG_BIT() and REG_GENMASK().
 *
 * @return: Masked and shifted value of the field defined by @__mask in @__val.
 */
#define REG_FIELD_GET(__mask, __val)	((u32)FIELD_GET((__mask), (__val)))
88 
/**
 * REG_FIELD_GET64() - Extract a u64 bitfield value
 * @__mask: shifted mask defining the field's length and position
 * @__val: value to extract the bitfield value from
 *
 * Thin wrapper around FIELD_GET() that forces the result to u64, matching
 * the convention of REG_GENMASK64().
 *
 * @return: Masked and shifted value of the field defined by @__mask in @__val.
 */
#define REG_FIELD_GET64(__mask, __val)	((u64)FIELD_GET((__mask), (__val)))
100 
/*
 * Helpers for "masked" registers, where a written value carries its own
 * update mask in the upper 16 bits and the payload in the lower 16 bits
 * (layout per __MASKED_FIELD() below). NOTE(review): the select-which-bits-
 * to-update hardware semantics are assumed from the mask<<16|value encoding;
 * confirm against the register programming documentation.
 */
#define __MASKED_FIELD(mask, value) ((mask) << 16 | (value))
/* Validated variant: constant mask/value must fit in 16 bits, and a
 * constant value must lie entirely within the mask. */
#define _MASKED_FIELD(mask, value) ({					   \
	if (__builtin_constant_p(mask))					   \
		BUILD_BUG_ON_MSG(((mask) & 0xffff0000), "Incorrect mask"); \
	if (__builtin_constant_p(value))				   \
		BUILD_BUG_ON_MSG((value) & 0xffff0000, "Incorrect value"); \
	if (__builtin_constant_p(mask) && __builtin_constant_p(value))	   \
		BUILD_BUG_ON_MSG((value) & ~(mask),			   \
				 "Incorrect value for mask");		   \
	__MASKED_FIELD(mask, value); })
/* mask == value: the typeof temporary evaluates @a exactly once. */
#define _MASKED_BIT_ENABLE(a)	({ typeof(a) _a = (a); _MASKED_FIELD(_a, _a); })
/* Mask @a with a zero payload. */
#define _MASKED_BIT_DISABLE(a)	(_MASKED_FIELD((a), 0))
113 
/*
 * Index into an evenly spaced sequence given only its first two values:
 * return the 0-based __index'th element of the progression that starts at
 * __a and advances by (__b - __a) per step.
 *
 * Always prefer this over _PICK() if the numbers are evenly spaced.
 */
#define _PICK_EVEN(__index, __a, __b) ((__a) + ((__b) - (__a)) * (__index))
121 
/*
 * Given the arbitrary numbers in varargs, pick the 0-based __index'th number.
 *
 * Always prefer _PICK_EVEN() over this if the numbers are evenly spaced.
 *
 * NOTE(review): there is no bounds check -- an @__index at or beyond the
 * number of varargs indexes past the end of the unnamed compound-literal
 * array (undefined behavior). Callers must guarantee @__index is in range.
 */
#define _PICK(__index, ...) (((const u32 []){ __VA_ARGS__ })[__index])
128 
/* MMIO register offset wrapped in a struct for type safety, so a raw u32
 * cannot be passed where a register is expected (and vice versa). */
typedef struct {
	u32 reg;
} i915_reg_t;

/* Construct an i915_reg_t from a raw register offset. */
#define _MMIO(r) ((const i915_reg_t){ .reg = (r) })

/* Distinct wrapper type for MCR registers (presumably multicast/replicated,
 * per the _mcr_ naming -- confirm), so they can't be confused with plain
 * i915_reg_t at compile time. */
typedef struct {
	u32 reg;
} i915_mcr_reg_t;

/* Offset 0 serves as the "no register" sentinel; see i915_mmio_reg_valid(). */
#define INVALID_MMIO_REG _MMIO(0)
140 
/*
 * These macros can be used on either i915_reg_t or i915_mcr_reg_t since they're
 * simply operations on the register's offset and don't care about the MCR vs
 * non-MCR nature of the register.
 */
/* Raw u32 offset of @r; _Generic dispatches on the wrapper type so only the
 * two register types are accepted. */
#define i915_mmio_reg_offset(r) \
	_Generic((r), i915_reg_t: (r).reg, i915_mcr_reg_t: (r).reg)
/* True when @a and @b refer to the same register offset. */
#define i915_mmio_reg_equal(a, b) (i915_mmio_reg_offset(a) == i915_mmio_reg_offset(b))
/* True unless @r is INVALID_MMIO_REG (offset 0). */
#define i915_mmio_reg_valid(r) (!i915_mmio_reg_equal(r, INVALID_MMIO_REG))
150 
151 #endif /* __I915_REG_DEFS__ */
152