/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2008-2018 Intel Corporation
 */

#ifndef _I915_GPU_ERROR_H_
#define _I915_GPU_ERROR_H_

#include <linux/atomic.h>
#include <linux/kref.h>
#include <linux/ktime.h>
#include <linux/sched.h>

#include <drm/drm_mm.h>

#include "gt/intel_engine.h"
#include "gt/uc/intel_uc_fw.h"

#include "intel_device_info.h"

#include "i915_gem.h"
#include "i915_gem_gtt.h"
#include "i915_params.h"
#include "i915_scheduler.h"

struct drm_i915_private;
struct i915_vma_compress;
struct intel_engine_capture_vma;
struct intel_overlay_error_state;
struct intel_display_error_state;

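/*
 * Snapshot of a single VMA taken at error time: the GTT placement of the
 * buffer and page_count copies of its contents, stored page by page
 * (optionally compressed via the i915_vma_compress helpers).
 */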
struct i915_vma_coredump {
	struct i915_vma_coredump *next;

	char name[20];

	u64 gtt_offset;
	u64 gtt_size;
	u32 gtt_page_sizes;

	int num_pages;
	int page_count;
	int unused;
	u32 *pages[0];
};

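/*
 * Minimal snapshot of a request: the submitting pid, context id, seqno and
 * the request's position within the ring at capture time.
 */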
struct i915_request_coredump {
	unsigned long flags;
	pid_t pid;
	u32 context;
	u32 seqno;
	u32 start;
	u32 head;
	u32 tail;
	struct i915_sched_attr sched_attr;
};

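/*
 * Per-engine snapshot: engine register state at the time of the error, the
 * offending context's identity and scheduling attributes, any captured VMAs
 * and the requests occupying the execlist ports.
 */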
struct intel_engine_coredump {
	const struct intel_engine_cs *engine;

	bool simulated;
	u32 reset_count;

	/* position of active request inside the ring */
	u32 rq_head, rq_post, rq_tail;

	/* Register state */
	u32 ccid;
	u32 start;
	u32 tail;
	u32 head;
	u32 ctl;
	u32 mode;
	u32 hws;
	u32 ipeir;
	u32 ipehr;
	u32 bbstate;
	u32 instpm;
	u32 instps;
	u64 bbaddr;
	u64 acthd;
	u32 fault_reg;
	u64 faddr;
	u32 rc_psmi; /* sleep state */
	struct intel_instdone instdone;

	struct i915_gem_context_coredump {
		char comm[TASK_COMM_LEN];
		pid_t pid;
		int active;
		int guilty;
		struct i915_sched_attr sched_attr;
	} context;

	struct i915_vma_coredump *vma;

	struct i915_request_coredump execlist[EXECLIST_MAX_PORTS];
	unsigned int num_ports;

	struct {
		u32 gfx_mode;
		union {
			u64 pdp[4];
			u32 pp_dir_base;
		};
	} vm_info;

	struct intel_engine_coredump *next;
};

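/*
 * Per-GT snapshot: GT-wide register state, fence registers, the chain of
 * engine coredumps and, where relevant, GuC/HuC firmware information and
 * the captured GuC log.
 */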
struct intel_gt_coredump {
	const struct intel_gt *_gt;
	bool awake;
	bool simulated;

	/* Generic register state */
	u32 eir;
	u32 pgtbl_er;
	u32 ier;
	u32 gtier[6], ngtier;
	u32 derrmr;
	u32 forcewake;
	u32 error; /* gen6+ */
	u32 err_int; /* gen7 */
	u32 fault_data0; /* gen8, gen9 */
	u32 fault_data1; /* gen8, gen9 */
	u32 done_reg;
	u32 gac_eco;
	u32 gam_ecochk;
	u32 gab_ctl;
	u32 gfx_mode;
	u32 gtt_cache;
	u32 aux_err; /* gen12 */
	u32 sfc_done[GEN12_SFC_DONE_MAX]; /* gen12 */
	u32 gam_done; /* gen12 */

	u32 nfence;
	u64 fence[I915_MAX_NUM_FENCES];

	struct intel_engine_coredump *engine;

	struct intel_uc_coredump {
		struct intel_uc_fw guc_fw;
		struct intel_uc_fw huc_fw;
		struct i915_vma_coredump *guc_log;
	} *uc;

	struct intel_gt_coredump *next;
};

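/*
 * Top-level GPU error state: capture timestamps, device information and the
 * chain of GT snapshots. Once stored, the state is serialised into the sgl
 * scatterlist for reading by userspace.
 */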
struct i915_gpu_coredump {
	struct kref ref;
	ktime_t time;
	ktime_t boottime;
	ktime_t uptime;
	unsigned long capture;

	struct drm_i915_private *i915;

	struct intel_gt_coredump *gt;

	char error_msg[128];
	bool simulated;
	bool wakelock;
	bool suspended;
	int iommu;
	u32 reset_count;
	u32 suspend_count;

	struct intel_device_info device_info;
	struct intel_runtime_info runtime_info;
	struct intel_driver_caps driver_caps;
	struct i915_params params;

	struct intel_overlay_error_state *overlay;
	struct intel_display_error_state *display;

	struct scatterlist *sgl, *fit;
};

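/*
 * Driver-wide error book-keeping: the first captured error state (retained
 * until userspace clears it) plus global and per-engine reset counters.
 */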
struct i915_gpu_error {
	/* For reset and error_state handling. */
	spinlock_t lock;
	/* Protected by the above dev->gpu_error.lock. */
	struct i915_gpu_coredump *first_error;

	atomic_t pending_fb_pin;

	/** Number of times the device has been reset (global) */
	atomic_t reset_count;

	/** Number of times an engine has been reset */
	atomic_t reset_engine_count[I915_NUM_ENGINES];
};

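/*
 * Text formatting buffer used by i915_error_printf() when serialising a
 * coredump into the scatterlist chain.
 */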
struct drm_i915_error_state_buf {
	struct drm_i915_private *i915;
	struct scatterlist *sgl, *cur, *end;

	char *buf;
	size_t bytes;
	size_t size;
	loff_t iter;

	int err;
};

#if IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR)

__printf(2, 3)
void i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...);

struct i915_gpu_coredump *i915_gpu_coredump(struct drm_i915_private *i915);
void i915_capture_error_state(struct drm_i915_private *i915);

struct i915_gpu_coredump *
i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp);

struct intel_gt_coredump *
intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp);

struct intel_engine_coredump *
intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp);

struct intel_engine_capture_vma *
intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
				  struct i915_request *rq,
				  gfp_t gfp);

void intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
				   struct intel_engine_capture_vma *capture,
				   struct i915_vma_compress *compress);

struct i915_vma_compress *
i915_vma_capture_prepare(struct intel_gt_coredump *gt);

void i915_vma_capture_finish(struct intel_gt_coredump *gt,
			     struct i915_vma_compress *compress);

void i915_error_state_store(struct i915_gpu_coredump *error);
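
/*
 * Illustrative capture flow (a sketch only; the canonical sequence lives in
 * i915_gpu_error.c, and error handling is elided). Roughly what
 * i915_capture_error_state() does:
 *
 *	struct i915_gpu_coredump *error;
 *
 *	error = i915_gpu_coredump(i915);
 *	i915_error_state_store(error);
 *	i915_gpu_coredump_put(error);
 *
 * A per-engine snapshot is assembled in the same spirit (ee, rq and
 * gt_coredump are illustrative locals, and GFP_ATOMIC is only an example):
 *
 *	ee = intel_engine_coredump_alloc(engine, GFP_ATOMIC);
 *	capture = intel_engine_coredump_add_request(ee, rq, GFP_ATOMIC);
 *	compress = i915_vma_capture_prepare(gt_coredump);
 *	intel_engine_coredump_add_vma(ee, capture, compress);
 *	i915_vma_capture_finish(gt_coredump, compress);
 */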

static inline struct i915_gpu_coredump *
i915_gpu_coredump_get(struct i915_gpu_coredump *gpu)
{
	kref_get(&gpu->ref);
	return gpu;
}

ssize_t
i915_gpu_coredump_copy_to_buffer(struct i915_gpu_coredump *error,
				 char *buf, loff_t offset, size_t count);

void __i915_gpu_coredump_free(struct kref *kref);
static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
{
	if (gpu)
		kref_put(&gpu->ref, __i915_gpu_coredump_free);
}

struct i915_gpu_coredump *i915_first_error_state(struct drm_i915_private *i915);
void i915_reset_error_state(struct drm_i915_private *i915);
void i915_disable_error_state(struct drm_i915_private *i915, int err);

#else

static inline void i915_capture_error_state(struct drm_i915_private *i915)
{
}

static inline struct i915_gpu_coredump *
i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp)
{
	return NULL;
}

static inline struct intel_gt_coredump *
intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp)
{
	return NULL;
}

static inline struct intel_engine_coredump *
intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp)
{
	return NULL;
}

static inline struct intel_engine_capture_vma *
intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
				  struct i915_request *rq,
				  gfp_t gfp)
{
	return NULL;
}

static inline void
intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
			      struct intel_engine_capture_vma *capture,
			      struct i915_vma_compress *compress)
{
}

static inline struct i915_vma_compress *
i915_vma_capture_prepare(struct intel_gt_coredump *gt)
{
	return NULL;
}

static inline void
i915_vma_capture_finish(struct intel_gt_coredump *gt,
			struct i915_vma_compress *compress)
{
}

static inline void
i915_error_state_store(struct i915_gpu_coredump *error)
{
}

static inline struct i915_gpu_coredump *
i915_first_error_state(struct drm_i915_private *i915)
{
	return ERR_PTR(-ENODEV);
}

static inline void i915_reset_error_state(struct drm_i915_private *i915)
{
}

static inline void i915_disable_error_state(struct drm_i915_private *i915,
					    int err)
{
}

#endif /* IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR) */

#endif /* _I915_GPU_ERROR_H_ */