xref: /openbmc/linux/drivers/gpu/drm/radeon/evergreen.c (revision a8da474e)
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <drm/drmP.h>
27 #include "radeon.h"
28 #include "radeon_asic.h"
29 #include "radeon_audio.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37 
38 /*
39  * Indirect registers accessor
40  */
41 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
42 {
43 	unsigned long flags;
44 	u32 r;
45 
46 	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
47 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
48 	r = RREG32(EVERGREEN_CG_IND_DATA);
49 	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
50 	return r;
51 }
52 
53 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
54 {
55 	unsigned long flags;
56 
57 	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
58 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
59 	WREG32(EVERGREEN_CG_IND_DATA, (v));
60 	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
61 }
62 
63 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
64 {
65 	unsigned long flags;
66 	u32 r;
67 
68 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
69 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
70 	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
71 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
72 	return r;
73 }
74 
75 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
76 {
77 	unsigned long flags;
78 
79 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
80 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
81 	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
82 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
83 }
84 
85 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
86 {
87 	unsigned long flags;
88 	u32 r;
89 
90 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
91 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
92 	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
93 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
94 	return r;
95 }
96 
97 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
98 {
99 	unsigned long flags;
100 
101 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
102 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
103 	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
104 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
105 }
106 
/* MMIO register-block offset for each of the six display controllers,
 * indexed by CRTC number. */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
116 
117 #include "clearstate_evergreen.h"
118 
/* Register offsets the RLC saves and restores on Sumo-class parts.
 * NOTE(review): presumably consumed by the RLC save/restore setup code
 * elsewhere in the driver — values are hardware-defined, do not edit. */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
203 
204 static void evergreen_gpu_init(struct radeon_device *rdev);
205 void evergreen_fini(struct radeon_device *rdev);
206 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
207 void evergreen_program_aspm(struct radeon_device *rdev);
208 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
209 				     int ring, u32 cp_int_cntl);
210 extern void cayman_vm_decode_fault(struct radeon_device *rdev,
211 				   u32 status, u32 addr);
212 void cik_init_cp_pg_table(struct radeon_device *rdev);
213 
214 extern u32 si_get_csb_size(struct radeon_device *rdev);
215 extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
216 extern u32 cik_get_csb_size(struct radeon_device *rdev);
217 extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
218 extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
219 
/* "Golden" register settings for Cypress/Hemlock, applied via
 * radeon_program_register_sequence().  Entries are triples:
 * register offset, write mask, value.  Hardware-defined; do not edit. */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
265 
/* Second golden-register sequence for Cypress/Hemlock (offset, mask,
 * value triples), applied after evergreen_golden_registers. */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
283 
/* Medium-grain clock gating (mgcg) init sequence for Cypress
 * (offset, mask, value triples).  Hardware-defined; do not edit. */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
436 
/* Medium-grain clock gating (mgcg) init sequence for Redwood
 * (offset, mask, value triples).  Hardware-defined; do not edit. */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
508 
/* Golden register settings for Cedar (offset, mask, value triples).
 * Hardware-defined; do not edit. */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
551 
/* Medium-grain clock gating (mgcg) init sequence for Cedar
 * (offset, mask, value triples).  Hardware-defined; do not edit. */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
605 
/* Medium-grain clock gating (mgcg) init sequence for Juniper
 * (offset, mask, value triples).  Hardware-defined; do not edit. */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
703 
/* Golden register settings for SuperSumo (offset, mask, value triples).
 * Hardware-defined; do not edit. */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
778 
/* Additional golden register settings for Sumo (offset, mask, value
 * triples), applied on top of the SuperSumo set.  Hardware-defined. */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
787 
/* Golden register settings for Wrestler (offset, mask, value triples).
 * Hardware-defined; do not edit. */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
840 
/* Golden register settings for Barts (offset, mask, value triples).
 * Hardware-defined; do not edit. */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
889 
/* Golden register settings for Turks (offset, mask, value triples).
 * Hardware-defined; do not edit. */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
940 
/* Golden register settings for Caicos (offset, mask, value triples).
 * Hardware-defined; do not edit. */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
991 
/**
 * evergreen_init_golden_registers - program per-ASIC "golden" settings
 *
 * @rdev: radeon_device pointer
 *
 * Selects the register/mask/value tables matching the detected ASIC
 * family and writes them via radeon_program_register_sequence().
 * Families not listed fall through to the default and are left alone.
 * Note the sequence order per family (golden regs first, then the
 * mgcg init table where one exists) is preserved as-is.
 */
static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		break;
	}
}
1077 
1078 /**
1079  * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1080  *
1081  * @rdev: radeon_device pointer
1082  * @reg: register offset in bytes
1083  * @val: register value
1084  *
1085  * Returns 0 for success or -EINVAL for an invalid register
1086  *
1087  */
1088 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1089 					u32 reg, u32 *val)
1090 {
1091 	switch (reg) {
1092 	case GRBM_STATUS:
1093 	case GRBM_STATUS_SE0:
1094 	case GRBM_STATUS_SE1:
1095 	case SRBM_STATUS:
1096 	case SRBM_STATUS2:
1097 	case DMA_STATUS_REG:
1098 	case UVD_STATUS:
1099 		*val = RREG32(reg);
1100 		return 0;
1101 	default:
1102 		return -EINVAL;
1103 	}
1104 }
1105 
/*
 * evergreen_tiling_fields - decode tiling flags into surface register encodings
 *
 * @tiling_flags: packed RADEON_TILING_EG_* fields
 * @bankw: out, bank width as an EVERGREEN_ADDR_SURF_BANK_WIDTH_* encoding
 * @bankh: out, bank height as an EVERGREEN_ADDR_SURF_BANK_HEIGHT_* encoding
 * @mtaspect: out, macro tile aspect as an EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_* encoding
 * @tile_split: out, raw tile split field (passed through unconverted)
 *
 * Extracts the raw 1/2/4/8 values from @tiling_flags and maps them to the
 * hardware register encodings.  Any unexpected raw value falls back to the
 * "1" encoding (the default: label precedes case 1 and falls through).
 */
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}
1136 
1137 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1138 			      u32 cntl_reg, u32 status_reg)
1139 {
1140 	int r, i;
1141 	struct atom_clock_dividers dividers;
1142 
1143         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1144 					   clock, false, &dividers);
1145 	if (r)
1146 		return r;
1147 
1148 	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1149 
1150 	for (i = 0; i < 100; i++) {
1151 		if (RREG32(status_reg) & DCLK_STATUS)
1152 			break;
1153 		mdelay(10);
1154 	}
1155 	if (i == 100)
1156 		return -ETIMEDOUT;
1157 
1158 	return 0;
1159 }
1160 
1161 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1162 {
1163 	int r = 0;
1164 	u32 cg_scratch = RREG32(CG_SCRATCH1);
1165 
1166 	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1167 	if (r)
1168 		goto done;
1169 	cg_scratch &= 0xffff0000;
1170 	cg_scratch |= vclk / 100; /* Mhz */
1171 
1172 	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1173 	if (r)
1174 		goto done;
1175 	cg_scratch &= 0x0000ffff;
1176 	cg_scratch |= (dclk / 100) << 16; /* Mhz */
1177 
1178 done:
1179 	WREG32(CG_SCRATCH1, cg_scratch);
1180 
1181 	return r;
1182 }
1183 
/**
 * evergreen_set_uvd_clocks - set the UVD VCLK and DCLK via the UPLL
 *
 * @rdev: radeon_device pointer
 * @vclk: requested video clock (0 leaves the PLL bypassed and asleep)
 * @dclk: requested decode clock (0 leaves the PLL bypassed and asleep)
 *
 * Switches both clocks to the bypass source, reprograms the UPLL
 * dividers for the requested frequencies, then switches back to the
 * PLL output.  The statement order (bypass, reset, dividers, settle
 * delays, ctlreq handshakes) is a fixed hardware programming sequence
 * and must not be reordered.  Returns 0 on success or a negative
 * error code.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute feedback and post dividers for the requested clocks */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	/* handshake with the PLL before programming the dividers */
	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit setting depends on the feedback divider range */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1272 
/**
 * evergreen_fix_pci_max_read_req_size - sanitize the PCIe max read request size
 *
 * @rdev: radeon_device pointer
 *
 * pcie_get_readrq() returns the current maximum read request size in
 * bytes (a power of two), so ffs(readrq) - 8 recovers the 3-bit
 * encoded MRRS field (128 bytes -> 0, 256 -> 1, ..., 4096 -> 5).
 * If the BIOS or OS left an encoding the hardware mishandles
 * (0, 6 or 7), force it back to a safe 512 bytes.
 */
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
	v = ffs(readrq) - 8;
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
}
1286 
/**
 * dce4_program_fmt - set up the FMT (dither/truncation) block for an encoder
 *
 * @encoder: drm encoder
 *
 * Programs FMT_BIT_DEPTH_CONTROL for the CRTC driving @encoder based
 * on the monitor's bpc and the connector's dither preference.  Skipped
 * for LVDS/eDP (set up by atom) and for analog DAC encoders; a bpc of
 * 10 or above needs no truncation/dithering and leaves the register
 * cleared.
 */
void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	/* no bpc information available — leave the FMT block alone */
	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}
1342 
1343 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1344 {
1345 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1346 		return true;
1347 	else
1348 		return false;
1349 }
1350 
1351 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1352 {
1353 	u32 pos1, pos2;
1354 
1355 	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1356 	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1357 
1358 	if (pos1 != pos2)
1359 		return true;
1360 	else
1361 		return false;
1362 }
1363 
1364 /**
1365  * dce4_wait_for_vblank - vblank wait asic callback.
1366  *
1367  * @rdev: radeon_device pointer
1368  * @crtc: crtc to wait for vblank on
1369  *
1370  * Wait for vblank on the requested crtc (evergreen+).
1371  */
1372 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1373 {
1374 	unsigned i = 0;
1375 
1376 	if (crtc >= rdev->num_crtc)
1377 		return;
1378 
1379 	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1380 		return;
1381 
1382 	/* depending on when we hit vblank, we may be close to active; if so,
1383 	 * wait for another frame.
1384 	 */
1385 	while (dce4_is_in_vblank(rdev, crtc)) {
1386 		if (i++ % 100 == 0) {
1387 			if (!dce4_is_counter_moving(rdev, crtc))
1388 				break;
1389 		}
1390 	}
1391 
1392 	while (!dce4_is_in_vblank(rdev, crtc)) {
1393 		if (i++ % 100 == 0) {
1394 			if (!dce4_is_counter_moving(rdev, crtc))
1395 				break;
1396 		}
1397 	}
1398 }
1399 
1400 /**
1401  * evergreen_page_flip - pageflip callback.
1402  *
1403  * @rdev: radeon_device pointer
1404  * @crtc_id: crtc to cleanup pageflip on
1405  * @crtc_base: new address of the crtc (GPU MC address)
1406  *
1407  * Triggers the actual pageflip by updating the primary
1408  * surface base address (evergreen+).
1409  */
1410 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1411 {
1412 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1413 
1414 	/* update the scanout addresses */
1415 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1416 	       upper_32_bits(crtc_base));
1417 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1418 	       (u32)crtc_base);
1419 	/* post the write */
1420 	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
1421 }
1422 
1423 /**
1424  * evergreen_page_flip_pending - check if page flip is still pending
1425  *
1426  * @rdev: radeon_device pointer
1427  * @crtc_id: crtc to check
1428  *
1429  * Returns the current update pending status.
1430  */
1431 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1432 {
1433 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1434 
1435 	/* Return current update_pending status: */
1436 	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1437 		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1438 }
1439 
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper: combine the raw TS0 ADC reading (halved before
		 * use, so presumably in half-degree units — confirm) with
		 * the fused trim offset; bit 8 of toffset is its sign bit. */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		/* other evergreen parts report ASIC_T directly:
		 * bit 0x400 set -> report -256, bit 0x200 -> report 255,
		 * bit 0x100 -> negative reading, sign-extend the low 9 bits,
		 * otherwise the low 8 bits are the positive reading */
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* value is in half degrees C; scale to millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1478 
1479 int sumo_get_temp(struct radeon_device *rdev)
1480 {
1481 	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1482 	int actual_temp = temp - 49;
1483 
1484 	return actual_temp * 1000;
1485 }
1486 
1487 /**
1488  * sumo_pm_init_profile - Initialize power profiles callback.
1489  *
1490  * @rdev: radeon_device pointer
1491  *
1492  * Initialize the power states used in profile mode
1493  * (sumo, trinity, SI).
1494  * Used for profile mode only.
1495  */
1496 void sumo_pm_init_profile(struct radeon_device *rdev)
1497 {
1498 	int idx;
1499 
1500 	/* default */
1501 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1502 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1503 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1504 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1505 
1506 	/* low,mid sh/mh */
1507 	if (rdev->flags & RADEON_IS_MOBILITY)
1508 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1509 	else
1510 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1511 
1512 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1513 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1514 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1515 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1516 
1517 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1518 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1519 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1520 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1521 
1522 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1523 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1524 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1525 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1526 
1527 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1528 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1529 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1530 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1531 
1532 	/* high sh/mh */
1533 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1534 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1535 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1536 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1537 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1538 		rdev->pm.power_state[idx].num_clock_modes - 1;
1539 
1540 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1541 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1542 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1543 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1544 		rdev->pm.power_state[idx].num_clock_modes - 1;
1545 }
1546 
1547 /**
1548  * btc_pm_init_profile - Initialize power profiles callback.
1549  *
1550  * @rdev: radeon_device pointer
1551  *
1552  * Initialize the power states used in profile mode
1553  * (BTC, cayman).
1554  * Used for profile mode only.
1555  */
1556 void btc_pm_init_profile(struct radeon_device *rdev)
1557 {
1558 	int idx;
1559 
1560 	/* default */
1561 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1562 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1563 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1564 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1565 	/* starting with BTC, there is one state that is used for both
1566 	 * MH and SH.  Difference is that we always use the high clock index for
1567 	 * mclk.
1568 	 */
1569 	if (rdev->flags & RADEON_IS_MOBILITY)
1570 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1571 	else
1572 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1573 	/* low sh */
1574 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1575 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1576 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1577 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1578 	/* mid sh */
1579 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1580 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1581 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1582 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1583 	/* high sh */
1584 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1585 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1586 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1587 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1588 	/* low mh */
1589 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1590 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1591 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1592 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1593 	/* mid mh */
1594 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1595 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1596 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1597 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1598 	/* high mh */
1599 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1600 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1601 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1602 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1603 }
1604 
1605 /**
1606  * evergreen_pm_misc - set additional pm hw parameters callback.
1607  *
1608  * @rdev: radeon_device pointer
1609  *
1610  * Set non-clock parameters associated with a power state
1611  * (voltage, etc.) (evergreen+).
1612  */
1613 void evergreen_pm_misc(struct radeon_device *rdev)
1614 {
1615 	int req_ps_idx = rdev->pm.requested_power_state_index;
1616 	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1617 	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1618 	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1619 
1620 	if (voltage->type == VOLTAGE_SW) {
1621 		/* 0xff0x are flags rather then an actual voltage */
1622 		if ((voltage->voltage & 0xff00) == 0xff00)
1623 			return;
1624 		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1625 			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1626 			rdev->pm.current_vddc = voltage->voltage;
1627 			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1628 		}
1629 
1630 		/* starting with BTC, there is one state that is used for both
1631 		 * MH and SH.  Difference is that we always use the high clock index for
1632 		 * mclk and vddci.
1633 		 */
1634 		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1635 		    (rdev->family >= CHIP_BARTS) &&
1636 		    rdev->pm.active_crtc_count &&
1637 		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1638 		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1639 			voltage = &rdev->pm.power_state[req_ps_idx].
1640 				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1641 
1642 		/* 0xff0x are flags rather then an actual voltage */
1643 		if ((voltage->vddci & 0xff00) == 0xff00)
1644 			return;
1645 		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1646 			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1647 			rdev->pm.current_vddci = voltage->vddci;
1648 			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1649 		}
1650 	}
1651 }
1652 
1653 /**
1654  * evergreen_pm_prepare - pre-power state change callback.
1655  *
1656  * @rdev: radeon_device pointer
1657  *
1658  * Prepare for a power state change (evergreen+).
1659  */
1660 void evergreen_pm_prepare(struct radeon_device *rdev)
1661 {
1662 	struct drm_device *ddev = rdev->ddev;
1663 	struct drm_crtc *crtc;
1664 	struct radeon_crtc *radeon_crtc;
1665 	u32 tmp;
1666 
1667 	/* disable any active CRTCs */
1668 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1669 		radeon_crtc = to_radeon_crtc(crtc);
1670 		if (radeon_crtc->enabled) {
1671 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1672 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1673 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1674 		}
1675 	}
1676 }
1677 
1678 /**
1679  * evergreen_pm_finish - post-power state change callback.
1680  *
1681  * @rdev: radeon_device pointer
1682  *
1683  * Clean up after a power state change (evergreen+).
1684  */
1685 void evergreen_pm_finish(struct radeon_device *rdev)
1686 {
1687 	struct drm_device *ddev = rdev->ddev;
1688 	struct drm_crtc *crtc;
1689 	struct radeon_crtc *radeon_crtc;
1690 	u32 tmp;
1691 
1692 	/* enable any active CRTCs */
1693 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1694 		radeon_crtc = to_radeon_crtc(crtc);
1695 		if (radeon_crtc->enabled) {
1696 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1697 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1698 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1699 		}
1700 	}
1701 }
1702 
1703 /**
1704  * evergreen_hpd_sense - hpd sense callback.
1705  *
1706  * @rdev: radeon_device pointer
1707  * @hpd: hpd (hotplug detect) pin
1708  *
1709  * Checks if a digital monitor is connected (evergreen+).
1710  * Returns true if connected, false if not connected.
1711  */
1712 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1713 {
1714 	bool connected = false;
1715 
1716 	switch (hpd) {
1717 	case RADEON_HPD_1:
1718 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1719 			connected = true;
1720 		break;
1721 	case RADEON_HPD_2:
1722 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1723 			connected = true;
1724 		break;
1725 	case RADEON_HPD_3:
1726 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1727 			connected = true;
1728 		break;
1729 	case RADEON_HPD_4:
1730 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1731 			connected = true;
1732 		break;
1733 	case RADEON_HPD_5:
1734 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1735 			connected = true;
1736 		break;
1737 	case RADEON_HPD_6:
1738 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1739 			connected = true;
1740 		break;
1741 	default:
1742 		break;
1743 	}
1744 
1745 	return connected;
1746 }
1747 
1748 /**
1749  * evergreen_hpd_set_polarity - hpd set polarity callback.
1750  *
1751  * @rdev: radeon_device pointer
1752  * @hpd: hpd (hotplug detect) pin
1753  *
1754  * Set the polarity of the hpd pin (evergreen+).
1755  */
1756 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1757 				enum radeon_hpd_id hpd)
1758 {
1759 	u32 tmp;
1760 	bool connected = evergreen_hpd_sense(rdev, hpd);
1761 
1762 	switch (hpd) {
1763 	case RADEON_HPD_1:
1764 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1765 		if (connected)
1766 			tmp &= ~DC_HPDx_INT_POLARITY;
1767 		else
1768 			tmp |= DC_HPDx_INT_POLARITY;
1769 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1770 		break;
1771 	case RADEON_HPD_2:
1772 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1773 		if (connected)
1774 			tmp &= ~DC_HPDx_INT_POLARITY;
1775 		else
1776 			tmp |= DC_HPDx_INT_POLARITY;
1777 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1778 		break;
1779 	case RADEON_HPD_3:
1780 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1781 		if (connected)
1782 			tmp &= ~DC_HPDx_INT_POLARITY;
1783 		else
1784 			tmp |= DC_HPDx_INT_POLARITY;
1785 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1786 		break;
1787 	case RADEON_HPD_4:
1788 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1789 		if (connected)
1790 			tmp &= ~DC_HPDx_INT_POLARITY;
1791 		else
1792 			tmp |= DC_HPDx_INT_POLARITY;
1793 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1794 		break;
1795 	case RADEON_HPD_5:
1796 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1797 		if (connected)
1798 			tmp &= ~DC_HPDx_INT_POLARITY;
1799 		else
1800 			tmp |= DC_HPDx_INT_POLARITY;
1801 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1802 			break;
1803 	case RADEON_HPD_6:
1804 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1805 		if (connected)
1806 			tmp &= ~DC_HPDx_INT_POLARITY;
1807 		else
1808 			tmp |= DC_HPDx_INT_POLARITY;
1809 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1810 		break;
1811 	default:
1812 		break;
1813 	}
1814 }
1815 
1816 /**
1817  * evergreen_hpd_init - hpd setup callback.
1818  *
1819  * @rdev: radeon_device pointer
1820  *
1821  * Setup the hpd pins used by the card (evergreen+).
1822  * Enable the pin, set the polarity, and enable the hpd interrupts.
1823  */
1824 void evergreen_hpd_init(struct radeon_device *rdev)
1825 {
1826 	struct drm_device *dev = rdev->ddev;
1827 	struct drm_connector *connector;
1828 	unsigned enabled = 0;
1829 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1830 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1831 
1832 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1833 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1834 
1835 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1836 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1837 			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1838 			 * aux dp channel on imac and help (but not completely fix)
1839 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1840 			 * also avoid interrupt storms during dpms.
1841 			 */
1842 			continue;
1843 		}
1844 		switch (radeon_connector->hpd.hpd) {
1845 		case RADEON_HPD_1:
1846 			WREG32(DC_HPD1_CONTROL, tmp);
1847 			break;
1848 		case RADEON_HPD_2:
1849 			WREG32(DC_HPD2_CONTROL, tmp);
1850 			break;
1851 		case RADEON_HPD_3:
1852 			WREG32(DC_HPD3_CONTROL, tmp);
1853 			break;
1854 		case RADEON_HPD_4:
1855 			WREG32(DC_HPD4_CONTROL, tmp);
1856 			break;
1857 		case RADEON_HPD_5:
1858 			WREG32(DC_HPD5_CONTROL, tmp);
1859 			break;
1860 		case RADEON_HPD_6:
1861 			WREG32(DC_HPD6_CONTROL, tmp);
1862 			break;
1863 		default:
1864 			break;
1865 		}
1866 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1867 		enabled |= 1 << radeon_connector->hpd.hpd;
1868 	}
1869 	radeon_irq_kms_enable_hpd(rdev, enabled);
1870 }
1871 
1872 /**
1873  * evergreen_hpd_fini - hpd tear down callback.
1874  *
1875  * @rdev: radeon_device pointer
1876  *
1877  * Tear down the hpd pins used by the card (evergreen+).
1878  * Disable the hpd interrupts.
1879  */
1880 void evergreen_hpd_fini(struct radeon_device *rdev)
1881 {
1882 	struct drm_device *dev = rdev->ddev;
1883 	struct drm_connector *connector;
1884 	unsigned disabled = 0;
1885 
1886 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1887 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1888 		switch (radeon_connector->hpd.hpd) {
1889 		case RADEON_HPD_1:
1890 			WREG32(DC_HPD1_CONTROL, 0);
1891 			break;
1892 		case RADEON_HPD_2:
1893 			WREG32(DC_HPD2_CONTROL, 0);
1894 			break;
1895 		case RADEON_HPD_3:
1896 			WREG32(DC_HPD3_CONTROL, 0);
1897 			break;
1898 		case RADEON_HPD_4:
1899 			WREG32(DC_HPD4_CONTROL, 0);
1900 			break;
1901 		case RADEON_HPD_5:
1902 			WREG32(DC_HPD5_CONTROL, 0);
1903 			break;
1904 		case RADEON_HPD_6:
1905 			WREG32(DC_HPD6_CONTROL, 0);
1906 			break;
1907 		default:
1908 			break;
1909 		}
1910 		disabled |= 1 << radeon_connector->hpd.hpd;
1911 	}
1912 	radeon_irq_kms_disable_hpd(rdev, disabled);
1913 }
1914 
1915 /* watermark setup */
1916 
/*
 * evergreen_line_buffer_adjust - program the line buffer split for a crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program
 * @mode: mode set on this crtc (NULL if disabled)
 * @other_mode: mode set on the paired crtc sharing the line buffer (NULL if disabled)
 *
 * Picks a line buffer partitioning for the crtc based on whether its
 * line-buffer partner is active, programs DC_LB_MEMORY_SPLIT (and the
 * DMIF buffer allocation on DCE4.1/DCE5), and returns the number of
 * line buffer entries allocated to this crtc (0 if the crtc is disabled).
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The paritioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		/* crtc disabled: give it no line buffer */
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/DCE5 additionally require a DMIF buffer allocation per pipe;
	 * poll until the hardware acknowledges the allocation (bounded wait). */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the chosen split back into an lb size; DCE5 parts have a
	 * larger line buffer than earlier evergreen parts */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
2007 
2008 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2009 {
2010 	u32 tmp = RREG32(MC_SHARED_CHMAP);
2011 
2012 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2013 	case 0:
2014 	default:
2015 		return 1;
2016 	case 1:
2017 		return 2;
2018 	case 2:
2019 		return 4;
2020 	case 3:
2021 		return 8;
2022 	}
2023 }
2024 
/* Per-head inputs to the evergreen display watermark calculations.
 * Filled in by evergreen_program_watermarks() for both the high-clock
 * and low-clock cases before computing latency watermarks. */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
2040 
2041 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2042 {
2043 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2044 	fixed20_12 dram_efficiency; /* 0.7 */
2045 	fixed20_12 yclk, dram_channels, bandwidth;
2046 	fixed20_12 a;
2047 
2048 	a.full = dfixed_const(1000);
2049 	yclk.full = dfixed_const(wm->yclk);
2050 	yclk.full = dfixed_div(yclk, a);
2051 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2052 	a.full = dfixed_const(10);
2053 	dram_efficiency.full = dfixed_const(7);
2054 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2055 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2056 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2057 
2058 	return dfixed_trunc(bandwidth);
2059 }
2060 
2061 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2062 {
2063 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2064 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2065 	fixed20_12 yclk, dram_channels, bandwidth;
2066 	fixed20_12 a;
2067 
2068 	a.full = dfixed_const(1000);
2069 	yclk.full = dfixed_const(wm->yclk);
2070 	yclk.full = dfixed_div(yclk, a);
2071 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2072 	a.full = dfixed_const(10);
2073 	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2074 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2075 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2076 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2077 
2078 	return dfixed_trunc(bandwidth);
2079 }
2080 
2081 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2082 {
2083 	/* Calculate the display Data return Bandwidth */
2084 	fixed20_12 return_efficiency; /* 0.8 */
2085 	fixed20_12 sclk, bandwidth;
2086 	fixed20_12 a;
2087 
2088 	a.full = dfixed_const(1000);
2089 	sclk.full = dfixed_const(wm->sclk);
2090 	sclk.full = dfixed_div(sclk, a);
2091 	a.full = dfixed_const(10);
2092 	return_efficiency.full = dfixed_const(8);
2093 	return_efficiency.full = dfixed_div(return_efficiency, a);
2094 	a.full = dfixed_const(32);
2095 	bandwidth.full = dfixed_mul(a, sclk);
2096 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2097 
2098 	return dfixed_trunc(bandwidth);
2099 }
2100 
2101 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2102 {
2103 	/* Calculate the DMIF Request Bandwidth */
2104 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2105 	fixed20_12 disp_clk, bandwidth;
2106 	fixed20_12 a;
2107 
2108 	a.full = dfixed_const(1000);
2109 	disp_clk.full = dfixed_const(wm->disp_clk);
2110 	disp_clk.full = dfixed_div(disp_clk, a);
2111 	a.full = dfixed_const(10);
2112 	disp_clk_request_efficiency.full = dfixed_const(8);
2113 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2114 	a.full = dfixed_const(32);
2115 	bandwidth.full = dfixed_mul(a, disp_clk);
2116 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2117 
2118 	return dfixed_trunc(bandwidth);
2119 }
2120 
2121 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2122 {
2123 	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2124 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2125 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2126 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2127 
2128 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2129 }
2130 
2131 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2132 {
2133 	/* Calculate the display mode Average Bandwidth
2134 	 * DisplayMode should contain the source and destination dimensions,
2135 	 * timing, etc.
2136 	 */
2137 	fixed20_12 bpp;
2138 	fixed20_12 line_time;
2139 	fixed20_12 src_width;
2140 	fixed20_12 bandwidth;
2141 	fixed20_12 a;
2142 
2143 	a.full = dfixed_const(1000);
2144 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2145 	line_time.full = dfixed_div(line_time, a);
2146 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2147 	src_width.full = dfixed_const(wm->src_width);
2148 	bandwidth.full = dfixed_mul(src_width, bpp);
2149 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2150 	bandwidth.full = dfixed_div(bandwidth, line_time);
2151 
2152 	return dfixed_trunc(bandwidth);
2153 }
2154 
/*
 * evergreen_latency_watermark - worst-case latency watermark in ns
 *
 * @wm: watermark parameters for the head
 *
 * Estimates the total latency the line buffer must hide: memory
 * controller latency, the time other heads spend returning data, and
 * the display pipe latency.  If the line buffer cannot be refilled
 * within the active time, the shortfall is added on top.
 * Returns 0 when no heads are active.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calcualte the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* time for one 512-byte, 8-chunk worst-case burst to return */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling, many taps or interlace need more source lines
	 * buffered per destination line */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* display-clock-limited fill rate: disp_clk(MHz) * bytes_per_pixel */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line buffer fills at the lesser of the two rates */
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill the buffered source lines at lb_fill_bw */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2207 
2208 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2209 {
2210 	if (evergreen_average_bandwidth(wm) <=
2211 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2212 		return true;
2213 	else
2214 		return false;
2215 };
2216 
2217 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2218 {
2219 	if (evergreen_average_bandwidth(wm) <=
2220 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2221 		return true;
2222 	else
2223 		return false;
2224 };
2225 
2226 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2227 {
2228 	u32 lb_partitions = wm->lb_size / wm->src_width;
2229 	u32 line_time = wm->active_time + wm->blank_time;
2230 	u32 latency_tolerant_lines;
2231 	u32 latency_hiding;
2232 	fixed20_12 a;
2233 
2234 	a.full = dfixed_const(1);
2235 	if (wm->vsc.full > a.full)
2236 		latency_tolerant_lines = 1;
2237 	else {
2238 		if (lb_partitions <= (wm->vtaps + 1))
2239 			latency_tolerant_lines = 1;
2240 		else
2241 			latency_tolerant_lines = 2;
2242 	}
2243 
2244 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2245 
2246 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2247 		return true;
2248 	else
2249 		return false;
2250 }
2251 
/*
 * evergreen_program_watermarks - program the display watermarks for a crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc to program
 * @lb_size: line buffer entries allocated to this crtc
 * @num_heads: number of active display heads
 *
 * Computes latency watermarks for both the high-clock (wm A) and
 * low-clock (wm B) cases, programs them into the pipe's arbitration
 * registers, derives the display priority marks, and saves the values
 * for DPM.  A disabled crtc gets PRIORITY_OFF and zero watermarks.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* pixel period in ns; line_time capped to the 16-bit field */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A = watermark_a * pixel clock * hsc / 16000 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B computed the same way from watermark_b */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2406 
2407 /**
2408  * evergreen_bandwidth_update - update display watermarks callback.
2409  *
2410  * @rdev: radeon_device pointer
2411  *
2412  * Update the display watermarks based on the requested mode(s)
2413  * (evergreen+).
2414  */
2415 void evergreen_bandwidth_update(struct radeon_device *rdev)
2416 {
2417 	struct drm_display_mode *mode0 = NULL;
2418 	struct drm_display_mode *mode1 = NULL;
2419 	u32 num_heads = 0, lb_size;
2420 	int i;
2421 
2422 	if (!rdev->mode_info.mode_config_initialized)
2423 		return;
2424 
2425 	radeon_update_display_priority(rdev);
2426 
2427 	for (i = 0; i < rdev->num_crtc; i++) {
2428 		if (rdev->mode_info.crtcs[i]->base.enabled)
2429 			num_heads++;
2430 	}
2431 	for (i = 0; i < rdev->num_crtc; i += 2) {
2432 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2433 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2434 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2435 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2436 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2437 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2438 	}
2439 }
2440 
2441 /**
2442  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2443  *
2444  * @rdev: radeon_device pointer
2445  *
2446  * Wait for the MC (memory controller) to be idle.
2447  * (evergreen+).
2448  * Returns 0 if the MC is idle, -1 if not.
2449  */
2450 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2451 {
2452 	unsigned i;
2453 	u32 tmp;
2454 
2455 	for (i = 0; i < rdev->usec_timeout; i++) {
2456 		/* read MC_STATUS */
2457 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2458 		if (!tmp)
2459 			return 0;
2460 		udelay(1);
2461 	}
2462 	return -1;
2463 }
2464 
2465 /*
2466  * GART
2467  */
2468 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2469 {
2470 	unsigned i;
2471 	u32 tmp;
2472 
2473 	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2474 
2475 	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2476 	for (i = 0; i < rdev->usec_timeout; i++) {
2477 		/* read MC_STATUS */
2478 		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2479 		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2480 		if (tmp == 2) {
2481 			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2482 			return;
2483 		}
2484 		if (tmp) {
2485 			return;
2486 		}
2487 		udelay(1);
2488 	}
2489 }
2490 
/*
 * evergreen_pcie_gart_enable - set up and enable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Pins the GART page table in VRAM, programs the VM L2 cache and L1
 * TLB controls, points VM context0 at the GTT range and page table,
 * and flushes the TLB.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	/* IGP parts use the FUS_ register block for the MD L1 TLBs */
	if (rdev->flags & RADEON_IS_IGP) {
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these families have a fourth MD L1 TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* point VM context0 at the GTT aperture and the page table */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faulting accesses are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2548 
/*
 * evergreen_pcie_gart_disable - disable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Disables both VM contexts, reprograms the L2 cache and L1 TLBs with
 * the enable bits cleared, and unpins the page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: ENABLE_L1_TLB is deliberately not set here */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2573 
/*
 * evergreen_pcie_gart_fini - tear down the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Disables the GART, frees the page table VRAM and releases the
 * GART bookkeeping structures.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2580 
2581 
/*
 * evergreen_agp_enable - configure the MC for AGP operation
 *
 * @rdev: radeon_device pointer
 *
 * Programs the VM L2 cache and L1 TLBs like the GART path but leaves
 * both VM contexts disabled, since address translation is handled by
 * the AGP aperture instead of GPU page tables.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page tables: both VM contexts stay disabled */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2607 
/**
 * evergreen_mc_stop - stop MC access and save the display state
 *
 * @rdev: radeon_device pointer
 * @save: state saved here for evergreen_mc_resume()
 *
 * Disables VGA rendering, blanks the active crtcs and puts the memory
 * controller into blackout mode so VRAM layout can be changed safely.
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via CRTC_BLANK_CONTROL under update lock */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE(review): deliberately marked false here so the lock loop
			 * below and evergreen_mc_resume() skip this crtc — part of the
			 * EFI workaround above; do not "fix" to true. */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2694 
/**
 * evergreen_mc_resume - restore MC access and the display state
 *
 * @rdev: radeon_device pointer
 * @save: state previously captured by evergreen_mc_stop()
 *
 * Repoints the crtc surfaces at the (possibly relocated) VRAM start,
 * unlocks the double buffered registers, takes the MC out of blackout,
 * unblanks the crtcs that were enabled and restores VGA rendering.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* set MASTER_UPDATE_MODE to "update at vsync" (0x3) */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the surface update to latch (bounded) */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank via CRTC_BLANK_CONTROL under update lock */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2783 
/**
 * evergreen_mc_program - program the memory controller address spaces
 * @rdev: radeon_device pointer
 *
 * Stops the MC clients (displays), reprograms the system aperture,
 * the framebuffer location and the AGP aperture in the memory
 * controller, then restarts the MC clients.  Finally disables the VGA
 * renderer so it cannot overwrite driver-owned VRAM objects.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	/* clear the 32 HDP surface register slots (0x2c14..0x2c24, stride 0x18) */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	/* halt the MC clients before changing the MC address configuration */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* the system aperture must cover both VRAM and the AGP/GTT
		 * range, so it spans from whichever starts lower to whichever
		 * ends higher; addresses are programmed in 4KB units (>> 12).
		 */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	/* out-of-range MC accesses are redirected to the scratch page */
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* fusion parts store the FB offset in fuse registers;
		 * keep the low 20 bits and patch in the start/end nibbles.
		 */
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location is packed as start/end in 16MB units (>> 24) */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		/* AGP top/bot are in 64KB units, base in 4MB units */
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* no AGP: program an empty aperture (BOT > TOP) */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2860 
2861 /*
2862  * CP.
2863  */
/**
 * evergreen_ring_ib_execute - schedule an indirect buffer on a ring
 * @rdev: radeon_device pointer
 * @ib: the IB (indirect/command buffer) to execute
 *
 * Emits the packets that make the CP fetch and run @ib: a mode switch
 * to the DX10/11 packet format, an optional write that records where
 * the read pointer will be once this IB's packets are consumed, and
 * the INDIRECT_BUFFER packet itself.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* +3 for the SET_CONFIG_REG packet below, +4 for the
		 * INDIRECT_BUFFER packet emitted at the end of this function.
		 */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* +5 for the MEM_WRITE packet below, +4 for the
		 * INDIRECT_BUFFER packet emitted at the end of this function.
		 */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		/* upper address bits plus a data-select flag (bit 18) */
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	/* hand the IB's GPU address and size (in dwords) to the CP */
	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2897 
2898 
2899 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2900 {
2901 	const __be32 *fw_data;
2902 	int i;
2903 
2904 	if (!rdev->me_fw || !rdev->pfp_fw)
2905 		return -EINVAL;
2906 
2907 	r700_cp_stop(rdev);
2908 	WREG32(CP_RB_CNTL,
2909 #ifdef __BIG_ENDIAN
2910 	       BUF_SWAP_32BIT |
2911 #endif
2912 	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2913 
2914 	fw_data = (const __be32 *)rdev->pfp_fw->data;
2915 	WREG32(CP_PFP_UCODE_ADDR, 0);
2916 	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2917 		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2918 	WREG32(CP_PFP_UCODE_ADDR, 0);
2919 
2920 	fw_data = (const __be32 *)rdev->me_fw->data;
2921 	WREG32(CP_ME_RAM_WADDR, 0);
2922 	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2923 		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2924 
2925 	WREG32(CP_PFP_UCODE_ADDR, 0);
2926 	WREG32(CP_ME_RAM_WADDR, 0);
2927 	WREG32(CP_ME_RAM_RADDR, 0);
2928 	return 0;
2929 }
2930 
/**
 * evergreen_cp_start - initialize the CP micro engine
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet, starts the ME, and then loads the
 * golden (clear-state) context plus a few default register values onto
 * the gfx ring.
 *
 * Returns 0 on success, or the radeon_ring_lock() error code on failure.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* 7 dwords: ME_INITIALIZE header + 6 payload dwords */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* NOTE(review): 0xff presumably releases the ME halt bits after
	 * microcode load — confirm against the CP_ME_CNTL register spec.
	 */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* default state plus the 19 extra dwords emitted below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
2996 
/**
 * evergreen_cp_resume - bring up the CP and its ring buffer
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP (and the blocks that must be reset with it),
 * programs the ring buffer size, read/write pointers, writeback
 * addresses and base address, then starts the CP and runs a ring test.
 *
 * Returns 0 on success, or the ring test error code on failure.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* read back to post the write */
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	/* the HW takes the ring size as a log2 value */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	/* RB_RPTR_WR_ENA is set temporarily so CP_RB_RPTR_WR takes effect;
	 * the final CP_RB_CNTL value (without it) is restored below.
	 */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: stop the CP from updating memory copies */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
3063 
3064 /*
3065  * Core functions
3066  */
3067 static void evergreen_gpu_init(struct radeon_device *rdev)
3068 {
3069 	u32 gb_addr_config;
3070 	u32 mc_shared_chmap, mc_arb_ramcfg;
3071 	u32 sx_debug_1;
3072 	u32 smx_dc_ctl0;
3073 	u32 sq_config;
3074 	u32 sq_lds_resource_mgmt;
3075 	u32 sq_gpr_resource_mgmt_1;
3076 	u32 sq_gpr_resource_mgmt_2;
3077 	u32 sq_gpr_resource_mgmt_3;
3078 	u32 sq_thread_resource_mgmt;
3079 	u32 sq_thread_resource_mgmt_2;
3080 	u32 sq_stack_resource_mgmt_1;
3081 	u32 sq_stack_resource_mgmt_2;
3082 	u32 sq_stack_resource_mgmt_3;
3083 	u32 vgt_cache_invalidation;
3084 	u32 hdp_host_path_cntl, tmp;
3085 	u32 disabled_rb_mask;
3086 	int i, j, ps_thread_count;
3087 
3088 	switch (rdev->family) {
3089 	case CHIP_CYPRESS:
3090 	case CHIP_HEMLOCK:
3091 		rdev->config.evergreen.num_ses = 2;
3092 		rdev->config.evergreen.max_pipes = 4;
3093 		rdev->config.evergreen.max_tile_pipes = 8;
3094 		rdev->config.evergreen.max_simds = 10;
3095 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3096 		rdev->config.evergreen.max_gprs = 256;
3097 		rdev->config.evergreen.max_threads = 248;
3098 		rdev->config.evergreen.max_gs_threads = 32;
3099 		rdev->config.evergreen.max_stack_entries = 512;
3100 		rdev->config.evergreen.sx_num_of_sets = 4;
3101 		rdev->config.evergreen.sx_max_export_size = 256;
3102 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3103 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3104 		rdev->config.evergreen.max_hw_contexts = 8;
3105 		rdev->config.evergreen.sq_num_cf_insts = 2;
3106 
3107 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3108 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3109 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3110 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3111 		break;
3112 	case CHIP_JUNIPER:
3113 		rdev->config.evergreen.num_ses = 1;
3114 		rdev->config.evergreen.max_pipes = 4;
3115 		rdev->config.evergreen.max_tile_pipes = 4;
3116 		rdev->config.evergreen.max_simds = 10;
3117 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3118 		rdev->config.evergreen.max_gprs = 256;
3119 		rdev->config.evergreen.max_threads = 248;
3120 		rdev->config.evergreen.max_gs_threads = 32;
3121 		rdev->config.evergreen.max_stack_entries = 512;
3122 		rdev->config.evergreen.sx_num_of_sets = 4;
3123 		rdev->config.evergreen.sx_max_export_size = 256;
3124 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3125 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3126 		rdev->config.evergreen.max_hw_contexts = 8;
3127 		rdev->config.evergreen.sq_num_cf_insts = 2;
3128 
3129 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3130 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3131 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3132 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3133 		break;
3134 	case CHIP_REDWOOD:
3135 		rdev->config.evergreen.num_ses = 1;
3136 		rdev->config.evergreen.max_pipes = 4;
3137 		rdev->config.evergreen.max_tile_pipes = 4;
3138 		rdev->config.evergreen.max_simds = 5;
3139 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3140 		rdev->config.evergreen.max_gprs = 256;
3141 		rdev->config.evergreen.max_threads = 248;
3142 		rdev->config.evergreen.max_gs_threads = 32;
3143 		rdev->config.evergreen.max_stack_entries = 256;
3144 		rdev->config.evergreen.sx_num_of_sets = 4;
3145 		rdev->config.evergreen.sx_max_export_size = 256;
3146 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3147 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3148 		rdev->config.evergreen.max_hw_contexts = 8;
3149 		rdev->config.evergreen.sq_num_cf_insts = 2;
3150 
3151 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3152 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3153 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3154 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3155 		break;
3156 	case CHIP_CEDAR:
3157 	default:
3158 		rdev->config.evergreen.num_ses = 1;
3159 		rdev->config.evergreen.max_pipes = 2;
3160 		rdev->config.evergreen.max_tile_pipes = 2;
3161 		rdev->config.evergreen.max_simds = 2;
3162 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3163 		rdev->config.evergreen.max_gprs = 256;
3164 		rdev->config.evergreen.max_threads = 192;
3165 		rdev->config.evergreen.max_gs_threads = 16;
3166 		rdev->config.evergreen.max_stack_entries = 256;
3167 		rdev->config.evergreen.sx_num_of_sets = 4;
3168 		rdev->config.evergreen.sx_max_export_size = 128;
3169 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3170 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3171 		rdev->config.evergreen.max_hw_contexts = 4;
3172 		rdev->config.evergreen.sq_num_cf_insts = 1;
3173 
3174 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3175 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3176 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3177 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3178 		break;
3179 	case CHIP_PALM:
3180 		rdev->config.evergreen.num_ses = 1;
3181 		rdev->config.evergreen.max_pipes = 2;
3182 		rdev->config.evergreen.max_tile_pipes = 2;
3183 		rdev->config.evergreen.max_simds = 2;
3184 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3185 		rdev->config.evergreen.max_gprs = 256;
3186 		rdev->config.evergreen.max_threads = 192;
3187 		rdev->config.evergreen.max_gs_threads = 16;
3188 		rdev->config.evergreen.max_stack_entries = 256;
3189 		rdev->config.evergreen.sx_num_of_sets = 4;
3190 		rdev->config.evergreen.sx_max_export_size = 128;
3191 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3192 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3193 		rdev->config.evergreen.max_hw_contexts = 4;
3194 		rdev->config.evergreen.sq_num_cf_insts = 1;
3195 
3196 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3197 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3198 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3199 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3200 		break;
3201 	case CHIP_SUMO:
3202 		rdev->config.evergreen.num_ses = 1;
3203 		rdev->config.evergreen.max_pipes = 4;
3204 		rdev->config.evergreen.max_tile_pipes = 4;
3205 		if (rdev->pdev->device == 0x9648)
3206 			rdev->config.evergreen.max_simds = 3;
3207 		else if ((rdev->pdev->device == 0x9647) ||
3208 			 (rdev->pdev->device == 0x964a))
3209 			rdev->config.evergreen.max_simds = 4;
3210 		else
3211 			rdev->config.evergreen.max_simds = 5;
3212 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3213 		rdev->config.evergreen.max_gprs = 256;
3214 		rdev->config.evergreen.max_threads = 248;
3215 		rdev->config.evergreen.max_gs_threads = 32;
3216 		rdev->config.evergreen.max_stack_entries = 256;
3217 		rdev->config.evergreen.sx_num_of_sets = 4;
3218 		rdev->config.evergreen.sx_max_export_size = 256;
3219 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3220 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3221 		rdev->config.evergreen.max_hw_contexts = 8;
3222 		rdev->config.evergreen.sq_num_cf_insts = 2;
3223 
3224 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3225 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3226 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3227 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3228 		break;
3229 	case CHIP_SUMO2:
3230 		rdev->config.evergreen.num_ses = 1;
3231 		rdev->config.evergreen.max_pipes = 4;
3232 		rdev->config.evergreen.max_tile_pipes = 4;
3233 		rdev->config.evergreen.max_simds = 2;
3234 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3235 		rdev->config.evergreen.max_gprs = 256;
3236 		rdev->config.evergreen.max_threads = 248;
3237 		rdev->config.evergreen.max_gs_threads = 32;
3238 		rdev->config.evergreen.max_stack_entries = 512;
3239 		rdev->config.evergreen.sx_num_of_sets = 4;
3240 		rdev->config.evergreen.sx_max_export_size = 256;
3241 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3242 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3243 		rdev->config.evergreen.max_hw_contexts = 4;
3244 		rdev->config.evergreen.sq_num_cf_insts = 2;
3245 
3246 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3247 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3248 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3249 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3250 		break;
3251 	case CHIP_BARTS:
3252 		rdev->config.evergreen.num_ses = 2;
3253 		rdev->config.evergreen.max_pipes = 4;
3254 		rdev->config.evergreen.max_tile_pipes = 8;
3255 		rdev->config.evergreen.max_simds = 7;
3256 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3257 		rdev->config.evergreen.max_gprs = 256;
3258 		rdev->config.evergreen.max_threads = 248;
3259 		rdev->config.evergreen.max_gs_threads = 32;
3260 		rdev->config.evergreen.max_stack_entries = 512;
3261 		rdev->config.evergreen.sx_num_of_sets = 4;
3262 		rdev->config.evergreen.sx_max_export_size = 256;
3263 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3264 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3265 		rdev->config.evergreen.max_hw_contexts = 8;
3266 		rdev->config.evergreen.sq_num_cf_insts = 2;
3267 
3268 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3269 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3270 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3271 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3272 		break;
3273 	case CHIP_TURKS:
3274 		rdev->config.evergreen.num_ses = 1;
3275 		rdev->config.evergreen.max_pipes = 4;
3276 		rdev->config.evergreen.max_tile_pipes = 4;
3277 		rdev->config.evergreen.max_simds = 6;
3278 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3279 		rdev->config.evergreen.max_gprs = 256;
3280 		rdev->config.evergreen.max_threads = 248;
3281 		rdev->config.evergreen.max_gs_threads = 32;
3282 		rdev->config.evergreen.max_stack_entries = 256;
3283 		rdev->config.evergreen.sx_num_of_sets = 4;
3284 		rdev->config.evergreen.sx_max_export_size = 256;
3285 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3286 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3287 		rdev->config.evergreen.max_hw_contexts = 8;
3288 		rdev->config.evergreen.sq_num_cf_insts = 2;
3289 
3290 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3291 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3292 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3293 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3294 		break;
3295 	case CHIP_CAICOS:
3296 		rdev->config.evergreen.num_ses = 1;
3297 		rdev->config.evergreen.max_pipes = 2;
3298 		rdev->config.evergreen.max_tile_pipes = 2;
3299 		rdev->config.evergreen.max_simds = 2;
3300 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3301 		rdev->config.evergreen.max_gprs = 256;
3302 		rdev->config.evergreen.max_threads = 192;
3303 		rdev->config.evergreen.max_gs_threads = 16;
3304 		rdev->config.evergreen.max_stack_entries = 256;
3305 		rdev->config.evergreen.sx_num_of_sets = 4;
3306 		rdev->config.evergreen.sx_max_export_size = 128;
3307 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3308 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3309 		rdev->config.evergreen.max_hw_contexts = 4;
3310 		rdev->config.evergreen.sq_num_cf_insts = 1;
3311 
3312 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3313 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3314 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3315 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3316 		break;
3317 	}
3318 
3319 	/* Initialize HDP */
3320 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3321 		WREG32((0x2c14 + j), 0x00000000);
3322 		WREG32((0x2c18 + j), 0x00000000);
3323 		WREG32((0x2c1c + j), 0x00000000);
3324 		WREG32((0x2c20 + j), 0x00000000);
3325 		WREG32((0x2c24 + j), 0x00000000);
3326 	}
3327 
3328 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3329 	WREG32(SRBM_INT_CNTL, 0x1);
3330 	WREG32(SRBM_INT_ACK, 0x1);
3331 
3332 	evergreen_fix_pci_max_read_req_size(rdev);
3333 
3334 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3335 	if ((rdev->family == CHIP_PALM) ||
3336 	    (rdev->family == CHIP_SUMO) ||
3337 	    (rdev->family == CHIP_SUMO2))
3338 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3339 	else
3340 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3341 
3342 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3343 	 * not have bank info, so create a custom tiling dword.
3344 	 * bits 3:0   num_pipes
3345 	 * bits 7:4   num_banks
3346 	 * bits 11:8  group_size
3347 	 * bits 15:12 row_size
3348 	 */
3349 	rdev->config.evergreen.tile_config = 0;
3350 	switch (rdev->config.evergreen.max_tile_pipes) {
3351 	case 1:
3352 	default:
3353 		rdev->config.evergreen.tile_config |= (0 << 0);
3354 		break;
3355 	case 2:
3356 		rdev->config.evergreen.tile_config |= (1 << 0);
3357 		break;
3358 	case 4:
3359 		rdev->config.evergreen.tile_config |= (2 << 0);
3360 		break;
3361 	case 8:
3362 		rdev->config.evergreen.tile_config |= (3 << 0);
3363 		break;
3364 	}
3365 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3366 	if (rdev->flags & RADEON_IS_IGP)
3367 		rdev->config.evergreen.tile_config |= 1 << 4;
3368 	else {
3369 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3370 		case 0: /* four banks */
3371 			rdev->config.evergreen.tile_config |= 0 << 4;
3372 			break;
3373 		case 1: /* eight banks */
3374 			rdev->config.evergreen.tile_config |= 1 << 4;
3375 			break;
3376 		case 2: /* sixteen banks */
3377 		default:
3378 			rdev->config.evergreen.tile_config |= 2 << 4;
3379 			break;
3380 		}
3381 	}
3382 	rdev->config.evergreen.tile_config |= 0 << 8;
3383 	rdev->config.evergreen.tile_config |=
3384 		((gb_addr_config & 0x30000000) >> 28) << 12;
3385 
3386 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3387 		u32 efuse_straps_4;
3388 		u32 efuse_straps_3;
3389 
3390 		efuse_straps_4 = RREG32_RCU(0x204);
3391 		efuse_straps_3 = RREG32_RCU(0x203);
3392 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3393 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3394 	} else {
3395 		tmp = 0;
3396 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3397 			u32 rb_disable_bitmap;
3398 
3399 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3400 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3401 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3402 			tmp <<= 4;
3403 			tmp |= rb_disable_bitmap;
3404 		}
3405 	}
3406 	/* enabled rb are just the one not disabled :) */
3407 	disabled_rb_mask = tmp;
3408 	tmp = 0;
3409 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3410 		tmp |= (1 << i);
3411 	/* if all the backends are disabled, fix it up here */
3412 	if ((disabled_rb_mask & tmp) == tmp) {
3413 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3414 			disabled_rb_mask &= ~(1 << i);
3415 	}
3416 
3417 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3418 		u32 simd_disable_bitmap;
3419 
3420 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3421 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3422 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3423 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3424 		tmp <<= 16;
3425 		tmp |= simd_disable_bitmap;
3426 	}
3427 	rdev->config.evergreen.active_simds = hweight32(~tmp);
3428 
3429 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3430 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3431 
3432 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3433 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3434 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3435 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3436 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3437 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3438 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3439 
3440 	if ((rdev->config.evergreen.max_backends == 1) &&
3441 	    (rdev->flags & RADEON_IS_IGP)) {
3442 		if ((disabled_rb_mask & 3) == 1) {
3443 			/* RB0 disabled, RB1 enabled */
3444 			tmp = 0x11111111;
3445 		} else {
3446 			/* RB1 disabled, RB0 enabled */
3447 			tmp = 0x00000000;
3448 		}
3449 	} else {
3450 		tmp = gb_addr_config & NUM_PIPES_MASK;
3451 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3452 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3453 	}
3454 	WREG32(GB_BACKEND_MAP, tmp);
3455 
3456 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3457 	WREG32(CGTS_TCC_DISABLE, 0);
3458 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3459 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3460 
3461 	/* set HW defaults for 3D engine */
3462 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3463 				     ROQ_IB2_START(0x2b)));
3464 
3465 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3466 
3467 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3468 			     SYNC_GRADIENT |
3469 			     SYNC_WALKER |
3470 			     SYNC_ALIGNER));
3471 
3472 	sx_debug_1 = RREG32(SX_DEBUG_1);
3473 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3474 	WREG32(SX_DEBUG_1, sx_debug_1);
3475 
3476 
3477 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3478 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3479 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3480 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3481 
3482 	if (rdev->family <= CHIP_SUMO2)
3483 		WREG32(SMX_SAR_CTL0, 0x00010000);
3484 
3485 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3486 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3487 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3488 
3489 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3490 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3491 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3492 
3493 	WREG32(VGT_NUM_INSTANCES, 1);
3494 	WREG32(SPI_CONFIG_CNTL, 0);
3495 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3496 	WREG32(CP_PERFMON_CNTL, 0);
3497 
3498 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3499 				  FETCH_FIFO_HIWATER(0x4) |
3500 				  DONE_FIFO_HIWATER(0xe0) |
3501 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3502 
3503 	sq_config = RREG32(SQ_CONFIG);
3504 	sq_config &= ~(PS_PRIO(3) |
3505 		       VS_PRIO(3) |
3506 		       GS_PRIO(3) |
3507 		       ES_PRIO(3));
3508 	sq_config |= (VC_ENABLE |
3509 		      EXPORT_SRC_C |
3510 		      PS_PRIO(0) |
3511 		      VS_PRIO(1) |
3512 		      GS_PRIO(2) |
3513 		      ES_PRIO(3));
3514 
3515 	switch (rdev->family) {
3516 	case CHIP_CEDAR:
3517 	case CHIP_PALM:
3518 	case CHIP_SUMO:
3519 	case CHIP_SUMO2:
3520 	case CHIP_CAICOS:
3521 		/* no vertex cache */
3522 		sq_config &= ~VC_ENABLE;
3523 		break;
3524 	default:
3525 		break;
3526 	}
3527 
3528 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3529 
3530 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3531 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3532 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3533 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3534 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3535 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3536 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3537 
3538 	switch (rdev->family) {
3539 	case CHIP_CEDAR:
3540 	case CHIP_PALM:
3541 	case CHIP_SUMO:
3542 	case CHIP_SUMO2:
3543 		ps_thread_count = 96;
3544 		break;
3545 	default:
3546 		ps_thread_count = 128;
3547 		break;
3548 	}
3549 
3550 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3551 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3552 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3553 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3554 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3555 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3556 
3557 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3558 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3559 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3560 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3561 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3562 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3563 
3564 	WREG32(SQ_CONFIG, sq_config);
3565 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3566 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3567 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3568 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3569 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3570 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3571 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3572 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3573 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3574 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3575 
3576 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3577 					  FORCE_EOV_MAX_REZ_CNT(255)));
3578 
3579 	switch (rdev->family) {
3580 	case CHIP_CEDAR:
3581 	case CHIP_PALM:
3582 	case CHIP_SUMO:
3583 	case CHIP_SUMO2:
3584 	case CHIP_CAICOS:
3585 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3586 		break;
3587 	default:
3588 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3589 		break;
3590 	}
3591 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3592 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3593 
3594 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3595 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3596 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3597 
3598 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3599 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3600 
3601 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3602 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3603 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3604 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3605 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3606 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3607 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3608 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3609 
3610 	/* clear render buffer base addresses */
3611 	WREG32(CB_COLOR0_BASE, 0);
3612 	WREG32(CB_COLOR1_BASE, 0);
3613 	WREG32(CB_COLOR2_BASE, 0);
3614 	WREG32(CB_COLOR3_BASE, 0);
3615 	WREG32(CB_COLOR4_BASE, 0);
3616 	WREG32(CB_COLOR5_BASE, 0);
3617 	WREG32(CB_COLOR6_BASE, 0);
3618 	WREG32(CB_COLOR7_BASE, 0);
3619 	WREG32(CB_COLOR8_BASE, 0);
3620 	WREG32(CB_COLOR9_BASE, 0);
3621 	WREG32(CB_COLOR10_BASE, 0);
3622 	WREG32(CB_COLOR11_BASE, 0);
3623 
3624 	/* set the shader const cache sizes to 0 */
3625 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3626 		WREG32(i, 0);
3627 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3628 		WREG32(i, 0);
3629 
3630 	tmp = RREG32(HDP_MISC_CNTL);
3631 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3632 	WREG32(HDP_MISC_CNTL, tmp);
3633 
3634 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3635 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3636 
3637 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3638 
3639 	udelay(50);
3640 
3641 }
3642 
/**
 * evergreen_mc_init - set up VRAM info from the memory controller
 *
 * @rdev: radeon_device pointer
 *
 * Reads the RAM configuration registers to work out the VRAM bus width
 * (channel size * number of channels) and the VRAM size, then places
 * the VRAM and GTT apertures in the GPU address space and updates the
 * bandwidth info.  Always returns 0.
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM information */
	rdev->mc.vram_is_ddr = true;
	/* Fusion IGPs (Palm/Sumo/Sumo2) expose the ARB RAMCFG register at a
	 * different offset than discrete parts. */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	/* per-channel width in bits */
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* number of memory channels */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
3701 
3702 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3703 {
3704 	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3705 		RREG32(GRBM_STATUS));
3706 	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3707 		RREG32(GRBM_STATUS_SE0));
3708 	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3709 		RREG32(GRBM_STATUS_SE1));
3710 	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3711 		RREG32(SRBM_STATUS));
3712 	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3713 		RREG32(SRBM_STATUS2));
3714 	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3715 		RREG32(CP_STALLED_STAT1));
3716 	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3717 		RREG32(CP_STALLED_STAT2));
3718 	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3719 		RREG32(CP_BUSY_STAT));
3720 	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3721 		RREG32(CP_STAT));
3722 	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3723 		RREG32(DMA_STATUS_REG));
3724 	if (rdev->family >= CHIP_CAYMAN) {
3725 		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3726 			 RREG32(DMA_STATUS_REG + 0x800));
3727 	}
3728 }
3729 
3730 bool evergreen_is_display_hung(struct radeon_device *rdev)
3731 {
3732 	u32 crtc_hung = 0;
3733 	u32 crtc_status[6];
3734 	u32 i, j, tmp;
3735 
3736 	for (i = 0; i < rdev->num_crtc; i++) {
3737 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3738 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3739 			crtc_hung |= (1 << i);
3740 		}
3741 	}
3742 
3743 	for (j = 0; j < 10; j++) {
3744 		for (i = 0; i < rdev->num_crtc; i++) {
3745 			if (crtc_hung & (1 << i)) {
3746 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3747 				if (tmp != crtc_status[i])
3748 					crtc_hung &= ~(1 << i);
3749 			}
3750 		}
3751 		if (crtc_hung == 0)
3752 			return false;
3753 		udelay(100);
3754 	}
3755 
3756 	return true;
3757 }
3758 
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks are hung
 *
 * @rdev: radeon_device pointer
 *
 * Samples the GRBM/DMA/SRBM/VM status registers and translates busy or
 * request-pending bits into a mask of RADEON_RESET_* flags describing
 * which blocks need a soft reset.  MC busy is deliberately dropped from
 * the returned mask since a busy MC is usually just busy, not hung.
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3827 
/**
 * evergreen_gpu_soft_reset - soft reset the requested GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting which blocks to reset
 *
 * Halts the CP (and DMA engine if requested), stops memory controller
 * access, then pulses the matching GRBM/SRBM soft reset bits.  The
 * assert -> readback -> delay -> deassert -> readback sequence and its
 * ordering follow the hardware reset protocol; do not reorder.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop MC access before touching the reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM/SRBM soft reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* IGPs have no discrete MC to reset */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		/* readback to post the write before the delay */
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		/* readback to post the write before the delay */
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3941 
/**
 * evergreen_gpu_pci_config_reset - reset the GPU via PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * Full-asic fallback used when a soft reset did not clear the hang:
 * halts the CP, DMA and RLC, bypasses the clocks, disables bus
 * mastering and MC access, then issues a PCI config reset and polls
 * CONFIG_MEMSIZE until the ASIC responds again (or the timeout expires).
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* CONFIG_MEMSIZE reads all-ones while the ASIC is in reset */
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
3983 
/**
 * evergreen_asic_reset - attempt to reset a hung GPU
 *
 * @rdev: radeon_device pointer
 *
 * Escalating reset: first tries a soft reset of whichever blocks report
 * hung, then — if something is still hung and hard resets are enabled
 * via the radeon_hard_reset module parameter — falls back to a PCI
 * config reset.  The BIOS scratch "engine hung" flag is set while the
 * GPU is considered hung and cleared once nothing reports busy.
 * Always returns 0.
 */
int evergreen_asic_reset(struct radeon_device *rdev)
{
	u32 reset_mask;

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, true);

	/* try soft reset */
	evergreen_gpu_soft_reset(rdev, reset_mask);

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	/* try pci config reset */
	if (reset_mask && radeon_hard_reset)
		evergreen_gpu_pci_config_reset(rdev);

	reset_mask = evergreen_gpu_check_soft_reset(rdev);

	if (!reset_mask)
		r600_set_bios_scratch_engine_hung(rdev, false);

	return 0;
}
4009 
4010 /**
4011  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4012  *
4013  * @rdev: radeon_device pointer
4014  * @ring: radeon_ring structure holding ring information
4015  *
4016  * Check if the GFX engine is locked up.
4017  * Returns true if the engine appears to be locked up, false if not.
4018  */
4019 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4020 {
4021 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4022 
4023 	if (!(reset_mask & (RADEON_RESET_GFX |
4024 			    RADEON_RESET_COMPUTE |
4025 			    RADEON_RESET_CP))) {
4026 		radeon_ring_lockup_update(rdev, ring);
4027 		return false;
4028 	}
4029 	return radeon_ring_test_lockup(rdev, ring);
4030 }
4031 
4032 /*
4033  * RLC
4034  */
4035 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4036 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4037 
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins and frees the RLC save/restore, clear-state and CP table
 * buffer objects, if they were allocated.  For each BO the sequence is
 * reserve -> unpin -> unreserve -> unref; a failed reserve is only
 * warned about since teardown must proceed regardless.
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
4078 
4079 #define CP_ME_TABLE_SIZE    96
4080 
/**
 * sumo_rlc_init - allocate and fill the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and populates up to three VRAM buffer objects used by
 * the RLC: the register save/restore list (when rdev->rlc.reg_list is
 * set), the clear-state block (when rdev->rlc.cs_data is set), and the
 * CP power-gating table (when rdev->rlc.cp_table_size is set).  The
 * in-buffer layout differs per generation (SI/CIK vs. ON/LN/TN) and is
 * built accordingly below.  On any failure everything allocated so far
 * is torn down via sumo_rlc_fini() and the error is returned;
 * returns 0 on success.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK needs extra room in the save/restore buffer */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain little-endian copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			for (i = 0; i < dws; i++) {
				/* pack two dword-offsets per header dw; the
				 * save-space slots are left for the RLC */
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI prepends a 256-byte header before the CSB */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* evergreen/NI: count registers across all sections,
			 * plus 3 header dwords per list and 2 trailing dwords */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: GPU address + size of the CSB that follows */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* evergreen/NI: header records (addr hi, addr lo,
			 * 0x08000000 | byte count) per list, followed by the
			 * register data blocks themselves */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* CP power-gating table (CIK) */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4297 
4298 static void evergreen_rlc_start(struct radeon_device *rdev)
4299 {
4300 	u32 mask = RLC_ENABLE;
4301 
4302 	if (rdev->flags & RADEON_IS_IGP) {
4303 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4304 	}
4305 
4306 	WREG32(RLC_CNTL, mask);
4307 }
4308 
/**
 * evergreen_rlc_resume - load the RLC microcode and start the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Stops the RLC, programs the family-specific RLC setup registers,
 * uploads the RLC ucode (size depends on the generation) and starts
 * the RLC again.  Returns 0 on success, -EINVAL if no RLC firmware
 * has been loaded.
 */
int evergreen_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->flags & RADEON_IS_IGP) {
		if (rdev->family == CHIP_ARUBA) {
			u32 always_on_bitmap =
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
			/* find out the number of active simds */
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
			tmp = hweight32(~tmp);
			/* only enable load balancing when all SIMDs are active */
			if (tmp == rdev->config.cayman.max_simds_per_se) {
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
			}
		} else {
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		}
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	} else {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* upload the ucode; firmware words are big-endian on disk */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	WREG32(RLC_UCODE_ADDR, 0);

	evergreen_rlc_start(rdev);

	return 0;
}
4375 
4376 /* Interrupts */
4377 
4378 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4379 {
4380 	if (crtc >= rdev->num_crtc)
4381 		return 0;
4382 	else
4383 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4384 }
4385 
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Masks every interrupt source this driver programs: CP rings, DMA
 * engine(s), GRBM/SRBM, per-CRTC vblank and pageflip, DAC autodetect,
 * and the six HPD pads (their polarity bits are preserved).  Used to
 * bring the hardware to a known quiet state before (re)enabling IRQs.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman has three CP rings and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* vblank/vline interrupts off on every CRTC the ASIC has */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* pageflip (graphics surface update) interrupts off */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* HPD interrupts off; keep each pad's configured polarity bit */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4444 
4445 int evergreen_irq_set(struct radeon_device *rdev)
4446 {
4447 	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4448 	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4449 	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4450 	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4451 	u32 grbm_int_cntl = 0;
4452 	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4453 	u32 dma_cntl, dma_cntl1 = 0;
4454 	u32 thermal_int = 0;
4455 
4456 	if (!rdev->irq.installed) {
4457 		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4458 		return -EINVAL;
4459 	}
4460 	/* don't enable anything if the ih is disabled */
4461 	if (!rdev->ih.enabled) {
4462 		r600_disable_interrupts(rdev);
4463 		/* force the active interrupt state to all disabled */
4464 		evergreen_disable_interrupt_state(rdev);
4465 		return 0;
4466 	}
4467 
4468 	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4469 	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4470 	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4471 	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4472 	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4473 	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4474 	if (rdev->family == CHIP_ARUBA)
4475 		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4476 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4477 	else
4478 		thermal_int = RREG32(CG_THERMAL_INT) &
4479 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4480 
4481 	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4482 	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4483 	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4484 	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4485 	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4486 	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4487 
4488 	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4489 
4490 	if (rdev->family >= CHIP_CAYMAN) {
4491 		/* enable CP interrupts on all rings */
4492 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4493 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4494 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4495 		}
4496 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4497 			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4498 			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4499 		}
4500 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4501 			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4502 			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4503 		}
4504 	} else {
4505 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4506 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4507 			cp_int_cntl |= RB_INT_ENABLE;
4508 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4509 		}
4510 	}
4511 
4512 	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4513 		DRM_DEBUG("r600_irq_set: sw int dma\n");
4514 		dma_cntl |= TRAP_ENABLE;
4515 	}
4516 
4517 	if (rdev->family >= CHIP_CAYMAN) {
4518 		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4519 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4520 			DRM_DEBUG("r600_irq_set: sw int dma1\n");
4521 			dma_cntl1 |= TRAP_ENABLE;
4522 		}
4523 	}
4524 
4525 	if (rdev->irq.dpm_thermal) {
4526 		DRM_DEBUG("dpm thermal\n");
4527 		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4528 	}
4529 
4530 	if (rdev->irq.crtc_vblank_int[0] ||
4531 	    atomic_read(&rdev->irq.pflip[0])) {
4532 		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4533 		crtc1 |= VBLANK_INT_MASK;
4534 	}
4535 	if (rdev->irq.crtc_vblank_int[1] ||
4536 	    atomic_read(&rdev->irq.pflip[1])) {
4537 		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4538 		crtc2 |= VBLANK_INT_MASK;
4539 	}
4540 	if (rdev->irq.crtc_vblank_int[2] ||
4541 	    atomic_read(&rdev->irq.pflip[2])) {
4542 		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4543 		crtc3 |= VBLANK_INT_MASK;
4544 	}
4545 	if (rdev->irq.crtc_vblank_int[3] ||
4546 	    atomic_read(&rdev->irq.pflip[3])) {
4547 		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4548 		crtc4 |= VBLANK_INT_MASK;
4549 	}
4550 	if (rdev->irq.crtc_vblank_int[4] ||
4551 	    atomic_read(&rdev->irq.pflip[4])) {
4552 		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4553 		crtc5 |= VBLANK_INT_MASK;
4554 	}
4555 	if (rdev->irq.crtc_vblank_int[5] ||
4556 	    atomic_read(&rdev->irq.pflip[5])) {
4557 		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4558 		crtc6 |= VBLANK_INT_MASK;
4559 	}
4560 	if (rdev->irq.hpd[0]) {
4561 		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4562 		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4563 	}
4564 	if (rdev->irq.hpd[1]) {
4565 		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4566 		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4567 	}
4568 	if (rdev->irq.hpd[2]) {
4569 		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4570 		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4571 	}
4572 	if (rdev->irq.hpd[3]) {
4573 		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4574 		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4575 	}
4576 	if (rdev->irq.hpd[4]) {
4577 		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4578 		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4579 	}
4580 	if (rdev->irq.hpd[5]) {
4581 		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4582 		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4583 	}
4584 	if (rdev->irq.afmt[0]) {
4585 		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4586 		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4587 	}
4588 	if (rdev->irq.afmt[1]) {
4589 		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4590 		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4591 	}
4592 	if (rdev->irq.afmt[2]) {
4593 		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4594 		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4595 	}
4596 	if (rdev->irq.afmt[3]) {
4597 		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4598 		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4599 	}
4600 	if (rdev->irq.afmt[4]) {
4601 		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4602 		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4603 	}
4604 	if (rdev->irq.afmt[5]) {
4605 		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4606 		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4607 	}
4608 
4609 	if (rdev->family >= CHIP_CAYMAN) {
4610 		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4611 		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4612 		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4613 	} else
4614 		WREG32(CP_INT_CNTL, cp_int_cntl);
4615 
4616 	WREG32(DMA_CNTL, dma_cntl);
4617 
4618 	if (rdev->family >= CHIP_CAYMAN)
4619 		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4620 
4621 	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4622 
4623 	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4624 	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4625 	if (rdev->num_crtc >= 4) {
4626 		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4627 		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4628 	}
4629 	if (rdev->num_crtc >= 6) {
4630 		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4631 		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4632 	}
4633 
4634 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4635 	       GRPH_PFLIP_INT_MASK);
4636 	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4637 	       GRPH_PFLIP_INT_MASK);
4638 	if (rdev->num_crtc >= 4) {
4639 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4640 		       GRPH_PFLIP_INT_MASK);
4641 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4642 		       GRPH_PFLIP_INT_MASK);
4643 	}
4644 	if (rdev->num_crtc >= 6) {
4645 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4646 		       GRPH_PFLIP_INT_MASK);
4647 		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4648 		       GRPH_PFLIP_INT_MASK);
4649 	}
4650 
4651 	WREG32(DC_HPD1_INT_CONTROL, hpd1);
4652 	WREG32(DC_HPD2_INT_CONTROL, hpd2);
4653 	WREG32(DC_HPD3_INT_CONTROL, hpd3);
4654 	WREG32(DC_HPD4_INT_CONTROL, hpd4);
4655 	WREG32(DC_HPD5_INT_CONTROL, hpd5);
4656 	WREG32(DC_HPD6_INT_CONTROL, hpd6);
4657 	if (rdev->family == CHIP_ARUBA)
4658 		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4659 	else
4660 		WREG32(CG_THERMAL_INT, thermal_int);
4661 
4662 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4663 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4664 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4665 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4666 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4667 	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4668 
4669 	/* posting read */
4670 	RREG32(SRBM_STATUS);
4671 
4672 	return 0;
4673 }
4674 
4675 static void evergreen_irq_ack(struct radeon_device *rdev)
4676 {
4677 	u32 tmp;
4678 
4679 	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4680 	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4681 	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4682 	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4683 	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4684 	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4685 	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4686 	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4687 	if (rdev->num_crtc >= 4) {
4688 		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4689 		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4690 	}
4691 	if (rdev->num_crtc >= 6) {
4692 		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4693 		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4694 	}
4695 
4696 	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4697 	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4698 	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4699 	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4700 	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4701 	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4702 
4703 	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4704 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4705 	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4706 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4707 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4708 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4709 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4710 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4711 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4712 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4713 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4714 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4715 
4716 	if (rdev->num_crtc >= 4) {
4717 		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4718 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4719 		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4720 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4721 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4722 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4723 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4724 			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4725 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4726 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4727 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4728 			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4729 	}
4730 
4731 	if (rdev->num_crtc >= 6) {
4732 		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4733 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4734 		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4735 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4736 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4737 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4738 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4739 			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4740 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4741 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4742 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4743 			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4744 	}
4745 
4746 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4747 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4748 		tmp |= DC_HPDx_INT_ACK;
4749 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4750 	}
4751 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4752 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4753 		tmp |= DC_HPDx_INT_ACK;
4754 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4755 	}
4756 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4757 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4758 		tmp |= DC_HPDx_INT_ACK;
4759 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4760 	}
4761 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4762 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4763 		tmp |= DC_HPDx_INT_ACK;
4764 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4765 	}
4766 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4767 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4768 		tmp |= DC_HPDx_INT_ACK;
4769 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4770 	}
4771 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4772 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4773 		tmp |= DC_HPDx_INT_ACK;
4774 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4775 	}
4776 
4777 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4778 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4779 		tmp |= DC_HPDx_RX_INT_ACK;
4780 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4781 	}
4782 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4783 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4784 		tmp |= DC_HPDx_RX_INT_ACK;
4785 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4786 	}
4787 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4788 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4789 		tmp |= DC_HPDx_RX_INT_ACK;
4790 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4791 	}
4792 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4793 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4794 		tmp |= DC_HPDx_RX_INT_ACK;
4795 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4796 	}
4797 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4798 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4799 		tmp |= DC_HPDx_RX_INT_ACK;
4800 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4801 	}
4802 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4803 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4804 		tmp |= DC_HPDx_RX_INT_ACK;
4805 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4806 	}
4807 
4808 	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4809 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4810 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4811 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4812 	}
4813 	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4814 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4815 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4816 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4817 	}
4818 	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4819 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4820 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4821 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4822 	}
4823 	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4824 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4825 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4826 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4827 	}
4828 	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4829 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4830 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4831 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4832 	}
4833 	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4834 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4835 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4836 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4837 	}
4838 }
4839 
/**
 * evergreen_irq_disable - disable interrupt generation and clear pending state
 *
 * @rdev: radeon_device pointer
 *
 * Disables the interrupt ring, waits briefly for in-flight interrupts to
 * land, acks anything that fired in the meantime, and finally masks all
 * individual interrupt sources so no stale status bits remain.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4848 
/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disables and acknowledges all interrupt sources, then stops the RLC so
 * the hardware is quiescent before the device is suspended.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4854 
4855 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4856 {
4857 	u32 wptr, tmp;
4858 
4859 	if (rdev->wb.enabled)
4860 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4861 	else
4862 		wptr = RREG32(IH_RB_WPTR);
4863 
4864 	if (wptr & RB_OVERFLOW) {
4865 		wptr &= ~RB_OVERFLOW;
4866 		/* When a ring buffer overflow happen start parsing interrupt
4867 		 * from the last not overwritten vector (wptr + 16). Hopefully
4868 		 * this should allow us to catchup.
4869 		 */
4870 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
4871 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4872 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4873 		tmp = RREG32(IH_RB_CNTL);
4874 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
4875 		WREG32(IH_RB_CNTL, tmp);
4876 	}
4877 	return (wptr & rdev->ih.ptr_mask);
4878 }
4879 
4880 int evergreen_irq_process(struct radeon_device *rdev)
4881 {
4882 	u32 wptr;
4883 	u32 rptr;
4884 	u32 src_id, src_data;
4885 	u32 ring_index;
4886 	bool queue_hotplug = false;
4887 	bool queue_hdmi = false;
4888 	bool queue_dp = false;
4889 	bool queue_thermal = false;
4890 	u32 status, addr;
4891 
4892 	if (!rdev->ih.enabled || rdev->shutdown)
4893 		return IRQ_NONE;
4894 
4895 	wptr = evergreen_get_ih_wptr(rdev);
4896 
4897 restart_ih:
4898 	/* is somebody else already processing irqs? */
4899 	if (atomic_xchg(&rdev->ih.lock, 1))
4900 		return IRQ_NONE;
4901 
4902 	rptr = rdev->ih.rptr;
4903 	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4904 
4905 	/* Order reading of wptr vs. reading of IH ring data */
4906 	rmb();
4907 
4908 	/* display interrupts */
4909 	evergreen_irq_ack(rdev);
4910 
4911 	while (rptr != wptr) {
4912 		/* wptr/rptr are in bytes! */
4913 		ring_index = rptr / 4;
4914 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4915 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4916 
4917 		switch (src_id) {
4918 		case 1: /* D1 vblank/vline */
4919 			switch (src_data) {
4920 			case 0: /* D1 vblank */
4921 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
4922 					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
4923 
4924 				if (rdev->irq.crtc_vblank_int[0]) {
4925 					drm_handle_vblank(rdev->ddev, 0);
4926 					rdev->pm.vblank_sync = true;
4927 					wake_up(&rdev->irq.vblank_queue);
4928 				}
4929 				if (atomic_read(&rdev->irq.pflip[0]))
4930 					radeon_crtc_handle_vblank(rdev, 0);
4931 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4932 				DRM_DEBUG("IH: D1 vblank\n");
4933 
4934 				break;
4935 			case 1: /* D1 vline */
4936 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
4937 					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
4938 
4939 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4940 				DRM_DEBUG("IH: D1 vline\n");
4941 
4942 				break;
4943 			default:
4944 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4945 				break;
4946 			}
4947 			break;
4948 		case 2: /* D2 vblank/vline */
4949 			switch (src_data) {
4950 			case 0: /* D2 vblank */
4951 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
4952 					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
4953 
4954 				if (rdev->irq.crtc_vblank_int[1]) {
4955 					drm_handle_vblank(rdev->ddev, 1);
4956 					rdev->pm.vblank_sync = true;
4957 					wake_up(&rdev->irq.vblank_queue);
4958 				}
4959 				if (atomic_read(&rdev->irq.pflip[1]))
4960 					radeon_crtc_handle_vblank(rdev, 1);
4961 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4962 				DRM_DEBUG("IH: D2 vblank\n");
4963 
4964 				break;
4965 			case 1: /* D2 vline */
4966 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
4967 					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
4968 
4969 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4970 				DRM_DEBUG("IH: D2 vline\n");
4971 
4972 				break;
4973 			default:
4974 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4975 				break;
4976 			}
4977 			break;
4978 		case 3: /* D3 vblank/vline */
4979 			switch (src_data) {
4980 			case 0: /* D3 vblank */
4981 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
4982 					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
4983 
4984 				if (rdev->irq.crtc_vblank_int[2]) {
4985 					drm_handle_vblank(rdev->ddev, 2);
4986 					rdev->pm.vblank_sync = true;
4987 					wake_up(&rdev->irq.vblank_queue);
4988 				}
4989 				if (atomic_read(&rdev->irq.pflip[2]))
4990 					radeon_crtc_handle_vblank(rdev, 2);
4991 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4992 				DRM_DEBUG("IH: D3 vblank\n");
4993 
4994 				break;
4995 			case 1: /* D3 vline */
4996 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
4997 					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
4998 
4999 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5000 				DRM_DEBUG("IH: D3 vline\n");
5001 
5002 				break;
5003 			default:
5004 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5005 				break;
5006 			}
5007 			break;
5008 		case 4: /* D4 vblank/vline */
5009 			switch (src_data) {
5010 			case 0: /* D4 vblank */
5011 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5012 					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5013 
5014 				if (rdev->irq.crtc_vblank_int[3]) {
5015 					drm_handle_vblank(rdev->ddev, 3);
5016 					rdev->pm.vblank_sync = true;
5017 					wake_up(&rdev->irq.vblank_queue);
5018 				}
5019 				if (atomic_read(&rdev->irq.pflip[3]))
5020 					radeon_crtc_handle_vblank(rdev, 3);
5021 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5022 				DRM_DEBUG("IH: D4 vblank\n");
5023 
5024 				break;
5025 			case 1: /* D4 vline */
5026 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5027 					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5028 
5029 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5030 				DRM_DEBUG("IH: D4 vline\n");
5031 
5032 				break;
5033 			default:
5034 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5035 				break;
5036 			}
5037 			break;
5038 		case 5: /* D5 vblank/vline */
5039 			switch (src_data) {
5040 			case 0: /* D5 vblank */
5041 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5042 					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5043 
5044 				if (rdev->irq.crtc_vblank_int[4]) {
5045 					drm_handle_vblank(rdev->ddev, 4);
5046 					rdev->pm.vblank_sync = true;
5047 					wake_up(&rdev->irq.vblank_queue);
5048 				}
5049 				if (atomic_read(&rdev->irq.pflip[4]))
5050 					radeon_crtc_handle_vblank(rdev, 4);
5051 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5052 				DRM_DEBUG("IH: D5 vblank\n");
5053 
5054 				break;
5055 			case 1: /* D5 vline */
5056 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5057 					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5058 
5059 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5060 				DRM_DEBUG("IH: D5 vline\n");
5061 
5062 				break;
5063 			default:
5064 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5065 				break;
5066 			}
5067 			break;
5068 		case 6: /* D6 vblank/vline */
5069 			switch (src_data) {
5070 			case 0: /* D6 vblank */
5071 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5072 					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5073 
5074 				if (rdev->irq.crtc_vblank_int[5]) {
5075 					drm_handle_vblank(rdev->ddev, 5);
5076 					rdev->pm.vblank_sync = true;
5077 					wake_up(&rdev->irq.vblank_queue);
5078 				}
5079 				if (atomic_read(&rdev->irq.pflip[5]))
5080 					radeon_crtc_handle_vblank(rdev, 5);
5081 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5082 				DRM_DEBUG("IH: D6 vblank\n");
5083 
5084 				break;
5085 			case 1: /* D6 vline */
5086 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5087 					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5088 
5089 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5090 				DRM_DEBUG("IH: D6 vline\n");
5091 
5092 				break;
5093 			default:
5094 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5095 				break;
5096 			}
5097 			break;
5098 		case 8: /* D1 page flip */
5099 		case 10: /* D2 page flip */
5100 		case 12: /* D3 page flip */
5101 		case 14: /* D4 page flip */
5102 		case 16: /* D5 page flip */
5103 		case 18: /* D6 page flip */
5104 			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5105 			if (radeon_use_pflipirq > 0)
5106 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5107 			break;
5108 		case 42: /* HPD hotplug */
5109 			switch (src_data) {
5110 			case 0:
5111 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5112 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5113 
5114 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5115 				queue_hotplug = true;
5116 				DRM_DEBUG("IH: HPD1\n");
5117 				break;
5118 			case 1:
5119 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5120 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5121 
5122 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5123 				queue_hotplug = true;
5124 				DRM_DEBUG("IH: HPD2\n");
5125 				break;
5126 			case 2:
5127 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5128 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5129 
5130 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5131 				queue_hotplug = true;
5132 				DRM_DEBUG("IH: HPD3\n");
5133 				break;
5134 			case 3:
5135 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5136 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5137 
5138 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5139 				queue_hotplug = true;
5140 				DRM_DEBUG("IH: HPD4\n");
5141 				break;
5142 			case 4:
5143 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5144 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5145 
5146 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5147 				queue_hotplug = true;
5148 				DRM_DEBUG("IH: HPD5\n");
5149 				break;
5150 			case 5:
5151 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5152 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5153 
5154 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5155 				queue_hotplug = true;
5156 				DRM_DEBUG("IH: HPD6\n");
5157 				break;
5158 			case 6:
5159 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5160 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5161 
5162 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5163 				queue_dp = true;
5164 				DRM_DEBUG("IH: HPD_RX 1\n");
5165 				break;
5166 			case 7:
5167 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5168 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5169 
5170 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5171 				queue_dp = true;
5172 				DRM_DEBUG("IH: HPD_RX 2\n");
5173 				break;
5174 			case 8:
5175 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5176 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5177 
5178 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5179 				queue_dp = true;
5180 				DRM_DEBUG("IH: HPD_RX 3\n");
5181 				break;
5182 			case 9:
5183 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5184 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5185 
5186 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5187 				queue_dp = true;
5188 				DRM_DEBUG("IH: HPD_RX 4\n");
5189 				break;
5190 			case 10:
5191 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5192 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5193 
5194 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5195 				queue_dp = true;
5196 				DRM_DEBUG("IH: HPD_RX 5\n");
5197 				break;
5198 			case 11:
5199 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5200 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5201 
5202 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5203 				queue_dp = true;
5204 				DRM_DEBUG("IH: HPD_RX 6\n");
5205 				break;
5206 			default:
5207 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5208 				break;
5209 			}
5210 			break;
5211 		case 44: /* hdmi */
5212 			switch (src_data) {
5213 			case 0:
5214 				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5215 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5216 
5217 				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5218 				queue_hdmi = true;
5219 				DRM_DEBUG("IH: HDMI0\n");
5220 				break;
5221 			case 1:
5222 				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5223 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5224 
5225 				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5226 				queue_hdmi = true;
5227 				DRM_DEBUG("IH: HDMI1\n");
5228 				break;
5229 			case 2:
5230 				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5231 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5232 
5233 				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5234 				queue_hdmi = true;
5235 				DRM_DEBUG("IH: HDMI2\n");
5236 				break;
5237 			case 3:
5238 				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5239 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5240 
5241 				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5242 				queue_hdmi = true;
5243 				DRM_DEBUG("IH: HDMI3\n");
5244 				break;
5245 			case 4:
5246 				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5247 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5248 
5249 				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5250 				queue_hdmi = true;
5251 				DRM_DEBUG("IH: HDMI4\n");
5252 				break;
5253 			case 5:
5254 				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5255 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5256 
5257 				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5258 				queue_hdmi = true;
5259 				DRM_DEBUG("IH: HDMI5\n");
5260 				break;
5261 			default:
5262 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5263 				break;
5264 			}
5265 		case 96:
5266 			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5267 			WREG32(SRBM_INT_ACK, 0x1);
5268 			break;
5269 		case 124: /* UVD */
5270 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5271 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5272 			break;
5273 		case 146:
5274 		case 147:
5275 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5276 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5277 			/* reset addr and status */
5278 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5279 			if (addr == 0x0 && status == 0x0)
5280 				break;
5281 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5282 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5283 				addr);
5284 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5285 				status);
5286 			cayman_vm_decode_fault(rdev, status, addr);
5287 			break;
5288 		case 176: /* CP_INT in ring buffer */
5289 		case 177: /* CP_INT in IB1 */
5290 		case 178: /* CP_INT in IB2 */
5291 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5292 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5293 			break;
5294 		case 181: /* CP EOP event */
5295 			DRM_DEBUG("IH: CP EOP\n");
5296 			if (rdev->family >= CHIP_CAYMAN) {
5297 				switch (src_data) {
5298 				case 0:
5299 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5300 					break;
5301 				case 1:
5302 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5303 					break;
5304 				case 2:
5305 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5306 					break;
5307 				}
5308 			} else
5309 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5310 			break;
5311 		case 224: /* DMA trap event */
5312 			DRM_DEBUG("IH: DMA trap\n");
5313 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5314 			break;
5315 		case 230: /* thermal low to high */
5316 			DRM_DEBUG("IH: thermal low to high\n");
5317 			rdev->pm.dpm.thermal.high_to_low = false;
5318 			queue_thermal = true;
5319 			break;
5320 		case 231: /* thermal high to low */
5321 			DRM_DEBUG("IH: thermal high to low\n");
5322 			rdev->pm.dpm.thermal.high_to_low = true;
5323 			queue_thermal = true;
5324 			break;
5325 		case 233: /* GUI IDLE */
5326 			DRM_DEBUG("IH: GUI idle\n");
5327 			break;
5328 		case 244: /* DMA trap event */
5329 			if (rdev->family >= CHIP_CAYMAN) {
5330 				DRM_DEBUG("IH: DMA1 trap\n");
5331 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5332 			}
5333 			break;
5334 		default:
5335 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5336 			break;
5337 		}
5338 
5339 		/* wptr/rptr are in bytes! */
5340 		rptr += 16;
5341 		rptr &= rdev->ih.ptr_mask;
5342 		WREG32(IH_RB_RPTR, rptr);
5343 	}
5344 	if (queue_dp)
5345 		schedule_work(&rdev->dp_work);
5346 	if (queue_hotplug)
5347 		schedule_work(&rdev->hotplug_work);
5348 	if (queue_hdmi)
5349 		schedule_work(&rdev->audio_work);
5350 	if (queue_thermal && rdev->pm.dpm_enabled)
5351 		schedule_work(&rdev->pm.dpm.thermal.work);
5352 	rdev->ih.rptr = rptr;
5353 	atomic_set(&rdev->ih.lock, 0);
5354 
5355 	/* make sure wptr hasn't changed while processing */
5356 	wptr = evergreen_get_ih_wptr(rdev);
5357 	if (wptr != rptr)
5358 		goto restart_ih;
5359 
5360 	return IRQ_HANDLED;
5361 }
5362 
/**
 * evergreen_startup - program the asic and bring up the rings
 * @rdev: radeon_device pointer
 *
 * Common hw start sequence used by both init and resume: programs the
 * memory controller, enables GART (or AGP), initializes RLC/writeback/
 * fence/interrupt infrastructure, then starts the GFX, DMA and
 * (optionally) UVD rings and the IB pool and audio.
 *
 * Returns 0 on success, a negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* NOTE(review): MC ucode is loaded here only when dpm is disabled;
	 * presumably the dpm path loads it itself -- confirm.
	 */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers (IGP parts only) */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional: failure here only disables the UVD ring
	 * (ring_size = 0 below); it does not fail the whole startup.
	 */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ (only set up the kms irq once across start cycles) */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD was not brought up above; skip it */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		/* again non-fatal: just log the UVD failure */
		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5500 
5501 int evergreen_resume(struct radeon_device *rdev)
5502 {
5503 	int r;
5504 
5505 	/* reset the asic, the gfx blocks are often in a bad state
5506 	 * after the driver is unloaded or after a resume
5507 	 */
5508 	if (radeon_asic_reset(rdev))
5509 		dev_warn(rdev->dev, "GPU reset failed !\n");
5510 	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5511 	 * posting will perform necessary task to bring back GPU into good
5512 	 * shape.
5513 	 */
5514 	/* post card */
5515 	atom_asic_init(rdev->mode_info.atom_context);
5516 
5517 	/* init golden registers */
5518 	evergreen_init_golden_registers(rdev);
5519 
5520 	if (rdev->pm.pm_method == PM_METHOD_DPM)
5521 		radeon_pm_resume(rdev);
5522 
5523 	rdev->accel_working = true;
5524 	r = evergreen_startup(rdev);
5525 	if (r) {
5526 		DRM_ERROR("evergreen startup failed on resume\n");
5527 		rdev->accel_working = false;
5528 		return r;
5529 	}
5530 
5531 	return r;
5532 
5533 }
5534 
/**
 * evergreen_suspend - disable the asic for suspend
 * @rdev: radeon_device pointer
 *
 * Quiesces the hw in a fixed order: power management and audio first,
 * then UVD, the CP and DMA engines, interrupts, writeback and finally
 * the GART.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	/* stop UVD before suspending its state */
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	/* halt the CP and DMA engines */
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	/* quiesce interrupts, then disable writeback and the GART */
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5549 
5550 /* Plan is to move initialization in that function and use
5551  * helper function so that radeon_device_init pretty much
5552  * do nothing more than calling asic specific function. This
5553  * should also allow to remove a bunch of callback function
5554  * like vram_info.
5555  */
/**
 * evergreen_init - asic specific driver and hw init
 * @rdev: radeon_device pointer
 *
 * One-time asic setup: reads and validates the (ATOM) BIOS, posts the
 * card if needed, initializes clocks, memory controller, fence driver
 * and rings, loads microcode and finally calls evergreen_startup().
 *
 * Returns 0 on success, a negative error code on failure.  Note that a
 * failure inside evergreen_startup() only disables acceleration; init
 * itself still succeeds in that case.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		/* NOTE(review): a missing BIOS is only treated as fatal
		 * on AVIVO parts here -- confirm that is intended.
		 */
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure is non-fatal, just disable AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* DCE5 (NI) parts need MC ucode in addition to me/pfp/rlc */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD init failure is non-fatal: simply don't set up its ring */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failure is not fatal here: tear the accel blocks
		 * back down and continue with acceleration disabled
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not suffient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5684 
/**
 * evergreen_fini - asic specific driver and hw teardown
 * @rdev: radeon_device pointer
 *
 * Undoes evergreen_init(): shuts down pm, audio, the CP/DMA engines,
 * interrupts, UVD, GART, memory management and atombios state, then
 * frees the cached BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	/* drop the BIOS copy and clear the pointer so nothing can use
	 * it after the free
	 */
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5709 
5710 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5711 {
5712 	u32 link_width_cntl, speed_cntl;
5713 
5714 	if (radeon_pcie_gen2 == 0)
5715 		return;
5716 
5717 	if (rdev->flags & RADEON_IS_IGP)
5718 		return;
5719 
5720 	if (!(rdev->flags & RADEON_IS_PCIE))
5721 		return;
5722 
5723 	/* x2 cards have a special sequence */
5724 	if (ASIC_IS_X2(rdev))
5725 		return;
5726 
5727 	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5728 		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5729 		return;
5730 
5731 	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5732 	if (speed_cntl & LC_CURRENT_DATA_RATE) {
5733 		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5734 		return;
5735 	}
5736 
5737 	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5738 
5739 	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5740 	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5741 
5742 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5743 		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5744 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5745 
5746 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5747 		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5748 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5749 
5750 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5751 		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5752 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5753 
5754 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5755 		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5756 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5757 
5758 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5759 		speed_cntl |= LC_GEN2_EN_STRAP;
5760 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5761 
5762 	} else {
5763 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5764 		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5765 		if (1)
5766 			link_width_cntl |= LC_UPCONFIGURE_DIS;
5767 		else
5768 			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5769 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5770 	}
5771 }
5772 
/**
 * evergreen_program_aspm - program ASPM related registers
 * @rdev: radeon_device pointer
 *
 * Configures PCIE Active State Power Management: PIF pairing, the
 * L0s/L1 inactivity timers and, when PLL power-down in L1 is allowed,
 * the PLL power states and ramp-up times.  Honors the radeon.aspm
 * module parameter and does nothing on non-PCIE parts.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	/* user disabled aspm via the radeon.aspm module parameter */
	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* L0s is disabled on these families; left enabled elsewhere */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing: cleared on fusion platforms, set otherwise.
	 * NOTE(review): exact MULTI_PIF semantics not visible here.
	 */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* build the new LC_CNTL value; only written back at the end if
	 * it actually changed
	 */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		/* program the PLL power state used while in L1 */
		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS and newer additionally program PLL ramp-up times */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
5922