xref: /openbmc/linux/drivers/gpu/drm/radeon/evergreen.c (revision a36954f5)
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <drm/drmP.h>
27 #include "radeon.h"
28 #include "radeon_asic.h"
29 #include "radeon_audio.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37 
38 /*
39  * Indirect registers accessor
40  */
41 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
42 {
43 	unsigned long flags;
44 	u32 r;
45 
46 	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
47 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
48 	r = RREG32(EVERGREEN_CG_IND_DATA);
49 	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
50 	return r;
51 }
52 
53 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
54 {
55 	unsigned long flags;
56 
57 	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
58 	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
59 	WREG32(EVERGREEN_CG_IND_DATA, (v));
60 	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
61 }
62 
63 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
64 {
65 	unsigned long flags;
66 	u32 r;
67 
68 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
69 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
70 	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
71 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
72 	return r;
73 }
74 
75 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
76 {
77 	unsigned long flags;
78 
79 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
80 	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
81 	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
82 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
83 }
84 
85 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
86 {
87 	unsigned long flags;
88 	u32 r;
89 
90 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
91 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
92 	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
93 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
94 	return r;
95 }
96 
97 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
98 {
99 	unsigned long flags;
100 
101 	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
102 	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
103 	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
104 	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
105 }
106 
/* Per-CRTC register block base offsets, indexed by CRTC number (0-5). */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
116 
117 #include "clearstate_evergreen.h"
118 
/*
 * Register offsets for Sumo-family RLC save/restore handling.
 * NOTE(review): presumably these are the registers the RLC firmware must
 * preserve across power transitions — the exact consumer is outside this
 * chunk; confirm against the sumo RLC init code before relying on this.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
203 
/*
 * Forward declarations: local helpers defined later in this file, plus
 * cross-ASIC helpers (cayman/cik/si/rv770) implemented in their own
 * source files and shared with the evergreen code paths.
 */
static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
219 
/*
 * "Golden" register settings for Cypress/Hemlock, applied via
 * radeon_program_register_sequence().  Entries are triplets of
 * {register offset, bit mask, value} — presumably read-modify-write
 * under the mask; confirm against radeon_program_register_sequence().
 */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
265 
/*
 * Second golden-register table for Cypress/Hemlock (applied after
 * evergreen_golden_registers).  Same {offset, mask, value} triplet
 * layout; all entries here clear their registers to zero.
 */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
283 
/*
 * MGCG (medium-grain clock gating) init sequence for Cypress, applied
 * via radeon_program_register_sequence().  {offset, mask, value}
 * triplets; note 0x802c is written several times to switch banks/modes
 * between the repeated 0x915c..0x929c sub-sequences — presumably an
 * index select; preserve the ordering exactly.
 */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
436 
/*
 * MGCG (medium-grain clock gating) init sequence for Redwood.
 * {offset, mask, value} triplets; shorter than the Cypress table as
 * Redwood has fewer 0x91xx/0x92xx entries.  0x802c bank-select writes
 * bracket the sequence — preserve ordering exactly.
 */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
508 
/*
 * Golden register settings for Cedar.  {offset, mask, value} triplets,
 * largely mirroring evergreen_golden_registers with Cedar-specific
 * values (e.g. 0x8cf0, 0x88d4) and fewer CRTC-related entries.
 */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
551 
/*
 * MGCG (medium-grain clock gating) init sequence for Cedar.
 * {offset, mask, value} triplets with 0x802c bank-select writes
 * bracketing the 0x915c..0x929c sub-sequence — preserve ordering.
 */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
605 
/*
 * MGCG (medium-grain clock gating) init sequence for Juniper.
 * {offset, mask, value} triplets; unlike the Cypress/Redwood tables,
 * some of the 0x977c/0x3f80/0xa21x/0x4dx/0x30cc setup comes after the
 * 0x91xx/0x92xx block here — preserve the ordering exactly.
 */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
703 
/*
 * Golden register settings for SuperSumo (TurboSumo) APUs.
 * {offset, mask, value} triplets applied via
 * radeon_program_register_sequence().
 */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
778 
/*
 * Additional golden register overrides for plain Sumo APUs
 * (applied on top of the supersumo table).  {offset, mask, value}.
 */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
787 
/*
 * Golden register settings for Wrestler (Palm) APUs.
 * {offset, mask, value} triplets; a reduced variant of the
 * supersumo table with Wrestler-specific values.
 */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
840 
/*
 * Golden register settings for Barts (Northern Islands).
 * {offset, mask, value} triplets; note these use partial masks
 * (read-modify-write) far more than the Evergreen tables.
 */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
889 
/*
 * Golden register settings for Turks (Northern Islands).
 * {offset, mask, value} triplets; parallels the Barts table with
 * Turks-specific values (0x8c8/0x8cc entries, different backend masks).
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
940 
/*
 * Golden register settings for Caicos (Northern Islands).
 * {offset, mask, value} triplets; same layout as the Barts/Turks
 * tables with Caicos-specific values.
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
991 
992 static void evergreen_init_golden_registers(struct radeon_device *rdev)
993 {
994 	switch (rdev->family) {
995 	case CHIP_CYPRESS:
996 	case CHIP_HEMLOCK:
997 		radeon_program_register_sequence(rdev,
998 						 evergreen_golden_registers,
999 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1000 		radeon_program_register_sequence(rdev,
1001 						 evergreen_golden_registers2,
1002 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1003 		radeon_program_register_sequence(rdev,
1004 						 cypress_mgcg_init,
1005 						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
1006 		break;
1007 	case CHIP_JUNIPER:
1008 		radeon_program_register_sequence(rdev,
1009 						 evergreen_golden_registers,
1010 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1011 		radeon_program_register_sequence(rdev,
1012 						 evergreen_golden_registers2,
1013 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1014 		radeon_program_register_sequence(rdev,
1015 						 juniper_mgcg_init,
1016 						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
1017 		break;
1018 	case CHIP_REDWOOD:
1019 		radeon_program_register_sequence(rdev,
1020 						 evergreen_golden_registers,
1021 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1022 		radeon_program_register_sequence(rdev,
1023 						 evergreen_golden_registers2,
1024 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1025 		radeon_program_register_sequence(rdev,
1026 						 redwood_mgcg_init,
1027 						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
1028 		break;
1029 	case CHIP_CEDAR:
1030 		radeon_program_register_sequence(rdev,
1031 						 cedar_golden_registers,
1032 						 (const u32)ARRAY_SIZE(cedar_golden_registers));
1033 		radeon_program_register_sequence(rdev,
1034 						 evergreen_golden_registers2,
1035 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1036 		radeon_program_register_sequence(rdev,
1037 						 cedar_mgcg_init,
1038 						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
1039 		break;
1040 	case CHIP_PALM:
1041 		radeon_program_register_sequence(rdev,
1042 						 wrestler_golden_registers,
1043 						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
1044 		break;
1045 	case CHIP_SUMO:
1046 		radeon_program_register_sequence(rdev,
1047 						 supersumo_golden_registers,
1048 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1049 		break;
1050 	case CHIP_SUMO2:
1051 		radeon_program_register_sequence(rdev,
1052 						 supersumo_golden_registers,
1053 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1054 		radeon_program_register_sequence(rdev,
1055 						 sumo_golden_registers,
1056 						 (const u32)ARRAY_SIZE(sumo_golden_registers));
1057 		break;
1058 	case CHIP_BARTS:
1059 		radeon_program_register_sequence(rdev,
1060 						 barts_golden_registers,
1061 						 (const u32)ARRAY_SIZE(barts_golden_registers));
1062 		break;
1063 	case CHIP_TURKS:
1064 		radeon_program_register_sequence(rdev,
1065 						 turks_golden_registers,
1066 						 (const u32)ARRAY_SIZE(turks_golden_registers));
1067 		break;
1068 	case CHIP_CAICOS:
1069 		radeon_program_register_sequence(rdev,
1070 						 caicos_golden_registers,
1071 						 (const u32)ARRAY_SIZE(caicos_golden_registers));
1072 		break;
1073 	default:
1074 		break;
1075 	}
1076 }
1077 
1078 /**
1079  * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1080  *
1081  * @rdev: radeon_device pointer
1082  * @reg: register offset in bytes
1083  * @val: register value
1084  *
1085  * Returns 0 for success or -EINVAL for an invalid register
1086  *
1087  */
1088 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1089 					u32 reg, u32 *val)
1090 {
1091 	switch (reg) {
1092 	case GRBM_STATUS:
1093 	case GRBM_STATUS_SE0:
1094 	case GRBM_STATUS_SE1:
1095 	case SRBM_STATUS:
1096 	case SRBM_STATUS2:
1097 	case DMA_STATUS_REG:
1098 	case UVD_STATUS:
1099 		*val = RREG32(reg);
1100 		return 0;
1101 	default:
1102 		return -EINVAL;
1103 	}
1104 }
1105 
1106 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1107 			     unsigned *bankh, unsigned *mtaspect,
1108 			     unsigned *tile_split)
1109 {
1110 	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1111 	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1112 	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1113 	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1114 	switch (*bankw) {
1115 	default:
1116 	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1117 	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1118 	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1119 	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1120 	}
1121 	switch (*bankh) {
1122 	default:
1123 	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1124 	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1125 	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1126 	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1127 	}
1128 	switch (*mtaspect) {
1129 	default:
1130 	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1131 	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1132 	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1133 	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1134 	}
1135 }
1136 
1137 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1138 			      u32 cntl_reg, u32 status_reg)
1139 {
1140 	int r, i;
1141 	struct atom_clock_dividers dividers;
1142 
1143 	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1144 					   clock, false, &dividers);
1145 	if (r)
1146 		return r;
1147 
1148 	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1149 
1150 	for (i = 0; i < 100; i++) {
1151 		if (RREG32(status_reg) & DCLK_STATUS)
1152 			break;
1153 		mdelay(10);
1154 	}
1155 	if (i == 100)
1156 		return -ETIMEDOUT;
1157 
1158 	return 0;
1159 }
1160 
1161 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1162 {
1163 	int r = 0;
1164 	u32 cg_scratch = RREG32(CG_SCRATCH1);
1165 
1166 	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1167 	if (r)
1168 		goto done;
1169 	cg_scratch &= 0xffff0000;
1170 	cg_scratch |= vclk / 100; /* Mhz */
1171 
1172 	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1173 	if (r)
1174 		goto done;
1175 	cg_scratch &= 0x0000ffff;
1176 	cg_scratch |= (dclk / 100) << 16; /* Mhz */
1177 
1178 done:
1179 	WREG32(CG_SCRATCH1, cg_scratch);
1180 
1181 	return r;
1182 }
1183 
/**
 * evergreen_set_uvd_clocks - program the UVD PLL (UPLL) for vclk/dclk
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock in 10 kHz units (0 = power down PLL)
 * @dclk: requested UVD decode clock in 10 kHz units (0 = power down PLL)
 *
 * Follows the documented UPLL bring-up sequence: switch the clocks to
 * bypass, reprogram the dividers, then switch back.  The order of the
 * register writes below is part of the hardware sequence — do not reorder.
 *
 * Returns 0 on success, or an error from divider calculation / the PLL
 * control request.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute fb and post dividers for the requested clocks within the
	 * UPLL's VCO constraints (125-250 MHz ref range, 16384 fb limit) */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	/* handshake with the SMC before reprogramming the dividers */
	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* select the spare bit depending on the feedback divider range */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1272 
1273 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1274 {
1275 	int readrq;
1276 	u16 v;
1277 
1278 	readrq = pcie_get_readrq(rdev->pdev);
1279 	v = ffs(readrq) - 8;
1280 	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1281 	 * to avoid hangs or perfomance issues
1282 	 */
1283 	if ((v == 0) || (v == 6) || (v == 7))
1284 		pcie_set_readrq(rdev->pdev, 512);
1285 }
1286 
1287 void dce4_program_fmt(struct drm_encoder *encoder)
1288 {
1289 	struct drm_device *dev = encoder->dev;
1290 	struct radeon_device *rdev = dev->dev_private;
1291 	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1292 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1293 	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1294 	int bpc = 0;
1295 	u32 tmp = 0;
1296 	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1297 
1298 	if (connector) {
1299 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1300 		bpc = radeon_get_monitor_bpc(connector);
1301 		dither = radeon_connector->dither;
1302 	}
1303 
1304 	/* LVDS/eDP FMT is set up by atom */
1305 	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1306 		return;
1307 
1308 	/* not needed for analog */
1309 	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1310 	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1311 		return;
1312 
1313 	if (bpc == 0)
1314 		return;
1315 
1316 	switch (bpc) {
1317 	case 6:
1318 		if (dither == RADEON_FMT_DITHER_ENABLE)
1319 			/* XXX sort out optimal dither settings */
1320 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1321 				FMT_SPATIAL_DITHER_EN);
1322 		else
1323 			tmp |= FMT_TRUNCATE_EN;
1324 		break;
1325 	case 8:
1326 		if (dither == RADEON_FMT_DITHER_ENABLE)
1327 			/* XXX sort out optimal dither settings */
1328 			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1329 				FMT_RGB_RANDOM_ENABLE |
1330 				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1331 		else
1332 			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1333 		break;
1334 	case 10:
1335 	default:
1336 		/* not needed */
1337 		break;
1338 	}
1339 
1340 	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1341 }
1342 
1343 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1344 {
1345 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1346 		return true;
1347 	else
1348 		return false;
1349 }
1350 
1351 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1352 {
1353 	u32 pos1, pos2;
1354 
1355 	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1356 	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1357 
1358 	if (pos1 != pos2)
1359 		return true;
1360 	else
1361 		return false;
1362 }
1363 
1364 /**
1365  * dce4_wait_for_vblank - vblank wait asic callback.
1366  *
1367  * @rdev: radeon_device pointer
1368  * @crtc: crtc to wait for vblank on
1369  *
1370  * Wait for vblank on the requested crtc (evergreen+).
1371  */
1372 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1373 {
1374 	unsigned i = 0;
1375 
1376 	if (crtc >= rdev->num_crtc)
1377 		return;
1378 
1379 	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1380 		return;
1381 
1382 	/* depending on when we hit vblank, we may be close to active; if so,
1383 	 * wait for another frame.
1384 	 */
1385 	while (dce4_is_in_vblank(rdev, crtc)) {
1386 		if (i++ % 100 == 0) {
1387 			if (!dce4_is_counter_moving(rdev, crtc))
1388 				break;
1389 		}
1390 	}
1391 
1392 	while (!dce4_is_in_vblank(rdev, crtc)) {
1393 		if (i++ % 100 == 0) {
1394 			if (!dce4_is_counter_moving(rdev, crtc))
1395 				break;
1396 		}
1397 	}
1398 }
1399 
1400 /**
1401  * evergreen_page_flip - pageflip callback.
1402  *
1403  * @rdev: radeon_device pointer
1404  * @crtc_id: crtc to cleanup pageflip on
1405  * @crtc_base: new address of the crtc (GPU MC address)
1406  *
1407  * Triggers the actual pageflip by updating the primary
1408  * surface base address (evergreen+).
1409  */
1410 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
1411 			 bool async)
1412 {
1413 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1414 
1415 	/* update the scanout addresses */
1416 	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
1417 	       async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
1418 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1419 	       upper_32_bits(crtc_base));
1420 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1421 	       (u32)crtc_base);
1422 	/* post the write */
1423 	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
1424 }
1425 
1426 /**
1427  * evergreen_page_flip_pending - check if page flip is still pending
1428  *
1429  * @rdev: radeon_device pointer
1430  * @crtc_id: crtc to check
1431  *
1432  * Returns the current update pending status.
1433  */
1434 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1435 {
1436 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1437 
1438 	/* Return current update_pending status: */
1439 	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1440 		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1441 }
1442 
1443 /* get temperature in millidegrees */
1444 int evergreen_get_temp(struct radeon_device *rdev)
1445 {
1446 	u32 temp, toffset;
1447 	int actual_temp = 0;
1448 
1449 	if (rdev->family == CHIP_JUNIPER) {
1450 		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1451 			TOFFSET_SHIFT;
1452 		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1453 			TS0_ADC_DOUT_SHIFT;
1454 
1455 		if (toffset & 0x100)
1456 			actual_temp = temp / 2 - (0x200 - toffset);
1457 		else
1458 			actual_temp = temp / 2 + toffset;
1459 
1460 		actual_temp = actual_temp * 1000;
1461 
1462 	} else {
1463 		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1464 			ASIC_T_SHIFT;
1465 
1466 		if (temp & 0x400)
1467 			actual_temp = -256;
1468 		else if (temp & 0x200)
1469 			actual_temp = 255;
1470 		else if (temp & 0x100) {
1471 			actual_temp = temp & 0x1ff;
1472 			actual_temp |= ~0x1ff;
1473 		} else
1474 			actual_temp = temp & 0xff;
1475 
1476 		actual_temp = (actual_temp * 1000) / 2;
1477 	}
1478 
1479 	return actual_temp;
1480 }
1481 
1482 int sumo_get_temp(struct radeon_device *rdev)
1483 {
1484 	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1485 	int actual_temp = temp - 49;
1486 
1487 	return actual_temp * 1000;
1488 }
1489 
1490 /**
1491  * sumo_pm_init_profile - Initialize power profiles callback.
1492  *
1493  * @rdev: radeon_device pointer
1494  *
1495  * Initialize the power states used in profile mode
1496  * (sumo, trinity, SI).
1497  * Used for profile mode only.
1498  */
1499 void sumo_pm_init_profile(struct radeon_device *rdev)
1500 {
1501 	int idx;
1502 
1503 	/* default */
1504 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1505 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1506 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1507 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1508 
1509 	/* low,mid sh/mh */
1510 	if (rdev->flags & RADEON_IS_MOBILITY)
1511 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1512 	else
1513 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1514 
1515 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1516 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1517 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1518 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1519 
1520 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1521 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1522 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1523 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1524 
1525 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1526 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1527 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1528 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1529 
1530 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1531 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1532 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1533 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1534 
1535 	/* high sh/mh */
1536 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1537 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1538 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1539 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1540 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1541 		rdev->pm.power_state[idx].num_clock_modes - 1;
1542 
1543 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1544 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1545 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1546 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1547 		rdev->pm.power_state[idx].num_clock_modes - 1;
1548 }
1549 
1550 /**
1551  * btc_pm_init_profile - Initialize power profiles callback.
1552  *
1553  * @rdev: radeon_device pointer
1554  *
1555  * Initialize the power states used in profile mode
1556  * (BTC, cayman).
1557  * Used for profile mode only.
1558  */
1559 void btc_pm_init_profile(struct radeon_device *rdev)
1560 {
1561 	int idx;
1562 
1563 	/* default */
1564 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1565 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1566 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1567 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1568 	/* starting with BTC, there is one state that is used for both
1569 	 * MH and SH.  Difference is that we always use the high clock index for
1570 	 * mclk.
1571 	 */
1572 	if (rdev->flags & RADEON_IS_MOBILITY)
1573 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1574 	else
1575 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1576 	/* low sh */
1577 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1578 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1579 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1580 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1581 	/* mid sh */
1582 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1583 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1584 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1585 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1586 	/* high sh */
1587 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1588 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1589 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1590 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1591 	/* low mh */
1592 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1593 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1594 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1595 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1596 	/* mid mh */
1597 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1598 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1599 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1600 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1601 	/* high mh */
1602 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1603 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1604 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1605 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1606 }
1607 
1608 /**
1609  * evergreen_pm_misc - set additional pm hw parameters callback.
1610  *
1611  * @rdev: radeon_device pointer
1612  *
1613  * Set non-clock parameters associated with a power state
1614  * (voltage, etc.) (evergreen+).
1615  */
1616 void evergreen_pm_misc(struct radeon_device *rdev)
1617 {
1618 	int req_ps_idx = rdev->pm.requested_power_state_index;
1619 	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1620 	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1621 	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1622 
1623 	if (voltage->type == VOLTAGE_SW) {
1624 		/* 0xff0x are flags rather then an actual voltage */
1625 		if ((voltage->voltage & 0xff00) == 0xff00)
1626 			return;
1627 		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1628 			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1629 			rdev->pm.current_vddc = voltage->voltage;
1630 			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1631 		}
1632 
1633 		/* starting with BTC, there is one state that is used for both
1634 		 * MH and SH.  Difference is that we always use the high clock index for
1635 		 * mclk and vddci.
1636 		 */
1637 		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1638 		    (rdev->family >= CHIP_BARTS) &&
1639 		    rdev->pm.active_crtc_count &&
1640 		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1641 		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1642 			voltage = &rdev->pm.power_state[req_ps_idx].
1643 				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1644 
1645 		/* 0xff0x are flags rather then an actual voltage */
1646 		if ((voltage->vddci & 0xff00) == 0xff00)
1647 			return;
1648 		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1649 			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1650 			rdev->pm.current_vddci = voltage->vddci;
1651 			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1652 		}
1653 	}
1654 }
1655 
1656 /**
1657  * evergreen_pm_prepare - pre-power state change callback.
1658  *
1659  * @rdev: radeon_device pointer
1660  *
1661  * Prepare for a power state change (evergreen+).
1662  */
1663 void evergreen_pm_prepare(struct radeon_device *rdev)
1664 {
1665 	struct drm_device *ddev = rdev->ddev;
1666 	struct drm_crtc *crtc;
1667 	struct radeon_crtc *radeon_crtc;
1668 	u32 tmp;
1669 
1670 	/* disable any active CRTCs */
1671 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1672 		radeon_crtc = to_radeon_crtc(crtc);
1673 		if (radeon_crtc->enabled) {
1674 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1675 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1676 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1677 		}
1678 	}
1679 }
1680 
1681 /**
1682  * evergreen_pm_finish - post-power state change callback.
1683  *
1684  * @rdev: radeon_device pointer
1685  *
1686  * Clean up after a power state change (evergreen+).
1687  */
1688 void evergreen_pm_finish(struct radeon_device *rdev)
1689 {
1690 	struct drm_device *ddev = rdev->ddev;
1691 	struct drm_crtc *crtc;
1692 	struct radeon_crtc *radeon_crtc;
1693 	u32 tmp;
1694 
1695 	/* enable any active CRTCs */
1696 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1697 		radeon_crtc = to_radeon_crtc(crtc);
1698 		if (radeon_crtc->enabled) {
1699 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1700 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1701 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1702 		}
1703 	}
1704 }
1705 
1706 /**
1707  * evergreen_hpd_sense - hpd sense callback.
1708  *
1709  * @rdev: radeon_device pointer
1710  * @hpd: hpd (hotplug detect) pin
1711  *
1712  * Checks if a digital monitor is connected (evergreen+).
1713  * Returns true if connected, false if not connected.
1714  */
1715 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1716 {
1717 	bool connected = false;
1718 
1719 	switch (hpd) {
1720 	case RADEON_HPD_1:
1721 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1722 			connected = true;
1723 		break;
1724 	case RADEON_HPD_2:
1725 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1726 			connected = true;
1727 		break;
1728 	case RADEON_HPD_3:
1729 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1730 			connected = true;
1731 		break;
1732 	case RADEON_HPD_4:
1733 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1734 			connected = true;
1735 		break;
1736 	case RADEON_HPD_5:
1737 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1738 			connected = true;
1739 		break;
1740 	case RADEON_HPD_6:
1741 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1742 			connected = true;
1743 		break;
1744 	default:
1745 		break;
1746 	}
1747 
1748 	return connected;
1749 }
1750 
1751 /**
1752  * evergreen_hpd_set_polarity - hpd set polarity callback.
1753  *
1754  * @rdev: radeon_device pointer
1755  * @hpd: hpd (hotplug detect) pin
1756  *
1757  * Set the polarity of the hpd pin (evergreen+).
1758  */
1759 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1760 				enum radeon_hpd_id hpd)
1761 {
1762 	u32 tmp;
1763 	bool connected = evergreen_hpd_sense(rdev, hpd);
1764 
1765 	switch (hpd) {
1766 	case RADEON_HPD_1:
1767 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1768 		if (connected)
1769 			tmp &= ~DC_HPDx_INT_POLARITY;
1770 		else
1771 			tmp |= DC_HPDx_INT_POLARITY;
1772 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1773 		break;
1774 	case RADEON_HPD_2:
1775 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1776 		if (connected)
1777 			tmp &= ~DC_HPDx_INT_POLARITY;
1778 		else
1779 			tmp |= DC_HPDx_INT_POLARITY;
1780 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1781 		break;
1782 	case RADEON_HPD_3:
1783 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1784 		if (connected)
1785 			tmp &= ~DC_HPDx_INT_POLARITY;
1786 		else
1787 			tmp |= DC_HPDx_INT_POLARITY;
1788 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1789 		break;
1790 	case RADEON_HPD_4:
1791 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1792 		if (connected)
1793 			tmp &= ~DC_HPDx_INT_POLARITY;
1794 		else
1795 			tmp |= DC_HPDx_INT_POLARITY;
1796 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1797 		break;
1798 	case RADEON_HPD_5:
1799 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1800 		if (connected)
1801 			tmp &= ~DC_HPDx_INT_POLARITY;
1802 		else
1803 			tmp |= DC_HPDx_INT_POLARITY;
1804 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1805 			break;
1806 	case RADEON_HPD_6:
1807 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1808 		if (connected)
1809 			tmp &= ~DC_HPDx_INT_POLARITY;
1810 		else
1811 			tmp |= DC_HPDx_INT_POLARITY;
1812 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1813 		break;
1814 	default:
1815 		break;
1816 	}
1817 }
1818 
1819 /**
1820  * evergreen_hpd_init - hpd setup callback.
1821  *
1822  * @rdev: radeon_device pointer
1823  *
1824  * Setup the hpd pins used by the card (evergreen+).
1825  * Enable the pin, set the polarity, and enable the hpd interrupts.
1826  */
1827 void evergreen_hpd_init(struct radeon_device *rdev)
1828 {
1829 	struct drm_device *dev = rdev->ddev;
1830 	struct drm_connector *connector;
1831 	unsigned enabled = 0;
1832 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1833 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1834 
1835 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1836 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1837 
1838 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1839 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1840 			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1841 			 * aux dp channel on imac and help (but not completely fix)
1842 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1843 			 * also avoid interrupt storms during dpms.
1844 			 */
1845 			continue;
1846 		}
1847 		switch (radeon_connector->hpd.hpd) {
1848 		case RADEON_HPD_1:
1849 			WREG32(DC_HPD1_CONTROL, tmp);
1850 			break;
1851 		case RADEON_HPD_2:
1852 			WREG32(DC_HPD2_CONTROL, tmp);
1853 			break;
1854 		case RADEON_HPD_3:
1855 			WREG32(DC_HPD3_CONTROL, tmp);
1856 			break;
1857 		case RADEON_HPD_4:
1858 			WREG32(DC_HPD4_CONTROL, tmp);
1859 			break;
1860 		case RADEON_HPD_5:
1861 			WREG32(DC_HPD5_CONTROL, tmp);
1862 			break;
1863 		case RADEON_HPD_6:
1864 			WREG32(DC_HPD6_CONTROL, tmp);
1865 			break;
1866 		default:
1867 			break;
1868 		}
1869 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1870 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1871 			enabled |= 1 << radeon_connector->hpd.hpd;
1872 	}
1873 	radeon_irq_kms_enable_hpd(rdev, enabled);
1874 }
1875 
1876 /**
1877  * evergreen_hpd_fini - hpd tear down callback.
1878  *
1879  * @rdev: radeon_device pointer
1880  *
1881  * Tear down the hpd pins used by the card (evergreen+).
1882  * Disable the hpd interrupts.
1883  */
1884 void evergreen_hpd_fini(struct radeon_device *rdev)
1885 {
1886 	struct drm_device *dev = rdev->ddev;
1887 	struct drm_connector *connector;
1888 	unsigned disabled = 0;
1889 
1890 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1891 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1892 		switch (radeon_connector->hpd.hpd) {
1893 		case RADEON_HPD_1:
1894 			WREG32(DC_HPD1_CONTROL, 0);
1895 			break;
1896 		case RADEON_HPD_2:
1897 			WREG32(DC_HPD2_CONTROL, 0);
1898 			break;
1899 		case RADEON_HPD_3:
1900 			WREG32(DC_HPD3_CONTROL, 0);
1901 			break;
1902 		case RADEON_HPD_4:
1903 			WREG32(DC_HPD4_CONTROL, 0);
1904 			break;
1905 		case RADEON_HPD_5:
1906 			WREG32(DC_HPD5_CONTROL, 0);
1907 			break;
1908 		case RADEON_HPD_6:
1909 			WREG32(DC_HPD6_CONTROL, 0);
1910 			break;
1911 		default:
1912 			break;
1913 		}
1914 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1915 			disabled |= 1 << radeon_connector->hpd.hpd;
1916 	}
1917 	radeon_irq_kms_disable_hpd(rdev, disabled);
1918 }
1919 
1920 /* watermark setup */
1921 
/**
 * evergreen_line_buffer_adjust - program the line buffer split for a crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc whose line buffer allocation is being programmed
 * @mode: mode on this crtc (disabled crtc gets no allocation)
 * @other_mode: mode on the crtc this one shares a line buffer with
 *
 * Programs DC_LB_MEMORY_SPLIT for the crtc and, on DCE4.1/DCE5, the DMIF
 * buffer allocation.  Returns the line buffer size granted to this crtc
 * (in entries), or 0 if the crtc is disabled.
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/DCE5 also need the DMIF buffers assigned; poll until the
	 * hardware acknowledges the allocation */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* translate the split selection into the lb size granted; DCE5 has
	 * a larger line buffer than DCE4 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
2012 
2013 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2014 {
2015 	u32 tmp = RREG32(MC_SHARED_CHMAP);
2016 
2017 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2018 	case 0:
2019 	default:
2020 		return 1;
2021 	case 1:
2022 		return 2;
2023 	case 2:
2024 		return 4;
2025 	case 3:
2026 		return 8;
2027 	}
2028 }
2029 
/* Input parameters for one display head used by the watermark helpers below. */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
2045 
2046 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2047 {
2048 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2049 	fixed20_12 dram_efficiency; /* 0.7 */
2050 	fixed20_12 yclk, dram_channels, bandwidth;
2051 	fixed20_12 a;
2052 
2053 	a.full = dfixed_const(1000);
2054 	yclk.full = dfixed_const(wm->yclk);
2055 	yclk.full = dfixed_div(yclk, a);
2056 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2057 	a.full = dfixed_const(10);
2058 	dram_efficiency.full = dfixed_const(7);
2059 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
2060 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2061 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2062 
2063 	return dfixed_trunc(bandwidth);
2064 }
2065 
2066 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2067 {
2068 	/* Calculate DRAM Bandwidth and the part allocated to display. */
2069 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2070 	fixed20_12 yclk, dram_channels, bandwidth;
2071 	fixed20_12 a;
2072 
2073 	a.full = dfixed_const(1000);
2074 	yclk.full = dfixed_const(wm->yclk);
2075 	yclk.full = dfixed_div(yclk, a);
2076 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2077 	a.full = dfixed_const(10);
2078 	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2079 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2080 	bandwidth.full = dfixed_mul(dram_channels, yclk);
2081 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2082 
2083 	return dfixed_trunc(bandwidth);
2084 }
2085 
2086 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2087 {
2088 	/* Calculate the display Data return Bandwidth */
2089 	fixed20_12 return_efficiency; /* 0.8 */
2090 	fixed20_12 sclk, bandwidth;
2091 	fixed20_12 a;
2092 
2093 	a.full = dfixed_const(1000);
2094 	sclk.full = dfixed_const(wm->sclk);
2095 	sclk.full = dfixed_div(sclk, a);
2096 	a.full = dfixed_const(10);
2097 	return_efficiency.full = dfixed_const(8);
2098 	return_efficiency.full = dfixed_div(return_efficiency, a);
2099 	a.full = dfixed_const(32);
2100 	bandwidth.full = dfixed_mul(a, sclk);
2101 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2102 
2103 	return dfixed_trunc(bandwidth);
2104 }
2105 
2106 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2107 {
2108 	/* Calculate the DMIF Request Bandwidth */
2109 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2110 	fixed20_12 disp_clk, bandwidth;
2111 	fixed20_12 a;
2112 
2113 	a.full = dfixed_const(1000);
2114 	disp_clk.full = dfixed_const(wm->disp_clk);
2115 	disp_clk.full = dfixed_div(disp_clk, a);
2116 	a.full = dfixed_const(10);
2117 	disp_clk_request_efficiency.full = dfixed_const(8);
2118 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2119 	a.full = dfixed_const(32);
2120 	bandwidth.full = dfixed_mul(a, disp_clk);
2121 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2122 
2123 	return dfixed_trunc(bandwidth);
2124 }
2125 
2126 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2127 {
2128 	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2129 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2130 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2131 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2132 
2133 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2134 }
2135 
2136 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2137 {
2138 	/* Calculate the display mode Average Bandwidth
2139 	 * DisplayMode should contain the source and destination dimensions,
2140 	 * timing, etc.
2141 	 */
2142 	fixed20_12 bpp;
2143 	fixed20_12 line_time;
2144 	fixed20_12 src_width;
2145 	fixed20_12 bandwidth;
2146 	fixed20_12 a;
2147 
2148 	a.full = dfixed_const(1000);
2149 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2150 	line_time.full = dfixed_div(line_time, a);
2151 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2152 	src_width.full = dfixed_const(wm->src_width);
2153 	bandwidth.full = dfixed_mul(src_width, bpp);
2154 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2155 	bandwidth.full = dfixed_div(bandwidth, line_time);
2156 
2157 	return dfixed_trunc(bandwidth);
2158 }
2159 
2160 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2161 {
2162 	/* First calcualte the latency in ns */
2163 	u32 mc_latency = 2000; /* 2000 ns. */
2164 	u32 available_bandwidth = evergreen_available_bandwidth(wm);
2165 	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2166 	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2167 	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2168 	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2169 		(wm->num_heads * cursor_line_pair_return_time);
2170 	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2171 	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2172 	fixed20_12 a, b, c;
2173 
2174 	if (wm->num_heads == 0)
2175 		return 0;
2176 
2177 	a.full = dfixed_const(2);
2178 	b.full = dfixed_const(1);
2179 	if ((wm->vsc.full > a.full) ||
2180 	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2181 	    (wm->vtaps >= 5) ||
2182 	    ((wm->vsc.full >= a.full) && wm->interlaced))
2183 		max_src_lines_per_dst_line = 4;
2184 	else
2185 		max_src_lines_per_dst_line = 2;
2186 
2187 	a.full = dfixed_const(available_bandwidth);
2188 	b.full = dfixed_const(wm->num_heads);
2189 	a.full = dfixed_div(a, b);
2190 
2191 	lb_fill_bw = min(dfixed_trunc(a), wm->disp_clk * wm->bytes_per_pixel / 1000);
2192 
2193 	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2194 	b.full = dfixed_const(1000);
2195 	c.full = dfixed_const(lb_fill_bw);
2196 	b.full = dfixed_div(c, b);
2197 	a.full = dfixed_div(a, b);
2198 	line_fill_time = dfixed_trunc(a);
2199 
2200 	if (line_fill_time < wm->active_time)
2201 		return latency;
2202 	else
2203 		return latency + (line_fill_time - wm->active_time);
2204 
2205 }
2206 
2207 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2208 {
2209 	if (evergreen_average_bandwidth(wm) <=
2210 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2211 		return true;
2212 	else
2213 		return false;
2214 };
2215 
2216 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2217 {
2218 	if (evergreen_average_bandwidth(wm) <=
2219 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2220 		return true;
2221 	else
2222 		return false;
2223 };
2224 
/* Can the latency watermark be hidden within the time the line buffer
 * buys us (tolerant lines plus blank time)? */
static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
{
	/* number of whole source lines the allocated lb can hold */
	u32 lb_partitions = wm->lb_size / wm->src_width;
	u32 line_time = wm->active_time + wm->blank_time;
	u32 latency_tolerant_lines;
	u32 latency_hiding;
	fixed20_12 a;

	/* downscaling (vsc > 1) or a small lb allocation only tolerates
	 * one line of latency; otherwise two */
	a.full = dfixed_const(1);
	if (wm->vsc.full > a.full)
		latency_tolerant_lines = 1;
	else {
		if (lb_partitions <= (wm->vtaps + 1))
			latency_tolerant_lines = 1;
		else
			latency_tolerant_lines = 2;
	}

	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);

	if (evergreen_latency_watermark(wm) <= latency_hiding)
		return true;
	else
		return false;
}
2250 
/**
 * evergreen_program_watermarks - program display watermarks for a crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: crtc whose watermarks are being programmed
 * @lb_size: line buffer size allocated to this crtc
 * @num_heads: number of active display heads
 *
 * Computes latency watermarks for the high (wm A) and low (wm B) clock
 * levels, programs them into the pipe's arbitration registers, and sets
 * the priority marks used to raise display request priority when
 * bandwidth is tight.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 active_time;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* active/total line time in ns; line_time is written to a
		 * 16-bit register field, so cap it at 65535 */
		active_time = 1000000UL * (u32)mode->crtc_hdisplay / (u32)mode->clock;
		line_time = min((u32) (1000000UL * (u32)mode->crtc_htotal / (u32)mode->clock), (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = active_time;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = active_time;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A: watermark_a (ns) * pixel clock * hsc,
		 * scaled into units of 16 pixels */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B: same computation with watermark_b */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;

		/* Save number of lines the linebuffer leads before the scanout */
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2408 
2409 /**
2410  * evergreen_bandwidth_update - update display watermarks callback.
2411  *
2412  * @rdev: radeon_device pointer
2413  *
2414  * Update the display watermarks based on the requested mode(s)
2415  * (evergreen+).
2416  */
2417 void evergreen_bandwidth_update(struct radeon_device *rdev)
2418 {
2419 	struct drm_display_mode *mode0 = NULL;
2420 	struct drm_display_mode *mode1 = NULL;
2421 	u32 num_heads = 0, lb_size;
2422 	int i;
2423 
2424 	if (!rdev->mode_info.mode_config_initialized)
2425 		return;
2426 
2427 	radeon_update_display_priority(rdev);
2428 
2429 	for (i = 0; i < rdev->num_crtc; i++) {
2430 		if (rdev->mode_info.crtcs[i]->base.enabled)
2431 			num_heads++;
2432 	}
2433 	for (i = 0; i < rdev->num_crtc; i += 2) {
2434 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2435 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2436 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2437 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2438 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2439 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2440 	}
2441 }
2442 
2443 /**
2444  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2445  *
2446  * @rdev: radeon_device pointer
2447  *
2448  * Wait for the MC (memory controller) to be idle.
2449  * (evergreen+).
2450  * Returns 0 if the MC is idle, -1 if not.
2451  */
2452 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2453 {
2454 	unsigned i;
2455 	u32 tmp;
2456 
2457 	for (i = 0; i < rdev->usec_timeout; i++) {
2458 		/* read MC_STATUS */
2459 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2460 		if (!tmp)
2461 			return 0;
2462 		udelay(1);
2463 	}
2464 	return -1;
2465 }
2466 
2467 /*
2468  * GART
2469  */
/* Flush the VM TLB for context 0 and wait for the MC to acknowledge. */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	/* flush the HDP cache so the MC sees up-to-date page table writes */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	/* issue the flush request, then poll the response field */
	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read MC_STATUS */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			/* NOTE(review): response type 2 is treated as a
			 * failure here — confirm against register docs */
			pr_warn("[drm] r600 flush TLB failed\n");
			return;
		}
		if (tmp) {
			/* any other non-zero response: flush completed */
			return;
		}
		udelay(1);
	}
}
2492 
/**
 * evergreen_pcie_gart_enable - set up and enable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Pins the GART table in VRAM, programs the VM L2 cache and L1 TLBs,
 * points VM context 0 at the GART aperture, and flushes the TLB.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	/* IGPs use the FUS_ (fusion) variants of the MD TLB registers */
	if (rdev->flags & RADEON_IS_IGP) {
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these chips have a fourth MD TLB */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* point VM context 0 at the GART aperture and its page table */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faults redirect to the dummy page instead of scribbling memory */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2550 
/* Disable the PCIE GART: turn off both VM contexts, shrink the L2/L1
 * TLB configuration, and unpin the GART table. */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2575 
/* Full GART teardown: disable the hardware, free the table, free state. */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2582 
2583 
/* Configure the VM L2/L1 TLBs for AGP operation with both VM contexts
 * disabled (system pages are accessed directly, no GART translation). */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page-table translation in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2609 
/* register block offsets for the six DIG encoders (DIG0..DIG5) */
static const unsigned ni_dig_offsets[] =
{
	NI_DIG0_REGISTER_OFFSET,
	NI_DIG1_REGISTER_OFFSET,
	NI_DIG2_REGISTER_OFFSET,
	NI_DIG3_REGISTER_OFFSET,
	NI_DIG4_REGISTER_OFFSET,
	NI_DIG5_REGISTER_OFFSET
};
2619 
/* UNIPHY TX control register offsets, indexed like ni_dig_offsets */
static const unsigned ni_tx_offsets[] =
{
	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
};
2629 
/* DP register block offsets, indexed by dig_fe (see evergreen_blank_dp_output) */
static const unsigned evergreen_dp_offsets[] =
{
	EVERGREEN_DP0_REGISTER_OFFSET,
	EVERGREEN_DP1_REGISTER_OFFSET,
	EVERGREEN_DP2_REGISTER_OFFSET,
	EVERGREEN_DP3_REGISTER_OFFSET,
	EVERGREEN_DP4_REGISTER_OFFSET,
	EVERGREEN_DP5_REGISTER_OFFSET
};
2639 
2640 
2641 /*
2642  * Assumption is that EVERGREEN_CRTC_MASTER_EN enable for requested crtc
2643  * We go from crtc to connector and it is not relible  since it
2644  * should be an opposite direction .If crtc is enable then
2645  * find the dig_fe which selects this crtc and insure that it enable.
2646  * if such dig_fe is found then find dig_be which selects found dig_be and
2647  * insure that it enable and in DP_SST mode.
2648  * if UNIPHY_PLL_CONTROL1.enable then we should disconnect timing
2649  * from dp symbols clocks .
2650  */
/* Returns true when @crtc_id is driven by an active DP SST stream; on
 * success the driving dig_fe index is stored in @ret_dig_fe. */
static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
					       unsigned crtc_id, unsigned *ret_dig_fe)
{
	unsigned i;
	unsigned dig_fe;
	unsigned dig_be;
	unsigned dig_en_be;
	unsigned uniphy_pll;
	unsigned digs_fe_selected;
	unsigned dig_be_mode;
	unsigned dig_fe_mask;	/* set only when found_crtc becomes true */
	bool is_enabled = false;
	bool found_crtc = false;

	/* loop through all running dig_fe to find selected crtc */
	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
			/* found running pipe */
			found_crtc = true;
			dig_fe_mask = 1 << i;
			dig_fe = i;
			break;
		}
	}

	if (found_crtc) {
		/* loop through all running dig_be to find selected dig_fe */
		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
			/* if dig_fe_selected by dig_be? */
			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
			if (dig_fe_mask &  digs_fe_selected &&
			    /* if dig_be in sst mode? */
			    dig_be_mode == NI_DIG_BE_DPSST) {
				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
						   ni_dig_offsets[i]);
				/* NOTE(review): assumes dig_be index i maps
				 * 1:1 to uniphy index — confirm */
				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
						    ni_tx_offsets[i]);
				/* dig_be enable and tx is running */
				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
					is_enabled = true;
					*ret_dig_fe = dig_fe;
					break;
				}
			}
		}
	}

	return is_enabled;
}
2706 
2707 /*
2708  * Blank dig when in dp sst mode
2709  * Dig ignores crtc timing
2710  */
2711 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2712 				      unsigned dig_fe)
2713 {
2714 	unsigned stream_ctrl;
2715 	unsigned fifo_ctrl;
2716 	unsigned counter = 0;
2717 
2718 	if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2719 		DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2720 		return;
2721 	}
2722 
2723 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2724 			     evergreen_dp_offsets[dig_fe]);
2725 	if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2726 		DRM_ERROR("dig %d , should be enable\n", dig_fe);
2727 		return;
2728 	}
2729 
2730 	stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2731 	WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2732 	       evergreen_dp_offsets[dig_fe], stream_ctrl);
2733 
2734 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2735 			     evergreen_dp_offsets[dig_fe]);
2736 	while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2737 		msleep(1);
2738 		counter++;
2739 		stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2740 				     evergreen_dp_offsets[dig_fe]);
2741 	}
2742 	if (counter >= 32 )
2743 		DRM_ERROR("counter exceeds %d\n", counter);
2744 
2745 	fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2746 	fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2747 	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2748 
2749 }
2750 
/**
 * evergreen_mc_stop - stop the display engines so the MC can be reprogrammed
 * @rdev: radeon_device pointer
 * @save: evergreen_mc_save struct used to remember the display state
 *
 * Saves and disables VGA rendering, blanks every active CRTC (waiting for
 * the next frame so the blanking takes effect), puts the memory controller
 * into blackout mode and locks the double buffered display registers.
 * The saved state is restored later by evergreen_mc_resume().
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;
	unsigned dig_fe;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank the CRTC via the blank control register */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop the CRTC's display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
			/* We should disable the dig if it drives a DP SST stream,
			 * but we are called from radeon_device_init and the
			 * display topology is unknown at that point (it is only
			 * available after radeon_modeset_init).  The method
			 * radeon_atom_encoder_dpms_dig would do the job if it
			 * were initialized properly, so for now we blank the DP
			 * output manually.
			 */
			if (ASIC_IS_DCE5(rdev) &&
			    evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
				evergreen_blank_dp_output(rdev, dig_fe);
			/* XXX this is a hack to avoid strange behavior with EFI
			 * on certain systems: fully disable the CRTC master
			 * enable and mark the crtc as not enabled so it is not
			 * touched again below.  The six lines following this
			 * comment could otherwise be removed.
			 */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			save->crtc_enabled[i] = false;
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2848 
/**
 * evergreen_mc_resume - restore the display state saved by evergreen_mc_stop
 * @rdev: radeon_device pointer
 * @save: evergreen_mc_save struct filled in by evergreen_mc_stop()
 *
 * Points all CRTC surfaces and the VGA aperture at the (possibly moved)
 * VRAM base, unlocks the double buffered display registers and waits for
 * pending surface updates, ends MC blackout, re-enables CPU framebuffer
 * access and unblanks any CRTCs that were enabled before the stop.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* switch master update mode back to 0 */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 0) {
				tmp &= ~0x7;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait until the surface update has been latched */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank via the CRTC blank control register */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2936 
/**
 * evergreen_mc_program - program the memory controller address map
 * @rdev: radeon_device pointer
 *
 * Initializes the HDP registers, stops the displays (evergreen_mc_stop),
 * programs the system/AGP apertures and the VRAM framebuffer location,
 * then restores the displays (evergreen_mc_resume) and disables the VGA
 * renderer so it cannot overwrite our VRAM objects.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* the system aperture must cover both VRAM and AGP,
		 * whichever order they appear in the address space */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start/end packed in 16MB units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* no AGP: park the AGP aperture (BOT > TOP disables it) */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
3013 
3014 /*
3015  * CP.
3016  */
/**
 * evergreen_ring_ib_execute - emit an indirect buffer on the gfx ring
 * @rdev: radeon_device pointer
 * @ib: radeon indirect buffer object to schedule
 *
 * Switches the CP to DX10/11 mode, emits a next_rptr update (either to
 * the ring's rptr save register or via a MEM_WRITE packet when writeback
 * is enabled), then emits the INDIRECT_BUFFER packet pointing the CP at
 * the IB's GPU address and length.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 words emitted so far + 4 for this packet */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* write next_rptr to the writeback buffer instead */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
3050 
3051 
/**
 * evergreen_cp_load_microcode - upload the CP microcode to the GPU
 * @rdev: radeon_device pointer
 *
 * Stops the CP, then writes the PFP and ME firmware images word by word
 * into the CP ucode RAMs, resetting the write addresses before and after
 * each upload.  The firmware blobs are stored big-endian and byte-swapped
 * as they are written.
 *
 * Returns 0 on success, -EINVAL if the ME or PFP firmware is not loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* upload PFP firmware */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* upload ME firmware */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
3083 
/**
 * evergreen_cp_start - initialize the CP and emit the clear state
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet, enables the CP micro engine, then
 * streams the evergreen default (clear) context state followed by a few
 * fixed register initializations.  The ring_lock sizes (7 and
 * evergreen_default_size + 19) must match the number of words emitted.
 *
 * Returns 0 on success, a negative error code if a ring lock fails.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* enable the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
3149 
/**
 * evergreen_cp_resume - reset and restart the command processor
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP (together with PA/SH/VGT/SPI/SX, which must be
 * reset whenever the CP is), programs the ring buffer size, pointers,
 * writeback addresses and base, then starts the CP via
 * evergreen_cp_start() and runs a ring test.
 *
 * Returns 0 on success, a negative error code if the ring test fails.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: the CP must not update rptr in memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
3216 
3217 /*
3218  * Core functions
3219  */
3220 static void evergreen_gpu_init(struct radeon_device *rdev)
3221 {
3222 	u32 gb_addr_config;
3223 	u32 mc_shared_chmap, mc_arb_ramcfg;
3224 	u32 sx_debug_1;
3225 	u32 smx_dc_ctl0;
3226 	u32 sq_config;
3227 	u32 sq_lds_resource_mgmt;
3228 	u32 sq_gpr_resource_mgmt_1;
3229 	u32 sq_gpr_resource_mgmt_2;
3230 	u32 sq_gpr_resource_mgmt_3;
3231 	u32 sq_thread_resource_mgmt;
3232 	u32 sq_thread_resource_mgmt_2;
3233 	u32 sq_stack_resource_mgmt_1;
3234 	u32 sq_stack_resource_mgmt_2;
3235 	u32 sq_stack_resource_mgmt_3;
3236 	u32 vgt_cache_invalidation;
3237 	u32 hdp_host_path_cntl, tmp;
3238 	u32 disabled_rb_mask;
3239 	int i, j, ps_thread_count;
3240 
3241 	switch (rdev->family) {
3242 	case CHIP_CYPRESS:
3243 	case CHIP_HEMLOCK:
3244 		rdev->config.evergreen.num_ses = 2;
3245 		rdev->config.evergreen.max_pipes = 4;
3246 		rdev->config.evergreen.max_tile_pipes = 8;
3247 		rdev->config.evergreen.max_simds = 10;
3248 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3249 		rdev->config.evergreen.max_gprs = 256;
3250 		rdev->config.evergreen.max_threads = 248;
3251 		rdev->config.evergreen.max_gs_threads = 32;
3252 		rdev->config.evergreen.max_stack_entries = 512;
3253 		rdev->config.evergreen.sx_num_of_sets = 4;
3254 		rdev->config.evergreen.sx_max_export_size = 256;
3255 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3256 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3257 		rdev->config.evergreen.max_hw_contexts = 8;
3258 		rdev->config.evergreen.sq_num_cf_insts = 2;
3259 
3260 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3261 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3262 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3263 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3264 		break;
3265 	case CHIP_JUNIPER:
3266 		rdev->config.evergreen.num_ses = 1;
3267 		rdev->config.evergreen.max_pipes = 4;
3268 		rdev->config.evergreen.max_tile_pipes = 4;
3269 		rdev->config.evergreen.max_simds = 10;
3270 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3271 		rdev->config.evergreen.max_gprs = 256;
3272 		rdev->config.evergreen.max_threads = 248;
3273 		rdev->config.evergreen.max_gs_threads = 32;
3274 		rdev->config.evergreen.max_stack_entries = 512;
3275 		rdev->config.evergreen.sx_num_of_sets = 4;
3276 		rdev->config.evergreen.sx_max_export_size = 256;
3277 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3278 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3279 		rdev->config.evergreen.max_hw_contexts = 8;
3280 		rdev->config.evergreen.sq_num_cf_insts = 2;
3281 
3282 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3283 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3284 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3285 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3286 		break;
3287 	case CHIP_REDWOOD:
3288 		rdev->config.evergreen.num_ses = 1;
3289 		rdev->config.evergreen.max_pipes = 4;
3290 		rdev->config.evergreen.max_tile_pipes = 4;
3291 		rdev->config.evergreen.max_simds = 5;
3292 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3293 		rdev->config.evergreen.max_gprs = 256;
3294 		rdev->config.evergreen.max_threads = 248;
3295 		rdev->config.evergreen.max_gs_threads = 32;
3296 		rdev->config.evergreen.max_stack_entries = 256;
3297 		rdev->config.evergreen.sx_num_of_sets = 4;
3298 		rdev->config.evergreen.sx_max_export_size = 256;
3299 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3300 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3301 		rdev->config.evergreen.max_hw_contexts = 8;
3302 		rdev->config.evergreen.sq_num_cf_insts = 2;
3303 
3304 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3305 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3306 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3307 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3308 		break;
3309 	case CHIP_CEDAR:
3310 	default:
3311 		rdev->config.evergreen.num_ses = 1;
3312 		rdev->config.evergreen.max_pipes = 2;
3313 		rdev->config.evergreen.max_tile_pipes = 2;
3314 		rdev->config.evergreen.max_simds = 2;
3315 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3316 		rdev->config.evergreen.max_gprs = 256;
3317 		rdev->config.evergreen.max_threads = 192;
3318 		rdev->config.evergreen.max_gs_threads = 16;
3319 		rdev->config.evergreen.max_stack_entries = 256;
3320 		rdev->config.evergreen.sx_num_of_sets = 4;
3321 		rdev->config.evergreen.sx_max_export_size = 128;
3322 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3323 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3324 		rdev->config.evergreen.max_hw_contexts = 4;
3325 		rdev->config.evergreen.sq_num_cf_insts = 1;
3326 
3327 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3328 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3329 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3330 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3331 		break;
3332 	case CHIP_PALM:
3333 		rdev->config.evergreen.num_ses = 1;
3334 		rdev->config.evergreen.max_pipes = 2;
3335 		rdev->config.evergreen.max_tile_pipes = 2;
3336 		rdev->config.evergreen.max_simds = 2;
3337 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3338 		rdev->config.evergreen.max_gprs = 256;
3339 		rdev->config.evergreen.max_threads = 192;
3340 		rdev->config.evergreen.max_gs_threads = 16;
3341 		rdev->config.evergreen.max_stack_entries = 256;
3342 		rdev->config.evergreen.sx_num_of_sets = 4;
3343 		rdev->config.evergreen.sx_max_export_size = 128;
3344 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3345 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3346 		rdev->config.evergreen.max_hw_contexts = 4;
3347 		rdev->config.evergreen.sq_num_cf_insts = 1;
3348 
3349 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3350 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3351 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3352 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3353 		break;
3354 	case CHIP_SUMO:
3355 		rdev->config.evergreen.num_ses = 1;
3356 		rdev->config.evergreen.max_pipes = 4;
3357 		rdev->config.evergreen.max_tile_pipes = 4;
3358 		if (rdev->pdev->device == 0x9648)
3359 			rdev->config.evergreen.max_simds = 3;
3360 		else if ((rdev->pdev->device == 0x9647) ||
3361 			 (rdev->pdev->device == 0x964a))
3362 			rdev->config.evergreen.max_simds = 4;
3363 		else
3364 			rdev->config.evergreen.max_simds = 5;
3365 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3366 		rdev->config.evergreen.max_gprs = 256;
3367 		rdev->config.evergreen.max_threads = 248;
3368 		rdev->config.evergreen.max_gs_threads = 32;
3369 		rdev->config.evergreen.max_stack_entries = 256;
3370 		rdev->config.evergreen.sx_num_of_sets = 4;
3371 		rdev->config.evergreen.sx_max_export_size = 256;
3372 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3373 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3374 		rdev->config.evergreen.max_hw_contexts = 8;
3375 		rdev->config.evergreen.sq_num_cf_insts = 2;
3376 
3377 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3378 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3379 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3380 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3381 		break;
3382 	case CHIP_SUMO2:
3383 		rdev->config.evergreen.num_ses = 1;
3384 		rdev->config.evergreen.max_pipes = 4;
3385 		rdev->config.evergreen.max_tile_pipes = 4;
3386 		rdev->config.evergreen.max_simds = 2;
3387 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3388 		rdev->config.evergreen.max_gprs = 256;
3389 		rdev->config.evergreen.max_threads = 248;
3390 		rdev->config.evergreen.max_gs_threads = 32;
3391 		rdev->config.evergreen.max_stack_entries = 512;
3392 		rdev->config.evergreen.sx_num_of_sets = 4;
3393 		rdev->config.evergreen.sx_max_export_size = 256;
3394 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3395 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3396 		rdev->config.evergreen.max_hw_contexts = 4;
3397 		rdev->config.evergreen.sq_num_cf_insts = 2;
3398 
3399 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3400 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3401 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3402 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3403 		break;
3404 	case CHIP_BARTS:
3405 		rdev->config.evergreen.num_ses = 2;
3406 		rdev->config.evergreen.max_pipes = 4;
3407 		rdev->config.evergreen.max_tile_pipes = 8;
3408 		rdev->config.evergreen.max_simds = 7;
3409 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3410 		rdev->config.evergreen.max_gprs = 256;
3411 		rdev->config.evergreen.max_threads = 248;
3412 		rdev->config.evergreen.max_gs_threads = 32;
3413 		rdev->config.evergreen.max_stack_entries = 512;
3414 		rdev->config.evergreen.sx_num_of_sets = 4;
3415 		rdev->config.evergreen.sx_max_export_size = 256;
3416 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3417 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3418 		rdev->config.evergreen.max_hw_contexts = 8;
3419 		rdev->config.evergreen.sq_num_cf_insts = 2;
3420 
3421 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3422 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3423 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3424 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3425 		break;
3426 	case CHIP_TURKS:
3427 		rdev->config.evergreen.num_ses = 1;
3428 		rdev->config.evergreen.max_pipes = 4;
3429 		rdev->config.evergreen.max_tile_pipes = 4;
3430 		rdev->config.evergreen.max_simds = 6;
3431 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3432 		rdev->config.evergreen.max_gprs = 256;
3433 		rdev->config.evergreen.max_threads = 248;
3434 		rdev->config.evergreen.max_gs_threads = 32;
3435 		rdev->config.evergreen.max_stack_entries = 256;
3436 		rdev->config.evergreen.sx_num_of_sets = 4;
3437 		rdev->config.evergreen.sx_max_export_size = 256;
3438 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3439 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3440 		rdev->config.evergreen.max_hw_contexts = 8;
3441 		rdev->config.evergreen.sq_num_cf_insts = 2;
3442 
3443 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3444 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3445 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3446 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3447 		break;
3448 	case CHIP_CAICOS:
3449 		rdev->config.evergreen.num_ses = 1;
3450 		rdev->config.evergreen.max_pipes = 2;
3451 		rdev->config.evergreen.max_tile_pipes = 2;
3452 		rdev->config.evergreen.max_simds = 2;
3453 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3454 		rdev->config.evergreen.max_gprs = 256;
3455 		rdev->config.evergreen.max_threads = 192;
3456 		rdev->config.evergreen.max_gs_threads = 16;
3457 		rdev->config.evergreen.max_stack_entries = 256;
3458 		rdev->config.evergreen.sx_num_of_sets = 4;
3459 		rdev->config.evergreen.sx_max_export_size = 128;
3460 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3461 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3462 		rdev->config.evergreen.max_hw_contexts = 4;
3463 		rdev->config.evergreen.sq_num_cf_insts = 1;
3464 
3465 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3466 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3467 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3468 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3469 		break;
3470 	}
3471 
3472 	/* Initialize HDP */
3473 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3474 		WREG32((0x2c14 + j), 0x00000000);
3475 		WREG32((0x2c18 + j), 0x00000000);
3476 		WREG32((0x2c1c + j), 0x00000000);
3477 		WREG32((0x2c20 + j), 0x00000000);
3478 		WREG32((0x2c24 + j), 0x00000000);
3479 	}
3480 
3481 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3482 	WREG32(SRBM_INT_CNTL, 0x1);
3483 	WREG32(SRBM_INT_ACK, 0x1);
3484 
3485 	evergreen_fix_pci_max_read_req_size(rdev);
3486 
3487 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3488 	if ((rdev->family == CHIP_PALM) ||
3489 	    (rdev->family == CHIP_SUMO) ||
3490 	    (rdev->family == CHIP_SUMO2))
3491 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3492 	else
3493 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3494 
3495 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3496 	 * not have bank info, so create a custom tiling dword.
3497 	 * bits 3:0   num_pipes
3498 	 * bits 7:4   num_banks
3499 	 * bits 11:8  group_size
3500 	 * bits 15:12 row_size
3501 	 */
3502 	rdev->config.evergreen.tile_config = 0;
3503 	switch (rdev->config.evergreen.max_tile_pipes) {
3504 	case 1:
3505 	default:
3506 		rdev->config.evergreen.tile_config |= (0 << 0);
3507 		break;
3508 	case 2:
3509 		rdev->config.evergreen.tile_config |= (1 << 0);
3510 		break;
3511 	case 4:
3512 		rdev->config.evergreen.tile_config |= (2 << 0);
3513 		break;
3514 	case 8:
3515 		rdev->config.evergreen.tile_config |= (3 << 0);
3516 		break;
3517 	}
3518 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3519 	if (rdev->flags & RADEON_IS_IGP)
3520 		rdev->config.evergreen.tile_config |= 1 << 4;
3521 	else {
3522 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3523 		case 0: /* four banks */
3524 			rdev->config.evergreen.tile_config |= 0 << 4;
3525 			break;
3526 		case 1: /* eight banks */
3527 			rdev->config.evergreen.tile_config |= 1 << 4;
3528 			break;
3529 		case 2: /* sixteen banks */
3530 		default:
3531 			rdev->config.evergreen.tile_config |= 2 << 4;
3532 			break;
3533 		}
3534 	}
3535 	rdev->config.evergreen.tile_config |= 0 << 8;
3536 	rdev->config.evergreen.tile_config |=
3537 		((gb_addr_config & 0x30000000) >> 28) << 12;
3538 
3539 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3540 		u32 efuse_straps_4;
3541 		u32 efuse_straps_3;
3542 
3543 		efuse_straps_4 = RREG32_RCU(0x204);
3544 		efuse_straps_3 = RREG32_RCU(0x203);
3545 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3546 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3547 	} else {
3548 		tmp = 0;
3549 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3550 			u32 rb_disable_bitmap;
3551 
3552 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3553 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3554 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3555 			tmp <<= 4;
3556 			tmp |= rb_disable_bitmap;
3557 		}
3558 	}
3559 	/* enabled rb are just the one not disabled :) */
3560 	disabled_rb_mask = tmp;
3561 	tmp = 0;
3562 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3563 		tmp |= (1 << i);
3564 	/* if all the backends are disabled, fix it up here */
3565 	if ((disabled_rb_mask & tmp) == tmp) {
3566 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3567 			disabled_rb_mask &= ~(1 << i);
3568 	}
3569 
3570 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3571 		u32 simd_disable_bitmap;
3572 
3573 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3574 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3575 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3576 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3577 		tmp <<= 16;
3578 		tmp |= simd_disable_bitmap;
3579 	}
3580 	rdev->config.evergreen.active_simds = hweight32(~tmp);
3581 
3582 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3583 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3584 
3585 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3586 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3587 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3588 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3589 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3590 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3591 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3592 
3593 	if ((rdev->config.evergreen.max_backends == 1) &&
3594 	    (rdev->flags & RADEON_IS_IGP)) {
3595 		if ((disabled_rb_mask & 3) == 1) {
3596 			/* RB0 disabled, RB1 enabled */
3597 			tmp = 0x11111111;
3598 		} else {
3599 			/* RB1 disabled, RB0 enabled */
3600 			tmp = 0x00000000;
3601 		}
3602 	} else {
3603 		tmp = gb_addr_config & NUM_PIPES_MASK;
3604 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3605 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3606 	}
3607 	WREG32(GB_BACKEND_MAP, tmp);
3608 
3609 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3610 	WREG32(CGTS_TCC_DISABLE, 0);
3611 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3612 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3613 
3614 	/* set HW defaults for 3D engine */
3615 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3616 				     ROQ_IB2_START(0x2b)));
3617 
3618 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3619 
3620 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3621 			     SYNC_GRADIENT |
3622 			     SYNC_WALKER |
3623 			     SYNC_ALIGNER));
3624 
3625 	sx_debug_1 = RREG32(SX_DEBUG_1);
3626 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3627 	WREG32(SX_DEBUG_1, sx_debug_1);
3628 
3629 
3630 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3631 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3632 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3633 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3634 
3635 	if (rdev->family <= CHIP_SUMO2)
3636 		WREG32(SMX_SAR_CTL0, 0x00010000);
3637 
3638 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3639 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3640 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3641 
3642 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3643 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3644 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3645 
3646 	WREG32(VGT_NUM_INSTANCES, 1);
3647 	WREG32(SPI_CONFIG_CNTL, 0);
3648 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3649 	WREG32(CP_PERFMON_CNTL, 0);
3650 
3651 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3652 				  FETCH_FIFO_HIWATER(0x4) |
3653 				  DONE_FIFO_HIWATER(0xe0) |
3654 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3655 
3656 	sq_config = RREG32(SQ_CONFIG);
3657 	sq_config &= ~(PS_PRIO(3) |
3658 		       VS_PRIO(3) |
3659 		       GS_PRIO(3) |
3660 		       ES_PRIO(3));
3661 	sq_config |= (VC_ENABLE |
3662 		      EXPORT_SRC_C |
3663 		      PS_PRIO(0) |
3664 		      VS_PRIO(1) |
3665 		      GS_PRIO(2) |
3666 		      ES_PRIO(3));
3667 
3668 	switch (rdev->family) {
3669 	case CHIP_CEDAR:
3670 	case CHIP_PALM:
3671 	case CHIP_SUMO:
3672 	case CHIP_SUMO2:
3673 	case CHIP_CAICOS:
3674 		/* no vertex cache */
3675 		sq_config &= ~VC_ENABLE;
3676 		break;
3677 	default:
3678 		break;
3679 	}
3680 
3681 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3682 
3683 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3684 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3685 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3686 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3687 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3688 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3689 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3690 
3691 	switch (rdev->family) {
3692 	case CHIP_CEDAR:
3693 	case CHIP_PALM:
3694 	case CHIP_SUMO:
3695 	case CHIP_SUMO2:
3696 		ps_thread_count = 96;
3697 		break;
3698 	default:
3699 		ps_thread_count = 128;
3700 		break;
3701 	}
3702 
3703 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3704 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3705 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3706 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3707 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3708 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3709 
3710 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3711 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3712 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3713 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3714 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3715 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3716 
3717 	WREG32(SQ_CONFIG, sq_config);
3718 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3719 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3720 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3721 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3722 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3723 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3724 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3725 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3726 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3727 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3728 
3729 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3730 					  FORCE_EOV_MAX_REZ_CNT(255)));
3731 
3732 	switch (rdev->family) {
3733 	case CHIP_CEDAR:
3734 	case CHIP_PALM:
3735 	case CHIP_SUMO:
3736 	case CHIP_SUMO2:
3737 	case CHIP_CAICOS:
3738 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3739 		break;
3740 	default:
3741 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3742 		break;
3743 	}
3744 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3745 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3746 
3747 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3748 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3749 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3750 
3751 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3752 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3753 
3754 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3755 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3756 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3757 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3758 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3759 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3760 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3761 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3762 
3763 	/* clear render buffer base addresses */
3764 	WREG32(CB_COLOR0_BASE, 0);
3765 	WREG32(CB_COLOR1_BASE, 0);
3766 	WREG32(CB_COLOR2_BASE, 0);
3767 	WREG32(CB_COLOR3_BASE, 0);
3768 	WREG32(CB_COLOR4_BASE, 0);
3769 	WREG32(CB_COLOR5_BASE, 0);
3770 	WREG32(CB_COLOR6_BASE, 0);
3771 	WREG32(CB_COLOR7_BASE, 0);
3772 	WREG32(CB_COLOR8_BASE, 0);
3773 	WREG32(CB_COLOR9_BASE, 0);
3774 	WREG32(CB_COLOR10_BASE, 0);
3775 	WREG32(CB_COLOR11_BASE, 0);
3776 
3777 	/* set the shader const cache sizes to 0 */
3778 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3779 		WREG32(i, 0);
3780 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3781 		WREG32(i, 0);
3782 
3783 	tmp = RREG32(HDP_MISC_CNTL);
3784 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3785 	WREG32(HDP_MISC_CNTL, tmp);
3786 
3787 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3788 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3789 
3790 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3791 
3792 	udelay(50);
3793 
3794 }
3795 
3796 int evergreen_mc_init(struct radeon_device *rdev)
3797 {
3798 	u32 tmp;
3799 	int chansize, numchan;
3800 
3801 	/* Get VRAM informations */
3802 	rdev->mc.vram_is_ddr = true;
3803 	if ((rdev->family == CHIP_PALM) ||
3804 	    (rdev->family == CHIP_SUMO) ||
3805 	    (rdev->family == CHIP_SUMO2))
3806 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3807 	else
3808 		tmp = RREG32(MC_ARB_RAMCFG);
3809 	if (tmp & CHANSIZE_OVERRIDE) {
3810 		chansize = 16;
3811 	} else if (tmp & CHANSIZE_MASK) {
3812 		chansize = 64;
3813 	} else {
3814 		chansize = 32;
3815 	}
3816 	tmp = RREG32(MC_SHARED_CHMAP);
3817 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3818 	case 0:
3819 	default:
3820 		numchan = 1;
3821 		break;
3822 	case 1:
3823 		numchan = 2;
3824 		break;
3825 	case 2:
3826 		numchan = 4;
3827 		break;
3828 	case 3:
3829 		numchan = 8;
3830 		break;
3831 	}
3832 	rdev->mc.vram_width = numchan * chansize;
3833 	/* Could aper size report 0 ? */
3834 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3835 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3836 	/* Setup GPU memory space */
3837 	if ((rdev->family == CHIP_PALM) ||
3838 	    (rdev->family == CHIP_SUMO) ||
3839 	    (rdev->family == CHIP_SUMO2)) {
3840 		/* size in bytes on fusion */
3841 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3842 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3843 	} else {
3844 		/* size in MB on evergreen/cayman/tn */
3845 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3846 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3847 	}
3848 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3849 	r700_vram_gtt_location(rdev, &rdev->mc);
3850 	radeon_update_bandwidth_info(rdev);
3851 
3852 	return 0;
3853 }
3854 
/**
 * evergreen_print_gpu_status_regs - dump GPU engine status registers
 *
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM/CP/DMA status registers to the kernel log to help
 * diagnose GPU hangs; called before and after a soft reset.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	/* cayman and newer have a second DMA engine at a +0x800 offset */
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3882 
3883 bool evergreen_is_display_hung(struct radeon_device *rdev)
3884 {
3885 	u32 crtc_hung = 0;
3886 	u32 crtc_status[6];
3887 	u32 i, j, tmp;
3888 
3889 	for (i = 0; i < rdev->num_crtc; i++) {
3890 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3891 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3892 			crtc_hung |= (1 << i);
3893 		}
3894 	}
3895 
3896 	for (j = 0; j < 10; j++) {
3897 		for (i = 0; i < rdev->num_crtc; i++) {
3898 			if (crtc_hung & (1 << i)) {
3899 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3900 				if (tmp != crtc_status[i])
3901 					crtc_hung &= ~(1 << i);
3902 			}
3903 		}
3904 		if (crtc_hung == 0)
3905 			return false;
3906 		udelay(100);
3907 	}
3908 
3909 	return true;
3910 }
3911 
/**
 * evergreen_gpu_check_soft_reset - determine which GPU blocks need a reset
 *
 * @rdev: radeon_device pointer
 *
 * Samples the GRBM, DMA, SRBM and VM L2 status registers and translates
 * any busy/pending bits into a mask of RADEON_RESET_* flags.  An MC
 * reset request is dropped at the end since a busy MC is usually just
 * busy, not hung.
 * Returns the mask of engines that appear hung (0 if everything is idle).
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	/* display hang is detected by polling the CRTC H/V counters */
	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3980 
/**
 * evergreen_gpu_soft_reset - soft-reset the requested GPU blocks
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting which blocks to reset
 *
 * Halts the CP (and DMA if requested), stops MC client access, pulses
 * the corresponding bits in GRBM_SOFT_RESET/SRBM_SOFT_RESET, then
 * restores MC access.  The udelay()s and the read-backs of the reset
 * registers are required for the reset pulses to take effect; do not
 * reorder this sequence.  No-op if @reset_mask is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop MC client access while the blocks are in reset */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the RADEON_RESET_* mask into GRBM/SRBM reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC reset is only valid on discrete parts */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		/* set, read back to post, wait, then clear the reset bits */
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		/* same pulse sequence for the SRBM-side blocks */
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
4094 
/**
 * evergreen_gpu_pci_config_reset - reset the GPU via PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * Full-asic reset path used when soft reset is insufficient: halts the
 * CP, DMA and RLC, switches clocks to bypass, disables bus mastering
 * and MC access, then triggers a PCI config reset and waits for the
 * asic to come back (CONFIG_MEMSIZE reads 0xffffffff while in reset).
 * The ordering of these steps is deliberate; do not rearrange.
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
4136 
4137 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4138 {
4139 	u32 reset_mask;
4140 
4141 	if (hard) {
4142 		evergreen_gpu_pci_config_reset(rdev);
4143 		return 0;
4144 	}
4145 
4146 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4147 
4148 	if (reset_mask)
4149 		r600_set_bios_scratch_engine_hung(rdev, true);
4150 
4151 	/* try soft reset */
4152 	evergreen_gpu_soft_reset(rdev, reset_mask);
4153 
4154 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4155 
4156 	/* try pci config reset */
4157 	if (reset_mask && radeon_hard_reset)
4158 		evergreen_gpu_pci_config_reset(rdev);
4159 
4160 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4161 
4162 	if (!reset_mask)
4163 		r600_set_bios_scratch_engine_hung(rdev, false);
4164 
4165 	return 0;
4166 }
4167 
4168 /**
4169  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4170  *
4171  * @rdev: radeon_device pointer
4172  * @ring: radeon_ring structure holding ring information
4173  *
4174  * Check if the GFX engine is locked up.
4175  * Returns true if the engine appears to be locked up, false if not.
4176  */
4177 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4178 {
4179 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4180 
4181 	if (!(reset_mask & (RADEON_RESET_GFX |
4182 			    RADEON_RESET_COMPUTE |
4183 			    RADEON_RESET_CP))) {
4184 		radeon_ring_lockup_update(rdev, ring);
4185 		return false;
4186 	}
4187 	return radeon_ring_test_lockup(rdev, ring);
4188 }
4189 
4190 /*
4191  * RLC
4192  */
4193 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4194 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
4195 
/**
 * sumo_rlc_fini - tear down the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Unpins and frees the save/restore, clear-state and CP table buffer
 * objects allocated by sumo_rlc_init().  Safe to call if any of them
 * were never allocated.
 */
void sumo_rlc_fini(struct radeon_device *rdev)
{
	int r;

	/* save restore block */
	if (rdev->rlc.save_restore_obj) {
		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);

		radeon_bo_unref(&rdev->rlc.save_restore_obj);
		rdev->rlc.save_restore_obj = NULL;
	}

	/* clear state block */
	if (rdev->rlc.clear_state_obj) {
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);

		radeon_bo_unref(&rdev->rlc.clear_state_obj);
		rdev->rlc.clear_state_obj = NULL;
	}

	/* cp table block */
	if (rdev->rlc.cp_table_obj) {
		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0))
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
		radeon_bo_unpin(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

		radeon_bo_unref(&rdev->rlc.cp_table_obj);
		rdev->rlc.cp_table_obj = NULL;
	}
}
4236 
4237 #define CP_ME_TABLE_SIZE    96
4238 
/**
 * sumo_rlc_init - allocate and populate the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and fills up to three VRAM buffer objects used by the
 * RLC: the register save/restore list (if rdev->rlc.reg_list is set),
 * the clear-state buffer (if rdev->rlc.cs_data is set) and the CP
 * power-gating table (if rdev->rlc.cp_table_size is set).  The exact
 * buffer layout depends on the asic generation (evergreen/NI vs SI vs
 * CIK).  On any failure everything allocated so far is released via
 * sumo_rlc_fini().
 * Returns 0 on success, negative error code on failure.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK needs extra room in the sr buffer */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			/* pack pairs of dword-aligned register offsets,
			 * leaving a save slot after each pair */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI prepends a 256-byte header to the csb */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* evergreen/NI: count the registers in all extents
			 * plus 3 header dwords per extent and 2 trailer dwords */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: 64-bit GPU address of the csb body, then its size */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* evergreen/NI layout: a header block of
			 * (addr_hi, addr_lo, reg_index, flags|count) entries
			 * followed by the packed register payload blocks */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* 0x08000000 flags this header entry as a payload descriptor */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		/* CP power-gating table (CIK only sets cp_table_size) */
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4455 
4456 static void evergreen_rlc_start(struct radeon_device *rdev)
4457 {
4458 	u32 mask = RLC_ENABLE;
4459 
4460 	if (rdev->flags & RADEON_IS_IGP) {
4461 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4462 	}
4463 
4464 	WREG32(RLC_CNTL, mask);
4465 }
4466 
/**
 * evergreen_rlc_resume - load the RLC microcode and start the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Stops the RLC, programs the family-specific RLC setup registers
 * (including the load-balancing config on ARUBA and the save/restore
 * and clear-state buffer addresses on IGPs), uploads the RLC ucode
 * (stored big-endian in the firmware blob) and re-enables the RLC.
 * Returns 0 on success, -EINVAL if no RLC firmware is loaded.
 */
int evergreen_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->flags & RADEON_IS_IGP) {
		if (rdev->family == CHIP_ARUBA) {
			u32 always_on_bitmap =
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
			/* find out the number of active simds */
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
			tmp = hweight32(~tmp);
			/* only enable RLC load balancing when no simds are fused off */
			if (tmp == rdev->config.cayman.max_simds_per_se) {
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
			}
		} else {
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		}
		/* point the RLC at the buffers set up by sumo_rlc_init() */
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	} else {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* upload the ucode; size depends on the asic family */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	WREG32(RLC_UCODE_ADDR, 0);

	evergreen_rlc_start(rdev);

	return 0;
}
4533 
4534 /* Interrupts */
4535 
4536 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4537 {
4538 	if (crtc >= rdev->num_crtc)
4539 		return 0;
4540 	else
4541 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4542 }
4543 
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 * @rdev: radeon_device pointer
 *
 * Masks every interrupt source this asic can raise: CP rings, DMA
 * engine(s), GRBM/SRBM, per-crtc vblank/vline and pageflip, DAC
 * autodetect, and HPD pins.  Used when tearing down or re-initializing
 * the interrupt handler so no stale sources fire.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman+ has three CP rings, each with its own int cntl */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		/* second DMA engine only exists on cayman+ */
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	/* mask the (first) DMA engine trap */
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* crtc 0/1 always exist; 2-5 depend on the asic */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* disable pageflip interrupts per crtc */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* clear HPD enables but preserve the configured pin polarity */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4602 
/**
 * evergreen_irq_set - program the interrupt enables from driver state
 * @rdev: radeon_device pointer
 *
 * Builds up enable masks for every interrupt source (CP rings, DMA,
 * vblank, HPD, HDMI/AFMT audio, thermal) from the software state in
 * rdev->irq and writes them to the hardware in one pass.
 * Returns 0 on success, -EINVAL if no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* start from the current HPD state with the enable bits cleared */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
	/* ARUBA (TN) keeps the thermal int control in a different register */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine on cayman+ */
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank: enabled either for drm vblank events or pending pageflips */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
	}
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* all masks computed -- now program the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	/* pageflip ints are unconditionally unmasked; flips are gated in sw */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	/* posting read */
	RREG32(SRBM_STATUS);

	return 0;
}
4832 
4833 static void evergreen_irq_ack(struct radeon_device *rdev)
4834 {
4835 	u32 tmp;
4836 
4837 	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4838 	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4839 	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4840 	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4841 	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4842 	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4843 	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4844 	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4845 	if (rdev->num_crtc >= 4) {
4846 		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4847 		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4848 	}
4849 	if (rdev->num_crtc >= 6) {
4850 		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4851 		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4852 	}
4853 
4854 	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4855 	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4856 	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4857 	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4858 	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4859 	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4860 
4861 	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4862 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4863 	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4864 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4865 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4866 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4867 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4868 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4869 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4870 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4871 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4872 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4873 
4874 	if (rdev->num_crtc >= 4) {
4875 		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4876 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4877 		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4878 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4879 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4880 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4881 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4882 			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4883 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4884 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4885 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4886 			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4887 	}
4888 
4889 	if (rdev->num_crtc >= 6) {
4890 		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4891 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4892 		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4893 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4894 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4895 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4896 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4897 			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4898 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4899 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4900 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4901 			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4902 	}
4903 
4904 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4905 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4906 		tmp |= DC_HPDx_INT_ACK;
4907 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4908 	}
4909 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4910 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4911 		tmp |= DC_HPDx_INT_ACK;
4912 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4913 	}
4914 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4915 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4916 		tmp |= DC_HPDx_INT_ACK;
4917 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4918 	}
4919 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4920 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4921 		tmp |= DC_HPDx_INT_ACK;
4922 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4923 	}
4924 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4925 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4926 		tmp |= DC_HPDx_INT_ACK;
4927 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4928 	}
4929 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4930 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4931 		tmp |= DC_HPDx_INT_ACK;
4932 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4933 	}
4934 
4935 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
4936 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4937 		tmp |= DC_HPDx_RX_INT_ACK;
4938 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4939 	}
4940 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
4941 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4942 		tmp |= DC_HPDx_RX_INT_ACK;
4943 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4944 	}
4945 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
4946 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4947 		tmp |= DC_HPDx_RX_INT_ACK;
4948 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4949 	}
4950 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
4951 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4952 		tmp |= DC_HPDx_RX_INT_ACK;
4953 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4954 	}
4955 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
4956 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4957 		tmp |= DC_HPDx_RX_INT_ACK;
4958 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4959 	}
4960 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
4961 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4962 		tmp |= DC_HPDx_RX_INT_ACK;
4963 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4964 	}
4965 
4966 	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4967 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4968 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4969 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4970 	}
4971 	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4972 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4973 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4974 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4975 	}
4976 	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4977 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4978 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4979 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4980 	}
4981 	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4982 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4983 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4984 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4985 	}
4986 	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4987 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4988 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4989 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4990 	}
4991 	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4992 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4993 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4994 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4995 	}
4996 }
4997 
/*
 * Disable all interrupts: mask sources first, wait for any in-flight
 * interrupt to land, then ack whatever is pending and force the
 * per-source enable state off.  The order matters.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
5006 
/*
 * Quiesce interrupts for suspend: disable/ack all sources, then stop
 * the RLC so no new interrupts are generated while suspended.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
5012 
5013 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
5014 {
5015 	u32 wptr, tmp;
5016 
5017 	if (rdev->wb.enabled)
5018 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5019 	else
5020 		wptr = RREG32(IH_RB_WPTR);
5021 
5022 	if (wptr & RB_OVERFLOW) {
5023 		wptr &= ~RB_OVERFLOW;
5024 		/* When a ring buffer overflow happen start parsing interrupt
5025 		 * from the last not overwritten vector (wptr + 16). Hopefully
5026 		 * this should allow us to catchup.
5027 		 */
5028 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5029 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5030 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5031 		tmp = RREG32(IH_RB_CNTL);
5032 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
5033 		WREG32(IH_RB_CNTL, tmp);
5034 	}
5035 	return (wptr & rdev->ih.ptr_mask);
5036 }
5037 
5038 int evergreen_irq_process(struct radeon_device *rdev)
5039 {
5040 	u32 wptr;
5041 	u32 rptr;
5042 	u32 src_id, src_data;
5043 	u32 ring_index;
5044 	bool queue_hotplug = false;
5045 	bool queue_hdmi = false;
5046 	bool queue_dp = false;
5047 	bool queue_thermal = false;
5048 	u32 status, addr;
5049 
5050 	if (!rdev->ih.enabled || rdev->shutdown)
5051 		return IRQ_NONE;
5052 
5053 	wptr = evergreen_get_ih_wptr(rdev);
5054 
5055 restart_ih:
5056 	/* is somebody else already processing irqs? */
5057 	if (atomic_xchg(&rdev->ih.lock, 1))
5058 		return IRQ_NONE;
5059 
5060 	rptr = rdev->ih.rptr;
5061 	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5062 
5063 	/* Order reading of wptr vs. reading of IH ring data */
5064 	rmb();
5065 
5066 	/* display interrupts */
5067 	evergreen_irq_ack(rdev);
5068 
5069 	while (rptr != wptr) {
5070 		/* wptr/rptr are in bytes! */
5071 		ring_index = rptr / 4;
5072 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5073 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5074 
5075 		switch (src_id) {
5076 		case 1: /* D1 vblank/vline */
5077 			switch (src_data) {
5078 			case 0: /* D1 vblank */
5079 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5080 					DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5081 
5082 				if (rdev->irq.crtc_vblank_int[0]) {
5083 					drm_handle_vblank(rdev->ddev, 0);
5084 					rdev->pm.vblank_sync = true;
5085 					wake_up(&rdev->irq.vblank_queue);
5086 				}
5087 				if (atomic_read(&rdev->irq.pflip[0]))
5088 					radeon_crtc_handle_vblank(rdev, 0);
5089 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5090 				DRM_DEBUG("IH: D1 vblank\n");
5091 
5092 				break;
5093 			case 1: /* D1 vline */
5094 				if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5095 					DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5096 
5097 				rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5098 				DRM_DEBUG("IH: D1 vline\n");
5099 
5100 				break;
5101 			default:
5102 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5103 				break;
5104 			}
5105 			break;
5106 		case 2: /* D2 vblank/vline */
5107 			switch (src_data) {
5108 			case 0: /* D2 vblank */
5109 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5110 					DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5111 
5112 				if (rdev->irq.crtc_vblank_int[1]) {
5113 					drm_handle_vblank(rdev->ddev, 1);
5114 					rdev->pm.vblank_sync = true;
5115 					wake_up(&rdev->irq.vblank_queue);
5116 				}
5117 				if (atomic_read(&rdev->irq.pflip[1]))
5118 					radeon_crtc_handle_vblank(rdev, 1);
5119 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5120 				DRM_DEBUG("IH: D2 vblank\n");
5121 
5122 				break;
5123 			case 1: /* D2 vline */
5124 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5125 					DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5126 
5127 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5128 				DRM_DEBUG("IH: D2 vline\n");
5129 
5130 				break;
5131 			default:
5132 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5133 				break;
5134 			}
5135 			break;
5136 		case 3: /* D3 vblank/vline */
5137 			switch (src_data) {
5138 			case 0: /* D3 vblank */
5139 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5140 					DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5141 
5142 				if (rdev->irq.crtc_vblank_int[2]) {
5143 					drm_handle_vblank(rdev->ddev, 2);
5144 					rdev->pm.vblank_sync = true;
5145 					wake_up(&rdev->irq.vblank_queue);
5146 				}
5147 				if (atomic_read(&rdev->irq.pflip[2]))
5148 					radeon_crtc_handle_vblank(rdev, 2);
5149 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5150 				DRM_DEBUG("IH: D3 vblank\n");
5151 
5152 				break;
5153 			case 1: /* D3 vline */
5154 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5155 					DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5156 
5157 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5158 				DRM_DEBUG("IH: D3 vline\n");
5159 
5160 				break;
5161 			default:
5162 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5163 				break;
5164 			}
5165 			break;
5166 		case 4: /* D4 vblank/vline */
5167 			switch (src_data) {
5168 			case 0: /* D4 vblank */
5169 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5170 					DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5171 
5172 				if (rdev->irq.crtc_vblank_int[3]) {
5173 					drm_handle_vblank(rdev->ddev, 3);
5174 					rdev->pm.vblank_sync = true;
5175 					wake_up(&rdev->irq.vblank_queue);
5176 				}
5177 				if (atomic_read(&rdev->irq.pflip[3]))
5178 					radeon_crtc_handle_vblank(rdev, 3);
5179 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5180 				DRM_DEBUG("IH: D4 vblank\n");
5181 
5182 				break;
5183 			case 1: /* D4 vline */
5184 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5185 					DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5186 
5187 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5188 				DRM_DEBUG("IH: D4 vline\n");
5189 
5190 				break;
5191 			default:
5192 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5193 				break;
5194 			}
5195 			break;
5196 		case 5: /* D5 vblank/vline */
5197 			switch (src_data) {
5198 			case 0: /* D5 vblank */
5199 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5200 					DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5201 
5202 				if (rdev->irq.crtc_vblank_int[4]) {
5203 					drm_handle_vblank(rdev->ddev, 4);
5204 					rdev->pm.vblank_sync = true;
5205 					wake_up(&rdev->irq.vblank_queue);
5206 				}
5207 				if (atomic_read(&rdev->irq.pflip[4]))
5208 					radeon_crtc_handle_vblank(rdev, 4);
5209 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5210 				DRM_DEBUG("IH: D5 vblank\n");
5211 
5212 				break;
5213 			case 1: /* D5 vline */
5214 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5215 					DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5216 
5217 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5218 				DRM_DEBUG("IH: D5 vline\n");
5219 
5220 				break;
5221 			default:
5222 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5223 				break;
5224 			}
5225 			break;
5226 		case 6: /* D6 vblank/vline */
5227 			switch (src_data) {
5228 			case 0: /* D6 vblank */
5229 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5230 					DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5231 
5232 				if (rdev->irq.crtc_vblank_int[5]) {
5233 					drm_handle_vblank(rdev->ddev, 5);
5234 					rdev->pm.vblank_sync = true;
5235 					wake_up(&rdev->irq.vblank_queue);
5236 				}
5237 				if (atomic_read(&rdev->irq.pflip[5]))
5238 					radeon_crtc_handle_vblank(rdev, 5);
5239 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5240 				DRM_DEBUG("IH: D6 vblank\n");
5241 
5242 				break;
5243 			case 1: /* D6 vline */
5244 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5245 					DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5246 
5247 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5248 				DRM_DEBUG("IH: D6 vline\n");
5249 
5250 				break;
5251 			default:
5252 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5253 				break;
5254 			}
5255 			break;
5256 		case 8: /* D1 page flip */
5257 		case 10: /* D2 page flip */
5258 		case 12: /* D3 page flip */
5259 		case 14: /* D4 page flip */
5260 		case 16: /* D5 page flip */
5261 		case 18: /* D6 page flip */
5262 			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5263 			if (radeon_use_pflipirq > 0)
5264 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5265 			break;
5266 		case 42: /* HPD hotplug */
5267 			switch (src_data) {
5268 			case 0:
5269 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5270 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5271 
5272 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5273 				queue_hotplug = true;
5274 				DRM_DEBUG("IH: HPD1\n");
5275 				break;
5276 			case 1:
5277 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5278 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5279 
5280 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5281 				queue_hotplug = true;
5282 				DRM_DEBUG("IH: HPD2\n");
5283 				break;
5284 			case 2:
5285 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5286 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5287 
5288 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5289 				queue_hotplug = true;
5290 				DRM_DEBUG("IH: HPD3\n");
5291 				break;
5292 			case 3:
5293 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5294 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5295 
5296 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5297 				queue_hotplug = true;
5298 				DRM_DEBUG("IH: HPD4\n");
5299 				break;
5300 			case 4:
5301 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5302 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5303 
5304 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5305 				queue_hotplug = true;
5306 				DRM_DEBUG("IH: HPD5\n");
5307 				break;
5308 			case 5:
5309 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5310 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5311 
5312 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5313 				queue_hotplug = true;
5314 				DRM_DEBUG("IH: HPD6\n");
5315 				break;
5316 			case 6:
5317 				if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5318 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5319 
5320 				rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5321 				queue_dp = true;
5322 				DRM_DEBUG("IH: HPD_RX 1\n");
5323 				break;
5324 			case 7:
5325 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5326 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5327 
5328 				rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5329 				queue_dp = true;
5330 				DRM_DEBUG("IH: HPD_RX 2\n");
5331 				break;
5332 			case 8:
5333 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5334 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5335 
5336 				rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5337 				queue_dp = true;
5338 				DRM_DEBUG("IH: HPD_RX 3\n");
5339 				break;
5340 			case 9:
5341 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5342 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5343 
5344 				rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5345 				queue_dp = true;
5346 				DRM_DEBUG("IH: HPD_RX 4\n");
5347 				break;
5348 			case 10:
5349 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5350 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5351 
5352 				rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5353 				queue_dp = true;
5354 				DRM_DEBUG("IH: HPD_RX 5\n");
5355 				break;
5356 			case 11:
5357 				if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5358 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5359 
5360 				rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5361 				queue_dp = true;
5362 				DRM_DEBUG("IH: HPD_RX 6\n");
5363 				break;
5364 			default:
5365 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5366 				break;
5367 			}
5368 			break;
5369 		case 44: /* hdmi */
5370 			switch (src_data) {
5371 			case 0:
5372 				if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5373 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5374 
5375 				rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5376 				queue_hdmi = true;
5377 				DRM_DEBUG("IH: HDMI0\n");
5378 				break;
5379 			case 1:
5380 				if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5381 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5382 
5383 				rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5384 				queue_hdmi = true;
5385 				DRM_DEBUG("IH: HDMI1\n");
5386 				break;
5387 			case 2:
5388 				if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5389 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5390 
5391 				rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5392 				queue_hdmi = true;
5393 				DRM_DEBUG("IH: HDMI2\n");
5394 				break;
5395 			case 3:
5396 				if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5397 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5398 
5399 				rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5400 				queue_hdmi = true;
5401 				DRM_DEBUG("IH: HDMI3\n");
5402 				break;
5403 			case 4:
5404 				if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5405 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5406 
5407 				rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5408 				queue_hdmi = true;
5409 				DRM_DEBUG("IH: HDMI4\n");
5410 				break;
5411 			case 5:
5412 				if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5413 					DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5414 
5415 				rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5416 				queue_hdmi = true;
5417 				DRM_DEBUG("IH: HDMI5\n");
5418 				break;
5419 			default:
5420 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5421 				break;
5422 			}
5423 		case 96:
5424 			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5425 			WREG32(SRBM_INT_ACK, 0x1);
5426 			break;
5427 		case 124: /* UVD */
5428 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5429 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5430 			break;
5431 		case 146:
5432 		case 147:
5433 			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5434 			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5435 			/* reset addr and status */
5436 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5437 			if (addr == 0x0 && status == 0x0)
5438 				break;
5439 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5440 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5441 				addr);
5442 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5443 				status);
5444 			cayman_vm_decode_fault(rdev, status, addr);
5445 			break;
5446 		case 176: /* CP_INT in ring buffer */
5447 		case 177: /* CP_INT in IB1 */
5448 		case 178: /* CP_INT in IB2 */
5449 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5450 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5451 			break;
5452 		case 181: /* CP EOP event */
5453 			DRM_DEBUG("IH: CP EOP\n");
5454 			if (rdev->family >= CHIP_CAYMAN) {
5455 				switch (src_data) {
5456 				case 0:
5457 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5458 					break;
5459 				case 1:
5460 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5461 					break;
5462 				case 2:
5463 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5464 					break;
5465 				}
5466 			} else
5467 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5468 			break;
5469 		case 224: /* DMA trap event */
5470 			DRM_DEBUG("IH: DMA trap\n");
5471 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5472 			break;
5473 		case 230: /* thermal low to high */
5474 			DRM_DEBUG("IH: thermal low to high\n");
5475 			rdev->pm.dpm.thermal.high_to_low = false;
5476 			queue_thermal = true;
5477 			break;
5478 		case 231: /* thermal high to low */
5479 			DRM_DEBUG("IH: thermal high to low\n");
5480 			rdev->pm.dpm.thermal.high_to_low = true;
5481 			queue_thermal = true;
5482 			break;
5483 		case 233: /* GUI IDLE */
5484 			DRM_DEBUG("IH: GUI idle\n");
5485 			break;
5486 		case 244: /* DMA trap event */
5487 			if (rdev->family >= CHIP_CAYMAN) {
5488 				DRM_DEBUG("IH: DMA1 trap\n");
5489 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5490 			}
5491 			break;
5492 		default:
5493 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5494 			break;
5495 		}
5496 
5497 		/* wptr/rptr are in bytes! */
5498 		rptr += 16;
5499 		rptr &= rdev->ih.ptr_mask;
5500 		WREG32(IH_RB_RPTR, rptr);
5501 	}
5502 	if (queue_dp)
5503 		schedule_work(&rdev->dp_work);
5504 	if (queue_hotplug)
5505 		schedule_delayed_work(&rdev->hotplug_work, 0);
5506 	if (queue_hdmi)
5507 		schedule_work(&rdev->audio_work);
5508 	if (queue_thermal && rdev->pm.dpm_enabled)
5509 		schedule_work(&rdev->pm.dpm.thermal.work);
5510 	rdev->ih.rptr = rptr;
5511 	atomic_set(&rdev->ih.lock, 0);
5512 
5513 	/* make sure wptr hasn't changed while processing */
5514 	wptr = evergreen_get_ih_wptr(rdev);
5515 	if (wptr != rptr)
5516 		goto restart_ih;
5517 
5518 	return IRQ_HANDLED;
5519 }
5520 
5521 static void evergreen_uvd_init(struct radeon_device *rdev)
5522 {
5523 	int r;
5524 
5525 	if (!rdev->has_uvd)
5526 		return;
5527 
5528 	r = radeon_uvd_init(rdev);
5529 	if (r) {
5530 		dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
5531 		/*
5532 		 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
5533 		 * to early fails uvd_v2_2_resume() and thus nothing happens
5534 		 * there. So it is pointless to try to go through that code
5535 		 * hence why we disable uvd here.
5536 		 */
5537 		rdev->has_uvd = 0;
5538 		return;
5539 	}
5540 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5541 	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
5542 }
5543 
5544 static void evergreen_uvd_start(struct radeon_device *rdev)
5545 {
5546 	int r;
5547 
5548 	if (!rdev->has_uvd)
5549 		return;
5550 
5551 	r = uvd_v2_2_resume(rdev);
5552 	if (r) {
5553 		dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
5554 		goto error;
5555 	}
5556 	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
5557 	if (r) {
5558 		dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
5559 		goto error;
5560 	}
5561 	return;
5562 
5563 error:
5564 	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5565 }
5566 
5567 static void evergreen_uvd_resume(struct radeon_device *rdev)
5568 {
5569 	struct radeon_ring *ring;
5570 	int r;
5571 
5572 	if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
5573 		return;
5574 
5575 	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5576 	r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
5577 	if (r) {
5578 		dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
5579 		return;
5580 	}
5581 	r = uvd_v1_0_init(rdev);
5582 	if (r) {
5583 		dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
5584 		return;
5585 	}
5586 }
5587 
/*
 * evergreen_startup - bring up the hardware for acceleration
 * @rdev: radeon_device pointer
 *
 * Programs the link, MC, GART, RLC, write-back, fences, IRQs, CP/DMA
 * rings, UVD, the IB pool and audio, in the order the sequence below
 * requires.  Shared by evergreen_init() and evergreen_resume().
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 parts: load MC firmware here only when DPM is not enabled */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	/* use the AGP aperture when available, otherwise the PCIE GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers (IGP parts use the sumo RLC setup) */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	/* start the fence drivers for the GFX and DMA rings */
	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* best-effort: UVD failures are handled inside and disable UVD */
	evergreen_uvd_start(rdev);

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	/* initialize the GFX and DMA ring buffers */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	/* load CP firmware, then start the CP and DMA engines */
	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	evergreen_uvd_resume(rdev);

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5707 
5708 int evergreen_resume(struct radeon_device *rdev)
5709 {
5710 	int r;
5711 
5712 	/* reset the asic, the gfx blocks are often in a bad state
5713 	 * after the driver is unloaded or after a resume
5714 	 */
5715 	if (radeon_asic_reset(rdev))
5716 		dev_warn(rdev->dev, "GPU reset failed !\n");
5717 	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5718 	 * posting will perform necessary task to bring back GPU into good
5719 	 * shape.
5720 	 */
5721 	/* post card */
5722 	atom_asic_init(rdev->mode_info.atom_context);
5723 
5724 	/* init golden registers */
5725 	evergreen_init_golden_registers(rdev);
5726 
5727 	if (rdev->pm.pm_method == PM_METHOD_DPM)
5728 		radeon_pm_resume(rdev);
5729 
5730 	rdev->accel_working = true;
5731 	r = evergreen_startup(rdev);
5732 	if (r) {
5733 		DRM_ERROR("evergreen startup failed on resume\n");
5734 		rdev->accel_working = false;
5735 		return r;
5736 	}
5737 
5738 	return r;
5739 
5740 }
5741 
/*
 * evergreen_suspend - quiesce the ASIC
 * @rdev: radeon_device pointer
 *
 * Stops power management, audio, UVD (when present), the CP and DMA
 * engines, interrupts, write-back and the PCIE GART, in that order.
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	if (rdev->has_uvd) {
		/* shut down UVD before stopping the engines */
		uvd_v1_0_fini(rdev);
		radeon_uvd_suspend(rdev);
	}
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5758 
/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does little more
 * than call the asic-specific functions. This should also allow
 * us to remove a number of callback functions, such as vram_info.
 */
/*
 * evergreen_init - one-time asic/driver initialization
 * @rdev: radeon_device pointer
 *
 * Reads and validates the (ATOM) BIOS, resets and if necessary posts
 * the card, sets up clocks, fences, AGP, the memory controller, the
 * firmware, the rings and the GART, then attempts the first hardware
 * startup.  Returns 0 on success, negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; on failure, fall back to non-AGP operation */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* fetch the required microcode: DCE5 parts also need MC firmware */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	/* set up the GFX, DMA and (optionally) UVD ring descriptors */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	evergreen_uvd_init(rdev);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failed: tear everything back down and keep the
		 * device usable without acceleration
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not suffient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5888 
/*
 * evergreen_fini - tear down the driver state for the ASIC
 * @rdev: radeon_device pointer
 *
 * Reverse of evergreen_init(): shuts down power management, audio,
 * the CP/DMA engines, interrupts, RLC (IGP only), write-back, the IB
 * pool, UVD, the GART, memory managers and the BIOS copy.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	/* release our copy of the BIOS */
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5913 
/*
 * evergreen_pcie_gen2_enable - switch the PCIE link to gen2 speeds
 * @rdev: radeon_device pointer
 *
 * No-op when disabled via the radeon.pcie_gen2 module parameter, on
 * IGP, non-PCIE or X2 boards, when the upstream bus does not support
 * 5.0 or 8.0 GT/s, or when gen2 speeds are already active.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	/* only attempt the switch if the other side has sent or supports gen2 */
	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		/* allow link upconfiguration */
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* pulse the failed-speed-change counter clear bit */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
5976 
/*
 * evergreen_program_aspm - program PCIE Active State Power Management
 * @rdev: radeon_device pointer
 *
 * Configures L0s/L1 inactivity timers, PIF pairing and PLL power-down
 * behavior for L1, with per-family variations (BARTS and newer use
 * different values).  No-op when disabled via the radeon.aspm module
 * parameter or on non-PCIE boards.  All register writes below are
 * read-modify-write and only issued when the value actually changes.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* these families keep L0s disabled */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing: MULTI_PIF cleared on fusion platforms, set otherwise */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* program the L0s/L1 inactivity timers */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		/* allow the PLLs to power down while in L1 */
		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS+ additionally program the PLL ramp-up times */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* BARTS+ also set the LS2 exit time */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}
6126