/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */

#include <linux/firmware.h>
#include <linux/pci.h>
#include <linux/slab.h>

#include <drm/drm_vblank.h>
#include <drm/radeon_drm.h>
#include <drm/drm_fourcc.h>
#include <drm/drm_framebuffer.h>

#include "atom.h"
#include "avivod.h"
#include "cik.h"
#include "ni.h"
#include "rv770.h"
#include "evergreen.h"
#include "evergreen_blit_shaders.h"
#include "evergreen_reg.h"
#include "evergreend.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_audio.h"
#include "radeon_ucode.h"
#include "si.h"

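/* The DC_HPD1..DC_HPD6 register blocks are laid out at a fixed 0xc byte
 * stride, so the per-pin registers can be addressed by hpd pin index.
 */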
#define DC_HPDx_CONTROL(x)        (DC_HPD1_CONTROL     + (x * 0xc))
#define DC_HPDx_INT_CONTROL(x)    (DC_HPD1_INT_CONTROL + (x * 0xc))
#define DC_HPDx_INT_STATUS_REG(x) (DC_HPD1_INT_STATUS  + (x * 0xc))

/*
 * Indirect register accessors: each pair writes the register offset to an
 * index register and accesses the value through the paired data register,
 * under the corresponding spinlock.
 */
u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_CG_IND_DATA);
	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
	return r;
}

void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->cg_idx_lock, flags);
	WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
	WREG32(EVERGREEN_CG_IND_DATA, (v));
	spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
}

u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY0_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return r;
}

void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}

u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
{
	unsigned long flags;
	u32 r;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	r = RREG32(EVERGREEN_PIF_PHY1_DATA);
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
	return r;
}

void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
{
	unsigned long flags;

	spin_lock_irqsave(&rdev->pif_idx_lock, flags);
	WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
	WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
	spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
}

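/* Per-CRTC register block offsets; DCE4 display registers are addressed as
 * a common base register plus one of these offsets for CRTC 0-5.
 */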
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};

#include "clearstate_evergreen.h"

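/* GFX register offsets whose contents the RLC saves and restores across GPU
 * power events on Sumo-class parts.
 */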
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);

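/* "Golden" register tables: each entry is a {register offset, mask, value}
 * triplet applied via radeon_program_register_sequence(), which updates the
 * masked bits of each register to the given value.
 */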
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};

static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};

static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};

static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};

static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};

static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};

static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};

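/**
 * evergreen_init_golden_registers - program golden register settings
 *
 * @rdev: radeon_device pointer
 *
 * Programs the golden register sequences that match the ASIC family
 * (evergreen+).
 */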
static void evergreen_init_golden_registers(struct radeon_device *rdev)
{
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cypress_mgcg_init,
						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
		break;
	case CHIP_JUNIPER:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 juniper_mgcg_init,
						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
		break;
	case CHIP_REDWOOD:
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 redwood_mgcg_init,
						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
		break;
	case CHIP_CEDAR:
		radeon_program_register_sequence(rdev,
						 cedar_golden_registers,
						 (const u32)ARRAY_SIZE(cedar_golden_registers));
		radeon_program_register_sequence(rdev,
						 evergreen_golden_registers2,
						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
		radeon_program_register_sequence(rdev,
						 cedar_mgcg_init,
						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
		break;
	case CHIP_PALM:
		radeon_program_register_sequence(rdev,
						 wrestler_golden_registers,
						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
		break;
	case CHIP_SUMO:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		break;
	case CHIP_SUMO2:
		radeon_program_register_sequence(rdev,
						 supersumo_golden_registers,
						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
		radeon_program_register_sequence(rdev,
						 sumo_golden_registers,
						 (const u32)ARRAY_SIZE(sumo_golden_registers));
		break;
	case CHIP_BARTS:
		radeon_program_register_sequence(rdev,
						 barts_golden_registers,
						 (const u32)ARRAY_SIZE(barts_golden_registers));
		break;
	case CHIP_TURKS:
		radeon_program_register_sequence(rdev,
						 turks_golden_registers,
						 (const u32)ARRAY_SIZE(turks_golden_registers));
		break;
	case CHIP_CAICOS:
		radeon_program_register_sequence(rdev,
						 caicos_golden_registers,
						 (const u32)ARRAY_SIZE(caicos_golden_registers));
		break;
	default:
		break;
	}
}

/**
 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
 *
 * @rdev: radeon_device pointer
 * @reg: register offset in bytes
 * @val: register value
 *
 * Returns 0 for success or -EINVAL for an invalid register
 */
int evergreen_get_allowed_info_register(struct radeon_device *rdev,
					u32 reg, u32 *val)
{
	switch (reg) {
	case GRBM_STATUS:
	case GRBM_STATUS_SE0:
	case GRBM_STATUS_SE1:
	case SRBM_STATUS:
	case SRBM_STATUS2:
	case DMA_STATUS_REG:
	case UVD_STATUS:
		*val = RREG32(reg);
		return 0;
	default:
		return -EINVAL;
	}
}

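/**
 * evergreen_tiling_fields - decode tiling flags into surface parameters
 *
 * @tiling_flags: tiling flags to decode
 * @bankw: bank width (returned)
 * @bankh: bank height (returned)
 * @mtaspect: macro tile aspect (returned)
 * @tile_split: tile split (returned)
 *
 * Unpacks the EG tiling fields from @tiling_flags and converts the bank
 * width/height and macro tile aspect values to their register encodings.
 */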
void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
			     unsigned *bankh, unsigned *mtaspect,
			     unsigned *tile_split)
{
	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
	switch (*bankw) {
	default:
	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
	}
	switch (*bankh) {
	default:
	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
	}
	switch (*mtaspect) {
	default:
	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
	}
}

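/* Program a single UVD clock: set the post divider from the atom clock
 * dividers and wait for the clock status bit to signal a stable clock.
 */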
static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
			      u32 cntl_reg, u32 status_reg)
{
	int r, i;
	struct atom_clock_dividers dividers;

	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
					   clock, false, &dividers);
	if (r)
		return r;

	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));

	for (i = 0; i < 100; i++) {
		if (RREG32(status_reg) & DCLK_STATUS)
			break;
		mdelay(10);
	}
	if (i == 100)
		return -ETIMEDOUT;

	return 0;
}

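/**
 * sumo_set_uvd_clocks - set the UVD clocks (sumo)
 *
 * @rdev: radeon_device pointer
 * @vclk: UVD video clock in 10 KHz units
 * @dclk: UVD decoder clock in 10 KHz units
 *
 * Programs VCLK and DCLK and records the resulting frequencies (in MHz)
 * in the CG_SCRATCH1 register.
 */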
int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	int r = 0;
	u32 cg_scratch = RREG32(CG_SCRATCH1);

	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0xffff0000;
	cg_scratch |= vclk / 100; /* MHz */

	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
	if (r)
		goto done;
	cg_scratch &= 0x0000ffff;
	cg_scratch |= (dclk / 100) << 16; /* MHz */

done:
	WREG32(CG_SCRATCH1, cg_scratch);

	return r;
}

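/**
 * evergreen_set_uvd_clocks - set the UVD clocks (evergreen+)
 *
 * @rdev: radeon_device pointer
 * @vclk: UVD video clock in 10 KHz units
 * @dclk: UVD decoder clock in 10 KHz units
 *
 * Reprograms the UVD PLL: bypasses VCLK/DCLK with BCLK, recomputes the
 * feedback and post dividers, then switches back to the PLL outputs.
 * Passing 0 for either clock leaves the PLL asleep in bypass mode.
 */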
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}

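/* Clamp the PCIe max read request size to 512 bytes when the BIOS or OS
 * programmed an out-of-range value.
 */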
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
	v = ffs(readrq) - 8;
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
}

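/**
 * dce4_program_fmt - set up the FMT block dithering/truncation
 *
 * @encoder: encoder to set up the FMT block for
 *
 * Programs truncation or spatial dithering in the FMT block based on the
 * monitor's bit depth; LVDS/eDP is handled by atom and analog outputs
 * need no FMT setup.
 */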
void dce4_program_fmt(struct drm_encoder *encoder)
{
	struct drm_device *dev = encoder->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
	int bpc = 0;
	u32 tmp = 0;
	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;

	if (connector) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		bpc = radeon_get_monitor_bpc(connector);
		dither = radeon_connector->dither;
	}

	/* LVDS/eDP FMT is set up by atom */
	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
		return;

	/* not needed for analog */
	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
		return;

	if (bpc == 0)
		return;

	switch (bpc) {
	case 6:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN);
		else
			tmp |= FMT_TRUNCATE_EN;
		break;
	case 8:
		if (dither == RADEON_FMT_DITHER_ENABLE)
			/* XXX sort out optimal dither settings */
			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
				FMT_RGB_RANDOM_ENABLE |
				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
		else
			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
		break;
	case 10:
	default:
		/* not needed */
		break;
	}

	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
}

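/* Check whether the given crtc is currently in the vertical blank. */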
static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
{
	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
		return true;
	else
		return false;
}

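/* Check whether the crtc's scanout position counter is still moving. */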
static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
{
	u32 pos1, pos2;

	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);

	if (pos1 != pos2)
		return true;
	else
		return false;
}

/**
 * dce4_wait_for_vblank - vblank wait asic callback.
 *
 * @rdev: radeon_device pointer
 * @crtc: crtc to wait for vblank on
 *
 * Wait for vblank on the requested crtc (evergreen+).
 */
void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
{
	unsigned i = 0;

	if (crtc >= rdev->num_crtc)
		return;

	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
		return;

	/* depending on when we hit vblank, we may be close to active; if so,
	 * wait for another frame.
	 */
	while (dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}

	while (!dce4_is_in_vblank(rdev, crtc)) {
		if (i++ % 100 == 0) {
			if (!dce4_is_counter_moving(rdev, crtc))
				break;
		}
	}
}

/**
 * evergreen_page_flip - pageflip callback.
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to cleanup pageflip on
 * @crtc_base: new address of the crtc (GPU MC address)
 * @async: asynchronous flip
 *
 * Triggers the actual pageflip by updating the primary
 * surface base address (evergreen+).
 */
void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
			 bool async)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	struct drm_framebuffer *fb = radeon_crtc->base.primary->fb;

	/* flip at hsync for async, default is vsync */
	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
	       async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
	/* update pitch */
	WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset,
	       fb->pitches[0] / fb->format->cpp[0]);
	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);
	/* post the write */
	RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
}

/**
 * evergreen_page_flip_pending - check if page flip is still pending
 *
 * @rdev: radeon_device pointer
 * @crtc_id: crtc to check
 *
 * Returns the current update pending status.
 */
bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];

	/* Return current update_pending status: */
	return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
		EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
}

/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}

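/* get temperature in millidegrees */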
int sumo_get_temp(struct radeon_device *rdev)
{
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
	int actual_temp = temp - 49;

	return actual_temp * 1000;
}

/**
 * sumo_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (sumo, trinity, SI).
 * Used for profile mode only.
 */
void sumo_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;

	/* low,mid sh/mh */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);

	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;

	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;

	/* high sh/mh */
	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;

	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
		rdev->pm.power_state[idx].num_clock_modes - 1;
}

/**
 * btc_pm_init_profile - Initialize power profiles callback.
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the power states used in profile mode
 * (BTC, cayman).
 * Used for profile mode only.
 */
void btc_pm_init_profile(struct radeon_device *rdev)
{
	int idx;

	/* default */
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
	/* starting with BTC, there is one state that is used for both
	 * MH and SH.  The difference is that we always use the high clock
	 * index for mclk.
	 */
	if (rdev->flags & RADEON_IS_MOBILITY)
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
	else
		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
	/* low sh */
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
	/* mid sh */
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
	/* high sh */
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
	/* low mh */
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
	/* mid mh */
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
	/* high mh */
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
}

1618 /**
1619  * evergreen_pm_misc - set additional pm hw parameters callback.
1620  *
1621  * @rdev: radeon_device pointer
1622  *
1623  * Set non-clock parameters associated with a power state
1624  * (voltage, etc.) (evergreen+).
1625  */
1626 void evergreen_pm_misc(struct radeon_device *rdev)
1627 {
1628 	int req_ps_idx = rdev->pm.requested_power_state_index;
1629 	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1630 	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1631 	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1632 
1633 	if (voltage->type == VOLTAGE_SW) {
1634 		/* 0xff0x are flags rather then an actual voltage */
1635 		if ((voltage->voltage & 0xff00) == 0xff00)
1636 			return;
1637 		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1638 			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1639 			rdev->pm.current_vddc = voltage->voltage;
1640 			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1641 		}
1642 
1643 		/* starting with BTC, there is one state that is used for both
1644 		 * MH and SH.  Difference is that we always use the high clock index for
1645 		 * mclk and vddci.
1646 		 */
1647 		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1648 		    (rdev->family >= CHIP_BARTS) &&
1649 		    rdev->pm.active_crtc_count &&
1650 		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1651 		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1652 			voltage = &rdev->pm.power_state[req_ps_idx].
1653 				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1654 
		/* 0xff0x are flags rather than an actual voltage */
1656 		if ((voltage->vddci & 0xff00) == 0xff00)
1657 			return;
1658 		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1659 			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1660 			rdev->pm.current_vddci = voltage->vddci;
1661 			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1662 		}
1663 	}
1664 }
1665 
1666 /**
1667  * evergreen_pm_prepare - pre-power state change callback.
1668  *
1669  * @rdev: radeon_device pointer
1670  *
1671  * Prepare for a power state change (evergreen+).
1672  */
1673 void evergreen_pm_prepare(struct radeon_device *rdev)
1674 {
1675 	struct drm_device *ddev = rdev->ddev;
1676 	struct drm_crtc *crtc;
1677 	struct radeon_crtc *radeon_crtc;
1678 	u32 tmp;
1679 
1680 	/* disable any active CRTCs */
1681 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1682 		radeon_crtc = to_radeon_crtc(crtc);
1683 		if (radeon_crtc->enabled) {
1684 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1685 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1686 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1687 		}
1688 	}
1689 }
1690 
1691 /**
1692  * evergreen_pm_finish - post-power state change callback.
1693  *
1694  * @rdev: radeon_device pointer
1695  *
1696  * Clean up after a power state change (evergreen+).
1697  */
1698 void evergreen_pm_finish(struct radeon_device *rdev)
1699 {
1700 	struct drm_device *ddev = rdev->ddev;
1701 	struct drm_crtc *crtc;
1702 	struct radeon_crtc *radeon_crtc;
1703 	u32 tmp;
1704 
1705 	/* enable any active CRTCs */
1706 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1707 		radeon_crtc = to_radeon_crtc(crtc);
1708 		if (radeon_crtc->enabled) {
1709 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1710 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1711 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1712 		}
1713 	}
1714 }
1715 
1716 /**
1717  * evergreen_hpd_sense - hpd sense callback.
1718  *
1719  * @rdev: radeon_device pointer
1720  * @hpd: hpd (hotplug detect) pin
1721  *
1722  * Checks if a digital monitor is connected (evergreen+).
1723  * Returns true if connected, false if not connected.
1724  */
1725 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1726 {
1727 	if (hpd == RADEON_HPD_NONE)
1728 		return false;
1729 
1730 	return !!(RREG32(DC_HPDx_INT_STATUS_REG(hpd)) & DC_HPDx_SENSE);
1731 }
1732 
1733 /**
1734  * evergreen_hpd_set_polarity - hpd set polarity callback.
1735  *
1736  * @rdev: radeon_device pointer
1737  * @hpd: hpd (hotplug detect) pin
1738  *
1739  * Set the polarity of the hpd pin (evergreen+).
1740  */
1741 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1742 				enum radeon_hpd_id hpd)
1743 {
1744 	bool connected = evergreen_hpd_sense(rdev, hpd);
1745 
1746 	if (hpd == RADEON_HPD_NONE)
1747 		return;
1748 
1749 	if (connected)
1750 		WREG32_AND(DC_HPDx_INT_CONTROL(hpd), ~DC_HPDx_INT_POLARITY);
1751 	else
1752 		WREG32_OR(DC_HPDx_INT_CONTROL(hpd), DC_HPDx_INT_POLARITY);
1753 }
1754 
1755 /**
1756  * evergreen_hpd_init - hpd setup callback.
1757  *
1758  * @rdev: radeon_device pointer
1759  *
1760  * Setup the hpd pins used by the card (evergreen+).
1761  * Enable the pin, set the polarity, and enable the hpd interrupts.
1762  */
1763 void evergreen_hpd_init(struct radeon_device *rdev)
1764 {
1765 	struct drm_device *dev = rdev->ddev;
1766 	struct drm_connector *connector;
1767 	unsigned enabled = 0;
1768 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1769 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1770 
1771 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1772 		enum radeon_hpd_id hpd =
1773 			to_radeon_connector(connector)->hpd.hpd;
1774 
1775 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1776 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
			/* don't try to enable hpd on eDP or LVDS; skipping it
			 * avoids breaking the aux dp channel on iMacs, helps
			 * (but does not completely fix)
			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
			 * and also avoids interrupt storms during dpms.
			 */
1782 			continue;
1783 		}
1784 
1785 		if (hpd == RADEON_HPD_NONE)
1786 			continue;
1787 
1788 		WREG32(DC_HPDx_CONTROL(hpd), tmp);
1789 		enabled |= 1 << hpd;
1790 
1791 		radeon_hpd_set_polarity(rdev, hpd);
1792 	}
1793 	radeon_irq_kms_enable_hpd(rdev, enabled);
1794 }
1795 
1796 /**
1797  * evergreen_hpd_fini - hpd tear down callback.
1798  *
1799  * @rdev: radeon_device pointer
1800  *
1801  * Tear down the hpd pins used by the card (evergreen+).
1802  * Disable the hpd interrupts.
1803  */
1804 void evergreen_hpd_fini(struct radeon_device *rdev)
1805 {
1806 	struct drm_device *dev = rdev->ddev;
1807 	struct drm_connector *connector;
1808 	unsigned disabled = 0;
1809 
1810 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1811 		enum radeon_hpd_id hpd =
1812 			to_radeon_connector(connector)->hpd.hpd;
1813 
1814 		if (hpd == RADEON_HPD_NONE)
1815 			continue;
1816 
1817 		WREG32(DC_HPDx_CONTROL(hpd), 0);
1818 		disabled |= 1 << hpd;
1819 	}
1820 	radeon_irq_kms_disable_hpd(rdev, disabled);
1821 }
1822 
1823 /* watermark setup */
1824 
1825 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1826 					struct radeon_crtc *radeon_crtc,
1827 					struct drm_display_mode *mode,
1828 					struct drm_display_mode *other_mode)
1829 {
1830 	u32 tmp, buffer_alloc, i;
1831 	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1832 	/*
1833 	 * Line Buffer Setup
1834 	 * There are 3 line buffers, each one shared by 2 display controllers.
1835 	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
1837 	 * preset allocations specified in bits 2:0:
1838 	 * first display controller
1839 	 *  0 - first half of lb (3840 * 2)
1840 	 *  1 - first 3/4 of lb (5760 * 2)
1841 	 *  2 - whole lb (7680 * 2), other crtc must be disabled
1842 	 *  3 - first 1/4 of lb (1920 * 2)
1843 	 * second display controller
1844 	 *  4 - second half of lb (3840 * 2)
1845 	 *  5 - second 3/4 of lb (5760 * 2)
1846 	 *  6 - whole lb (7680 * 2), other crtc must be disabled
1847 	 *  7 - last 1/4 of lb (1920 * 2)
1848 	 */
1849 	/* this can get tricky if we have two large displays on a paired group
1850 	 * of crtcs.  Ideally for multiple large displays we'd assign them to
1851 	 * non-linked crtcs for maximum line buffer allocation.
1852 	 */
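	/* Summary of the policy below (illustrative, derived from the code):
	 * if both crtcs of a pair are active, each takes half the lb
	 * (preset 0/4); a lone active crtc takes the whole lb (preset 2/6);
	 * a disabled crtc gets none.
	 */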
1853 	if (radeon_crtc->base.enabled && mode) {
1854 		if (other_mode) {
1855 			tmp = 0; /* 1/2 */
1856 			buffer_alloc = 1;
1857 		} else {
1858 			tmp = 2; /* whole */
1859 			buffer_alloc = 2;
1860 		}
1861 	} else {
1862 		tmp = 0;
1863 		buffer_alloc = 0;
1864 	}
1865 
1866 	/* second controller of the pair uses second half of the lb */
1867 	if (radeon_crtc->crtc_id % 2)
1868 		tmp += 4;
1869 	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1870 
1871 	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1872 		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1873 		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1874 		for (i = 0; i < rdev->usec_timeout; i++) {
1875 			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1876 			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
1877 				break;
1878 			udelay(1);
1879 		}
1880 	}
1881 
1882 	if (radeon_crtc->base.enabled && mode) {
1883 		switch (tmp) {
1884 		case 0:
1885 		case 4:
1886 		default:
1887 			if (ASIC_IS_DCE5(rdev))
1888 				return 4096 * 2;
1889 			else
1890 				return 3840 * 2;
1891 		case 1:
1892 		case 5:
1893 			if (ASIC_IS_DCE5(rdev))
1894 				return 6144 * 2;
1895 			else
1896 				return 5760 * 2;
1897 		case 2:
1898 		case 6:
1899 			if (ASIC_IS_DCE5(rdev))
1900 				return 8192 * 2;
1901 			else
1902 				return 7680 * 2;
1903 		case 3:
1904 		case 7:
1905 			if (ASIC_IS_DCE5(rdev))
1906 				return 2048 * 2;
1907 			else
1908 				return 1920 * 2;
1909 		}
1910 	}
1911 
1912 	/* controller not enabled, so no lb used */
1913 	return 0;
1914 }
1915 
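/**
 * evergreen_get_number_of_dram_channels - get the number of dram channels
 *
 * @rdev: radeon_device pointer
 *
 * Decode the NOOFCHAN field of MC_SHARED_CHMAP into a dram channel
 * count (evergreen+).
 * Returns the number of dram channels (1, 2, 4 or 8).
 */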
1916 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1917 {
1918 	u32 tmp = RREG32(MC_SHARED_CHMAP);
1919 
1920 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1921 	case 0:
1922 	default:
1923 		return 1;
1924 	case 1:
1925 		return 2;
1926 	case 2:
1927 		return 4;
1928 	case 3:
1929 		return 8;
1930 	}
1931 }
1932 
1933 struct evergreen_wm_params {
1934 	u32 dram_channels; /* number of dram channels */
1935 	u32 yclk;          /* bandwidth per dram data pin in kHz */
1936 	u32 sclk;          /* engine clock in kHz */
1937 	u32 disp_clk;      /* display clock in kHz */
1938 	u32 src_width;     /* viewport width */
1939 	u32 active_time;   /* active display time in ns */
1940 	u32 blank_time;    /* blank time in ns */
1941 	bool interlaced;    /* mode is interlaced */
1942 	fixed20_12 vsc;    /* vertical scale ratio */
1943 	u32 num_heads;     /* number of active crtcs */
1944 	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1945 	u32 lb_size;       /* line buffer allocated to pipe */
1946 	u32 vtaps;         /* vertical scaler taps */
1947 };
1948 
1949 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1950 {
1951 	/* Calculate DRAM Bandwidth and the part allocated to display. */
1952 	fixed20_12 dram_efficiency; /* 0.7 */
1953 	fixed20_12 yclk, dram_channels, bandwidth;
1954 	fixed20_12 a;
1955 
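	/* Reading of the fixed-point math below: bandwidth (MB/s) =
	 * yclk/1000 (MHz) * dram_channels * 4 (bytes per channel per
	 * clock) * 0.7 (dram efficiency).
	 */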
1956 	a.full = dfixed_const(1000);
1957 	yclk.full = dfixed_const(wm->yclk);
1958 	yclk.full = dfixed_div(yclk, a);
1959 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1960 	a.full = dfixed_const(10);
1961 	dram_efficiency.full = dfixed_const(7);
1962 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
1963 	bandwidth.full = dfixed_mul(dram_channels, yclk);
1964 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1965 
1966 	return dfixed_trunc(bandwidth);
1967 }
1968 
1969 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1970 {
1971 	/* Calculate DRAM Bandwidth and the part allocated to display. */
1972 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1973 	fixed20_12 yclk, dram_channels, bandwidth;
1974 	fixed20_12 a;
1975 
1976 	a.full = dfixed_const(1000);
1977 	yclk.full = dfixed_const(wm->yclk);
1978 	yclk.full = dfixed_div(yclk, a);
1979 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1980 	a.full = dfixed_const(10);
	disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
1982 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1983 	bandwidth.full = dfixed_mul(dram_channels, yclk);
1984 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1985 
1986 	return dfixed_trunc(bandwidth);
1987 }
1988 
1989 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1990 {
1991 	/* Calculate the display Data return Bandwidth */
1992 	fixed20_12 return_efficiency; /* 0.8 */
1993 	fixed20_12 sclk, bandwidth;
1994 	fixed20_12 a;
1995 
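	/* Reading of the math below: bandwidth (MB/s) = sclk/1000 (MHz) *
	 * 32 (presumably the return path width in bytes) * 0.8 (return
	 * efficiency).
	 */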
1996 	a.full = dfixed_const(1000);
1997 	sclk.full = dfixed_const(wm->sclk);
1998 	sclk.full = dfixed_div(sclk, a);
1999 	a.full = dfixed_const(10);
2000 	return_efficiency.full = dfixed_const(8);
2001 	return_efficiency.full = dfixed_div(return_efficiency, a);
2002 	a.full = dfixed_const(32);
2003 	bandwidth.full = dfixed_mul(a, sclk);
2004 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2005 
2006 	return dfixed_trunc(bandwidth);
2007 }
2008 
2009 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2010 {
2011 	/* Calculate the DMIF Request Bandwidth */
2012 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2013 	fixed20_12 disp_clk, bandwidth;
2014 	fixed20_12 a;
2015 
2016 	a.full = dfixed_const(1000);
2017 	disp_clk.full = dfixed_const(wm->disp_clk);
2018 	disp_clk.full = dfixed_div(disp_clk, a);
2019 	a.full = dfixed_const(10);
2020 	disp_clk_request_efficiency.full = dfixed_const(8);
2021 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2022 	a.full = dfixed_const(32);
2023 	bandwidth.full = dfixed_mul(a, disp_clk);
2024 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2025 
2026 	return dfixed_trunc(bandwidth);
2027 }
2028 
2029 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2030 {
	/* Calculate the available bandwidth. Display can use this temporarily but not on average. */
2032 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2033 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2034 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2035 
2036 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2037 }
2038 
2039 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2040 {
2041 	/* Calculate the display mode Average Bandwidth
2042 	 * DisplayMode should contain the source and destination dimensions,
2043 	 * timing, etc.
2044 	 */
2045 	fixed20_12 bpp;
2046 	fixed20_12 line_time;
2047 	fixed20_12 src_width;
2048 	fixed20_12 bandwidth;
2049 	fixed20_12 a;
2050 
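	/* Reading of the math below: average bandwidth (MB/s) = src_width *
	 * bytes_per_pixel * vsc / line_time, with line_time converted from
	 * ns to us, i.e. the bytes fetched per source line over the time
	 * available to fetch them.
	 */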
2051 	a.full = dfixed_const(1000);
2052 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2053 	line_time.full = dfixed_div(line_time, a);
2054 	bpp.full = dfixed_const(wm->bytes_per_pixel);
2055 	src_width.full = dfixed_const(wm->src_width);
2056 	bandwidth.full = dfixed_mul(src_width, bpp);
2057 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2058 	bandwidth.full = dfixed_div(bandwidth, line_time);
2059 
2060 	return dfixed_trunc(bandwidth);
2061 }
2062 
2063 static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2064 {
	/* First calculate the latency in ns */
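	/* Reading of the constants below (an interpretation, not from hw
	 * docs): the return times are in ns given bandwidth in MB/s (hence
	 * the * 1000), 512 * 8 and 128 * 4 being the byte counts of a
	 * worst-case chunk and a cursor line pair; dc_latency is 40 display
	 * clocks expressed in ns.
	 */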
2066 	u32 mc_latency = 2000; /* 2000 ns. */
2067 	u32 available_bandwidth = evergreen_available_bandwidth(wm);
2068 	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2069 	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2070 	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2071 	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2072 		(wm->num_heads * cursor_line_pair_return_time);
2073 	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2074 	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2075 	fixed20_12 a, b, c;
2076 
2077 	if (wm->num_heads == 0)
2078 		return 0;
2079 
2080 	a.full = dfixed_const(2);
2081 	b.full = dfixed_const(1);
2082 	if ((wm->vsc.full > a.full) ||
2083 	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2084 	    (wm->vtaps >= 5) ||
2085 	    ((wm->vsc.full >= a.full) && wm->interlaced))
2086 		max_src_lines_per_dst_line = 4;
2087 	else
2088 		max_src_lines_per_dst_line = 2;
2089 
2090 	a.full = dfixed_const(available_bandwidth);
2091 	b.full = dfixed_const(wm->num_heads);
2092 	a.full = dfixed_div(a, b);
2093 
2094 	lb_fill_bw = min(dfixed_trunc(a), wm->disp_clk * wm->bytes_per_pixel / 1000);
2095 
2096 	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2097 	b.full = dfixed_const(1000);
2098 	c.full = dfixed_const(lb_fill_bw);
2099 	b.full = dfixed_div(c, b);
2100 	a.full = dfixed_div(a, b);
2101 	line_fill_time = dfixed_trunc(a);
2102 
2103 	if (line_fill_time < wm->active_time)
2104 		return latency;
2105 	else
2106 		return latency + (line_fill_time - wm->active_time);
2107 
2108 }
2109 
static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
{
	return evergreen_average_bandwidth(wm) <=
		(evergreen_dram_bandwidth_for_display(wm) / wm->num_heads);
}
2118 
static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
{
	return evergreen_average_bandwidth(wm) <=
		(evergreen_available_bandwidth(wm) / wm->num_heads);
}
2127 
2128 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2129 {
2130 	u32 lb_partitions = wm->lb_size / wm->src_width;
2131 	u32 line_time = wm->active_time + wm->blank_time;
2132 	u32 latency_tolerant_lines;
2133 	u32 latency_hiding;
2134 	fixed20_12 a;
2135 
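	/* Reading of the logic below: with downscaling (vsc > 1) or too few
	 * lb partitions for the scaler taps, only one buffered line can
	 * hide latency; otherwise two.  The watermark must fit within the
	 * time those lines plus the blank period buy us.
	 */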
2136 	a.full = dfixed_const(1);
2137 	if (wm->vsc.full > a.full)
2138 		latency_tolerant_lines = 1;
2139 	else {
2140 		if (lb_partitions <= (wm->vtaps + 1))
2141 			latency_tolerant_lines = 1;
2142 		else
2143 			latency_tolerant_lines = 2;
2144 	}
2145 
2146 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2147 
	return evergreen_latency_watermark(wm) <= latency_hiding;
2152 }
2153 
2154 static void evergreen_program_watermarks(struct radeon_device *rdev,
2155 					 struct radeon_crtc *radeon_crtc,
2156 					 u32 lb_size, u32 num_heads)
2157 {
2158 	struct drm_display_mode *mode = &radeon_crtc->base.mode;
2159 	struct evergreen_wm_params wm_low, wm_high;
2160 	u32 dram_channels;
2161 	u32 active_time;
2162 	u32 line_time = 0;
2163 	u32 latency_watermark_a = 0, latency_watermark_b = 0;
2164 	u32 priority_a_mark = 0, priority_b_mark = 0;
2165 	u32 priority_a_cnt = PRIORITY_OFF;
2166 	u32 priority_b_cnt = PRIORITY_OFF;
2167 	u32 pipe_offset = radeon_crtc->crtc_id * 16;
2168 	u32 tmp, arb_control3;
2169 	fixed20_12 a, b, c;
2170 
2171 	if (radeon_crtc->base.enabled && num_heads && mode) {
2172 		active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000,
2173 					    (u32)mode->clock);
2174 		line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000,
2175 					  (u32)mode->clock);
2176 		line_time = min(line_time, (u32)65535);
2177 		priority_a_cnt = 0;
2178 		priority_b_cnt = 0;
2179 		dram_channels = evergreen_get_number_of_dram_channels(rdev);
2180 
2181 		/* watermark for high clocks */
2182 		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2183 			wm_high.yclk =
2184 				radeon_dpm_get_mclk(rdev, false) * 10;
2185 			wm_high.sclk =
2186 				radeon_dpm_get_sclk(rdev, false) * 10;
2187 		} else {
2188 			wm_high.yclk = rdev->pm.current_mclk * 10;
2189 			wm_high.sclk = rdev->pm.current_sclk * 10;
2190 		}
2191 
2192 		wm_high.disp_clk = mode->clock;
2193 		wm_high.src_width = mode->crtc_hdisplay;
2194 		wm_high.active_time = active_time;
2195 		wm_high.blank_time = line_time - wm_high.active_time;
2196 		wm_high.interlaced = false;
2197 		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2198 			wm_high.interlaced = true;
2199 		wm_high.vsc = radeon_crtc->vsc;
2200 		wm_high.vtaps = 1;
2201 		if (radeon_crtc->rmx_type != RMX_OFF)
2202 			wm_high.vtaps = 2;
2203 		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2204 		wm_high.lb_size = lb_size;
2205 		wm_high.dram_channels = dram_channels;
2206 		wm_high.num_heads = num_heads;
2207 
2208 		/* watermark for low clocks */
2209 		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2210 			wm_low.yclk =
2211 				radeon_dpm_get_mclk(rdev, true) * 10;
2212 			wm_low.sclk =
2213 				radeon_dpm_get_sclk(rdev, true) * 10;
2214 		} else {
2215 			wm_low.yclk = rdev->pm.current_mclk * 10;
2216 			wm_low.sclk = rdev->pm.current_sclk * 10;
2217 		}
2218 
2219 		wm_low.disp_clk = mode->clock;
2220 		wm_low.src_width = mode->crtc_hdisplay;
2221 		wm_low.active_time = active_time;
2222 		wm_low.blank_time = line_time - wm_low.active_time;
2223 		wm_low.interlaced = false;
2224 		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2225 			wm_low.interlaced = true;
2226 		wm_low.vsc = radeon_crtc->vsc;
2227 		wm_low.vtaps = 1;
2228 		if (radeon_crtc->rmx_type != RMX_OFF)
2229 			wm_low.vtaps = 2;
2230 		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2231 		wm_low.lb_size = lb_size;
2232 		wm_low.dram_channels = dram_channels;
2233 		wm_low.num_heads = num_heads;
2234 
2235 		/* set for high clocks */
2236 		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2237 		/* set for low clocks */
2238 		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2239 
2240 		/* possibly force display priority to high */
2241 		/* should really do this at mode validation time... */
2242 		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2243 		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2244 		    !evergreen_check_latency_hiding(&wm_high) ||
2245 		    (rdev->disp_priority == 2)) {
2246 			DRM_DEBUG_KMS("force priority a to high\n");
2247 			priority_a_cnt |= PRIORITY_ALWAYS_ON;
2248 		}
2249 		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2250 		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2251 		    !evergreen_check_latency_hiding(&wm_low) ||
2252 		    (rdev->disp_priority == 2)) {
2253 			DRM_DEBUG_KMS("force priority b to high\n");
2254 			priority_b_cnt |= PRIORITY_ALWAYS_ON;
2255 		}
2256 
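		/* Reading of the fixed-point math below: priority mark =
		 * latency watermark (ns) * pixel clock (MHz) * hsc / 1000,
		 * quantized to 16-pixel units, i.e. roughly the pixels the
		 * crtc consumes during the latency window.
		 */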
2257 		a.full = dfixed_const(1000);
2258 		b.full = dfixed_const(mode->clock);
2259 		b.full = dfixed_div(b, a);
2260 		c.full = dfixed_const(latency_watermark_a);
2261 		c.full = dfixed_mul(c, b);
2262 		c.full = dfixed_mul(c, radeon_crtc->hsc);
2263 		c.full = dfixed_div(c, a);
2264 		a.full = dfixed_const(16);
2265 		c.full = dfixed_div(c, a);
2266 		priority_a_mark = dfixed_trunc(c);
2267 		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2268 
2269 		a.full = dfixed_const(1000);
2270 		b.full = dfixed_const(mode->clock);
2271 		b.full = dfixed_div(b, a);
2272 		c.full = dfixed_const(latency_watermark_b);
2273 		c.full = dfixed_mul(c, b);
2274 		c.full = dfixed_mul(c, radeon_crtc->hsc);
2275 		c.full = dfixed_div(c, a);
2276 		a.full = dfixed_const(16);
2277 		c.full = dfixed_div(c, a);
2278 		priority_b_mark = dfixed_trunc(c);
2279 		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2280 
2281 		/* Save number of lines the linebuffer leads before the scanout */
2282 		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
2283 	}
2284 
2285 	/* select wm A */
2286 	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2287 	tmp = arb_control3;
2288 	tmp &= ~LATENCY_WATERMARK_MASK(3);
2289 	tmp |= LATENCY_WATERMARK_MASK(1);
2290 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2291 	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2292 	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2293 		LATENCY_HIGH_WATERMARK(line_time)));
2294 	/* select wm B */
2295 	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2296 	tmp &= ~LATENCY_WATERMARK_MASK(3);
2297 	tmp |= LATENCY_WATERMARK_MASK(2);
2298 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2299 	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2300 	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2301 		LATENCY_HIGH_WATERMARK(line_time)));
2302 	/* restore original selection */
2303 	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2304 
2305 	/* write the priority marks */
2306 	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2307 	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2308 
2309 	/* save values for DPM */
2310 	radeon_crtc->line_time = line_time;
2311 	radeon_crtc->wm_high = latency_watermark_a;
2312 	radeon_crtc->wm_low = latency_watermark_b;
2313 }
2314 
2315 /**
2316  * evergreen_bandwidth_update - update display watermarks callback.
2317  *
2318  * @rdev: radeon_device pointer
2319  *
2320  * Update the display watermarks based on the requested mode(s)
2321  * (evergreen+).
2322  */
2323 void evergreen_bandwidth_update(struct radeon_device *rdev)
2324 {
2325 	struct drm_display_mode *mode0 = NULL;
2326 	struct drm_display_mode *mode1 = NULL;
2327 	u32 num_heads = 0, lb_size;
2328 	int i;
2329 
2330 	if (!rdev->mode_info.mode_config_initialized)
2331 		return;
2332 
2333 	radeon_update_display_priority(rdev);
2334 
2335 	for (i = 0; i < rdev->num_crtc; i++) {
2336 		if (rdev->mode_info.crtcs[i]->base.enabled)
2337 			num_heads++;
2338 	}
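	/* line buffers are shared by crtc pairs (0/1, 2/3, 4/5), so adjust
	 * the split and program the watermarks for each pair together
	 */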
2339 	for (i = 0; i < rdev->num_crtc; i += 2) {
2340 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2341 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2342 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2343 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2344 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2345 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2346 	}
2347 }
2348 
2349 /**
2350  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2351  *
2352  * @rdev: radeon_device pointer
2353  *
2354  * Wait for the MC (memory controller) to be idle.
2355  * (evergreen+).
2356  * Returns 0 if the MC is idle, -1 if not.
2357  */
2358 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2359 {
2360 	unsigned i;
2361 	u32 tmp;
2362 
2363 	for (i = 0; i < rdev->usec_timeout; i++) {
		/* check the MC busy bits in SRBM_STATUS */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2366 		if (!tmp)
2367 			return 0;
2368 		udelay(1);
2369 	}
2370 	return -1;
2371 }
2372 
2373 /*
2374  * GART
2375  */
2376 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2377 {
2378 	unsigned i;
2379 	u32 tmp;
2380 
2381 	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2382 
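	/* request a VM context0 TLB flush and poll until the hw posts a
	 * response in the RESPONSE_TYPE field
	 */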
2383 	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2384 	for (i = 0; i < rdev->usec_timeout; i++) {
		/* read the flush request response */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			pr_warn("[drm] evergreen flush TLB failed\n");
			return;
		}
		if (tmp)
			return;
2395 		udelay(1);
2396 	}
2397 }
2398 
2399 static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2400 {
2401 	u32 tmp;
2402 	int r;
2403 
2404 	if (rdev->gart.robj == NULL) {
2405 		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2406 		return -EINVAL;
2407 	}
2408 	r = radeon_gart_table_vram_pin(rdev);
2409 	if (r)
2410 		return r;
2411 	/* Setup L2 cache */
2412 	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2413 				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2414 				EFFECTIVE_L2_QUEUE_SIZE(7));
2415 	WREG32(VM_L2_CNTL2, 0);
2416 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2417 	/* Setup TLB control */
2418 	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2419 		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2420 		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2421 		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2422 	if (rdev->flags & RADEON_IS_IGP) {
2423 		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2424 		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2425 		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2426 	} else {
2427 		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2428 		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2429 		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2430 		if ((rdev->family == CHIP_JUNIPER) ||
2431 		    (rdev->family == CHIP_CYPRESS) ||
2432 		    (rdev->family == CHIP_HEMLOCK) ||
2433 		    (rdev->family == CHIP_BARTS))
2434 			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2435 	}
2436 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2437 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2438 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2439 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2440 	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2441 	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2442 	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2443 	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2444 				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2445 	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2446 			(u32)(rdev->dummy_page.addr >> 12));
2447 	WREG32(VM_CONTEXT1_CNTL, 0);
2448 
2449 	evergreen_pcie_gart_tlb_flush(rdev);
2450 	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2451 		 (unsigned)(rdev->mc.gtt_size >> 20),
2452 		 (unsigned long long)rdev->gart.table_addr);
2453 	rdev->gart.ready = true;
2454 	return 0;
2455 }
2456 
2457 static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2458 {
2459 	u32 tmp;
2460 
2461 	/* Disable all tables */
2462 	WREG32(VM_CONTEXT0_CNTL, 0);
2463 	WREG32(VM_CONTEXT1_CNTL, 0);
2464 
2465 	/* Setup L2 cache */
2466 	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2467 				EFFECTIVE_L2_QUEUE_SIZE(7));
2468 	WREG32(VM_L2_CNTL2, 0);
2469 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2470 	/* Setup TLB control */
2471 	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2472 	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2473 	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2474 	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2475 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2476 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2477 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2478 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2479 	radeon_gart_table_vram_unpin(rdev);
2480 }
2481 
2482 static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2483 {
2484 	evergreen_pcie_gart_disable(rdev);
2485 	radeon_gart_table_vram_free(rdev);
2486 	radeon_gart_fini(rdev);
2487 }
2488 
2489 
2490 static void evergreen_agp_enable(struct radeon_device *rdev)
2491 {
2492 	u32 tmp;
2493 
2494 	/* Setup L2 cache */
2495 	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2496 				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2497 				EFFECTIVE_L2_QUEUE_SIZE(7));
2498 	WREG32(VM_L2_CNTL2, 0);
2499 	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2500 	/* Setup TLB control */
2501 	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2502 		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2503 		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2504 		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2505 	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2506 	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2507 	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2508 	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2509 	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2510 	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2511 	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2512 	WREG32(VM_CONTEXT0_CNTL, 0);
2513 	WREG32(VM_CONTEXT1_CNTL, 0);
2514 }
2515 
2516 static const unsigned ni_dig_offsets[] =
2517 {
2518 	NI_DIG0_REGISTER_OFFSET,
2519 	NI_DIG1_REGISTER_OFFSET,
2520 	NI_DIG2_REGISTER_OFFSET,
2521 	NI_DIG3_REGISTER_OFFSET,
2522 	NI_DIG4_REGISTER_OFFSET,
2523 	NI_DIG5_REGISTER_OFFSET
2524 };
2525 
2526 static const unsigned ni_tx_offsets[] =
2527 {
2528 	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
2529 	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
2530 	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
2531 	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
2532 	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
2533 	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
2534 };
2535 
2536 static const unsigned evergreen_dp_offsets[] =
2537 {
2538 	EVERGREEN_DP0_REGISTER_OFFSET,
2539 	EVERGREEN_DP1_REGISTER_OFFSET,
2540 	EVERGREEN_DP2_REGISTER_OFFSET,
2541 	EVERGREEN_DP3_REGISTER_OFFSET,
2542 	EVERGREEN_DP4_REGISTER_OFFSET,
2543 	EVERGREEN_DP5_REGISTER_OFFSET
2544 };
2545 
2546 static const unsigned evergreen_disp_int_status[] =
2547 {
2548 	DISP_INTERRUPT_STATUS,
2549 	DISP_INTERRUPT_STATUS_CONTINUE,
2550 	DISP_INTERRUPT_STATUS_CONTINUE2,
2551 	DISP_INTERRUPT_STATUS_CONTINUE3,
2552 	DISP_INTERRUPT_STATUS_CONTINUE4,
2553 	DISP_INTERRUPT_STATUS_CONTINUE5
2554 };
2555 
/*
 * Assumes EVERGREEN_CRTC_MASTER_EN is enabled for the requested crtc.
 * We go from crtc to connector, which is not reliable since it should
 * really work in the opposite direction.  If the crtc is enabled, find
 * the dig_fe which selects this crtc and ensure that it is enabled.  If
 * such a dig_fe is found, find the dig_be which selects the found dig_fe
 * and ensure that it is enabled and in DP_SST mode.
 * If UNIPHY_PLL_CONTROL1.enable is set, we should disconnect the timing
 * from the dp symbol clocks.
 */
2566 static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
2567 					       unsigned crtc_id, unsigned *ret_dig_fe)
2568 {
2569 	unsigned i;
2570 	unsigned dig_fe;
2571 	unsigned dig_be;
2572 	unsigned dig_en_be;
2573 	unsigned uniphy_pll;
2574 	unsigned digs_fe_selected;
2575 	unsigned dig_be_mode;
2576 	unsigned dig_fe_mask;
2577 	bool is_enabled = false;
2578 	bool found_crtc = false;
2579 
2580 	/* loop through all running dig_fe to find selected crtc */
2581 	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2582 		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
2583 		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
2584 		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
2585 			/* found running pipe */
2586 			found_crtc = true;
2587 			dig_fe_mask = 1 << i;
2588 			dig_fe = i;
2589 			break;
2590 		}
2591 	}
2592 
2593 	if (found_crtc) {
2594 		/* loop through all running dig_be to find selected dig_fe */
2595 		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
2596 			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
			/* is this dig_fe selected by the dig_be? */
			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
			if (dig_fe_mask & digs_fe_selected &&
			    /* is the dig_be in SST mode? */
			    dig_be_mode == NI_DIG_BE_DPSST) {
2603 				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
2604 						   ni_dig_offsets[i]);
2605 				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
2606 						    ni_tx_offsets[i]);
				/* dig_be enabled and tx running */
2608 				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
2609 				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
2610 				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
2611 					is_enabled = true;
2612 					*ret_dig_fe = dig_fe;
2613 					break;
2614 				}
2615 			}
2616 		}
2617 	}
2618 
2619 	return is_enabled;
2620 }
2621 
/*
 * Blank the dig when in dp sst mode; the dig ignores the crtc timing.
 */
2626 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2627 				      unsigned dig_fe)
2628 {
2629 	unsigned stream_ctrl;
2630 	unsigned fifo_ctrl;
2631 	unsigned counter = 0;
2632 
2633 	if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2634 		DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2635 		return;
2636 	}
2637 
2638 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2639 			     evergreen_dp_offsets[dig_fe]);
2640 	if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
		DRM_ERROR("dig %d should be enabled\n", dig_fe);
2642 		return;
2643 	}
2644 
	stream_ctrl &= ~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2646 	WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2647 	       evergreen_dp_offsets[dig_fe], stream_ctrl);
2648 
2649 	stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2650 			     evergreen_dp_offsets[dig_fe]);
2651 	while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2652 		msleep(1);
2653 		counter++;
2654 		stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2655 				     evergreen_dp_offsets[dig_fe]);
2656 	}
	if (counter >= 32)
		DRM_ERROR("timed out waiting for the DP stream to stop (%d ms)\n",
			  counter);
2659 
2660 	fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2661 	fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2662 	WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
}
2665 
2666 void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2667 {
2668 	u32 crtc_enabled, tmp, frame_count, blackout;
2669 	int i, j;
2670 	unsigned dig_fe;
2671 
2672 	if (!ASIC_IS_NODCE(rdev)) {
2673 		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2674 		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2675 
2676 		/* disable VGA render */
2677 		WREG32(VGA_RENDER_CONTROL, 0);
2678 	}
2679 	/* blank the display controllers */
2680 	for (i = 0; i < rdev->num_crtc; i++) {
2681 		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2682 		if (crtc_enabled) {
2683 			save->crtc_enabled[i] = true;
2684 			if (ASIC_IS_DCE6(rdev)) {
2685 				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2686 				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2687 					radeon_wait_for_vblank(rdev, i);
2688 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2689 					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2690 					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2691 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2692 				}
2693 			} else {
2694 				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2695 				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2696 					radeon_wait_for_vblank(rdev, i);
2697 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2698 					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2699 					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2700 					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2701 				}
2702 			}
2703 			/* wait for the next frame */
2704 			frame_count = radeon_get_vblank_counter(rdev, i);
2705 			for (j = 0; j < rdev->usec_timeout; j++) {
2706 				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2707 					break;
2708 				udelay(1);
2709 			}
			/* we should disable the dig if it drives a dp sst
			 * stream, but we are in radeon_device_init and the
			 * topology is unknown (it only becomes available
			 * after radeon_modeset_init).  The method
			 * radeon_atom_encoder_dpms_dig would do the job if
			 * we initialized it properly; for now we do it
			 * manually.
			 */
2717 			if (ASIC_IS_DCE5(rdev) &&
			    evergreen_is_dp_sst_stream_enabled(rdev, i, &dig_fe))
2719 				evergreen_blank_dp_output(rdev, dig_fe);
			/* we could remove the 6 lines below */
2721 			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2722 			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2723 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2724 			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2725 			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2726 			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2727 			save->crtc_enabled[i] = false;
2728 			/* ***** */
2729 		} else {
2730 			save->crtc_enabled[i] = false;
2731 		}
2732 	}
2733 
2734 	radeon_mc_wait_for_idle(rdev);
2735 
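	/* enter MC blackout mode (presumably stalling MC client requests)
	 * so the aperture can be reprogrammed safely; CPU framebuffer
	 * access is blocked via BIF_FB_EN meanwhile
	 */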
2736 	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2737 	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2738 		/* Block CPU access */
2739 		WREG32(BIF_FB_EN, 0);
2740 		/* blackout the MC */
2741 		blackout &= ~BLACKOUT_MODE_MASK;
2742 		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2743 	}
2744 	/* wait for the MC to settle */
2745 	udelay(100);
2746 
2747 	/* lock double buffered regs */
2748 	for (i = 0; i < rdev->num_crtc; i++) {
2749 		if (save->crtc_enabled[i]) {
2750 			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2751 			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2752 				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2753 				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2754 			}
2755 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2756 			if (!(tmp & 1)) {
2757 				tmp |= 1;
2758 				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2759 			}
2760 		}
2761 	}
2762 }
2763 
2764 void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2765 {
2766 	u32 tmp, frame_count;
2767 	int i, j;
2768 
2769 	/* update crtc base addresses */
2770 	for (i = 0; i < rdev->num_crtc; i++) {
2771 		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2772 		       upper_32_bits(rdev->mc.vram_start));
2773 		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2774 		       upper_32_bits(rdev->mc.vram_start));
2775 		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2776 		       (u32)rdev->mc.vram_start);
2777 		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2778 		       (u32)rdev->mc.vram_start);
2779 	}
2780 
2781 	if (!ASIC_IS_NODCE(rdev)) {
2782 		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2783 		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2784 	}
2785 
2786 	/* unlock regs and wait for update */
2787 	for (i = 0; i < rdev->num_crtc; i++) {
2788 		if (save->crtc_enabled[i]) {
2789 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2790 			if ((tmp & 0x7) != 0) {
2791 				tmp &= ~0x7;
2792 				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2793 			}
2794 			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2795 			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2796 				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2797 				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2798 			}
2799 			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2800 			if (tmp & 1) {
2801 				tmp &= ~1;
2802 				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2803 			}
2804 			for (j = 0; j < rdev->usec_timeout; j++) {
2805 				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2806 				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2807 					break;
2808 				udelay(1);
2809 			}
2810 		}
2811 	}
2812 
2813 	/* unblackout the MC */
2814 	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2815 	tmp &= ~BLACKOUT_MODE_MASK;
2816 	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2817 	/* allow CPU access */
2818 	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2819 
2820 	for (i = 0; i < rdev->num_crtc; i++) {
2821 		if (save->crtc_enabled[i]) {
2822 			if (ASIC_IS_DCE6(rdev)) {
2823 				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2824 				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
2825 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2826 				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2827 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2828 			} else {
2829 				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2830 				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2831 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2832 				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2833 				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2834 			}
2835 			/* wait for the next frame */
2836 			frame_count = radeon_get_vblank_counter(rdev, i);
2837 			for (j = 0; j < rdev->usec_timeout; j++) {
2838 				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2839 					break;
2840 				udelay(1);
2841 			}
2842 		}
2843 	}
2844 	if (!ASIC_IS_NODCE(rdev)) {
2845 		/* Unlock vga access */
2846 		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2847 		mdelay(1);
2848 		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2849 	}
2850 }
2851 
2852 void evergreen_mc_program(struct radeon_device *rdev)
2853 {
2854 	struct evergreen_mc_save save;
2855 	u32 tmp;
2856 	int i, j;
2857 
2858 	/* Initialize HDP */
2859 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2860 		WREG32((0x2c14 + j), 0x00000000);
2861 		WREG32((0x2c18 + j), 0x00000000);
2862 		WREG32((0x2c1c + j), 0x00000000);
2863 		WREG32((0x2c20 + j), 0x00000000);
2864 		WREG32((0x2c24 + j), 0x00000000);
2865 	}
2866 	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2867 
2868 	evergreen_mc_stop(rdev, &save);
2869 	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2871 	}
	/* Lock out access through the VGA aperture */
2873 	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2874 	/* Update configuration */
2875 	if (rdev->flags & RADEON_IS_AGP) {
2876 		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2877 			/* VRAM before AGP */
2878 			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2879 				rdev->mc.vram_start >> 12);
2880 			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2881 				rdev->mc.gtt_end >> 12);
2882 		} else {
2883 			/* VRAM after AGP */
2884 			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2885 				rdev->mc.gtt_start >> 12);
2886 			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2887 				rdev->mc.vram_end >> 12);
2888 		}
2889 	} else {
2890 		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2891 			rdev->mc.vram_start >> 12);
2892 		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2893 			rdev->mc.vram_end >> 12);
2894 	}
2895 	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2896 	/* llano/ontario only */
2897 	if ((rdev->family == CHIP_PALM) ||
2898 	    (rdev->family == CHIP_SUMO) ||
2899 	    (rdev->family == CHIP_SUMO2)) {
2900 		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2901 		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2902 		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2903 		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2904 	}
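	/* MC_VM_FB_LOCATION packs the VRAM range in 16MB units (>> 24):
	 * top in bits 31:16, base in bits 15:0
	 */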
2905 	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2906 	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2907 	WREG32(MC_VM_FB_LOCATION, tmp);
2908 	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2909 	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2910 	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2911 	if (rdev->flags & RADEON_IS_AGP) {
2912 		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2913 		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2914 		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
2915 	} else {
2916 		WREG32(MC_VM_AGP_BASE, 0);
2917 		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2918 		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2919 	}
2920 	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2922 	}
2923 	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it from overwriting our objects */
2926 	rv515_vga_render_disable(rdev);
2927 }
2928 
2929 /*
2930  * CP.
2931  */
2932 void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2933 {
2934 	struct radeon_ring *ring = &rdev->ring[ib->ring];
2935 	u32 next_rptr;
2936 
2937 	/* set to DX10/11 mode */
2938 	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2939 	radeon_ring_write(ring, 1);
2940 
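	/* optionally record the post-IB rptr: the 3/5 terms below account
	 * for the size of the save packet itself, plus 4 dwords for the
	 * INDIRECT_BUFFER packet emitted at the end of this function
	 */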
2941 	if (ring->rptr_save_reg) {
2942 		next_rptr = ring->wptr + 3 + 4;
2943 		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2944 		radeon_ring_write(ring, ((ring->rptr_save_reg -
2945 					  PACKET3_SET_CONFIG_REG_START) >> 2));
2946 		radeon_ring_write(ring, next_rptr);
2947 	} else if (rdev->wb.enabled) {
2948 		next_rptr = ring->wptr + 5 + 4;
2949 		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2950 		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
2951 		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2952 		radeon_ring_write(ring, next_rptr);
2953 		radeon_ring_write(ring, 0);
2954 	}
2955 
2956 	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2957 	radeon_ring_write(ring,
2958 #ifdef __BIG_ENDIAN
2959 			  (2 << 0) |
2960 #endif
2961 			  (ib->gpu_addr & 0xFFFFFFFC));
2962 	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2963 	radeon_ring_write(ring, ib->length_dw);
2964 }
2965 
2966 
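/*
 * Load the PFP and ME microcode into the CP ucode rams.  Assumes the
 * firmware images (rdev->pfp_fw / rdev->me_fw) were fetched earlier.
 */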
2967 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2968 {
2969 	const __be32 *fw_data;
2970 	int i;
2971 
2972 	if (!rdev->me_fw || !rdev->pfp_fw)
2973 		return -EINVAL;
2974 
2975 	r700_cp_stop(rdev);
2976 	WREG32(CP_RB_CNTL,
2977 #ifdef __BIG_ENDIAN
2978 	       BUF_SWAP_32BIT |
2979 #endif
2980 	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2981 
2982 	fw_data = (const __be32 *)rdev->pfp_fw->data;
2983 	WREG32(CP_PFP_UCODE_ADDR, 0);
2984 	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2985 		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2986 	WREG32(CP_PFP_UCODE_ADDR, 0);
2987 
2988 	fw_data = (const __be32 *)rdev->me_fw->data;
2989 	WREG32(CP_ME_RAM_WADDR, 0);
2990 	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2991 		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2992 
2993 	WREG32(CP_PFP_UCODE_ADDR, 0);
2994 	WREG32(CP_ME_RAM_WADDR, 0);
2995 	WREG32(CP_ME_RAM_RADDR, 0);
2996 	return 0;
2997 }
2998 
2999 static int evergreen_cp_start(struct radeon_device *rdev)
3000 {
3001 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3002 	int r, i;
3003 	uint32_t cp_me;
3004 
3005 	r = radeon_ring_lock(rdev, ring, 7);
3006 	if (r) {
3007 		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3008 		return r;
3009 	}
3010 	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3011 	radeon_ring_write(ring, 0x1);
3012 	radeon_ring_write(ring, 0x0);
3013 	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
3014 	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3015 	radeon_ring_write(ring, 0);
3016 	radeon_ring_write(ring, 0);
3017 	radeon_ring_unlock_commit(rdev, ring, false);
3018 
3019 	cp_me = 0xff;
3020 	WREG32(CP_ME_CNTL, cp_me);
3021 
3022 	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
3023 	if (r) {
3024 		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3025 		return r;
3026 	}
3027 
3028 	/* setup clear context state */
3029 	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3030 	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3031 
3032 	for (i = 0; i < evergreen_default_size; i++)
3033 		radeon_ring_write(ring, evergreen_default_state[i]);
3034 
3035 	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3036 	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3037 
3038 	/* set clear context state */
3039 	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3040 	radeon_ring_write(ring, 0);
3041 
3042 	/* SQ_VTX_BASE_VTX_LOC */
3043 	radeon_ring_write(ring, 0xc0026f00);
3044 	radeon_ring_write(ring, 0x00000000);
3045 	radeon_ring_write(ring, 0x00000000);
3046 	radeon_ring_write(ring, 0x00000000);
3047 
3048 	/* Clear consts */
3049 	radeon_ring_write(ring, 0xc0036f00);
3050 	radeon_ring_write(ring, 0x00000bc4);
3051 	radeon_ring_write(ring, 0xffffffff);
3052 	radeon_ring_write(ring, 0xffffffff);
3053 	radeon_ring_write(ring, 0xffffffff);
3054 
3055 	radeon_ring_write(ring, 0xc0026900);
3056 	radeon_ring_write(ring, 0x00000316);
3057 	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
3059 
3060 	radeon_ring_unlock_commit(rdev, ring, false);
3061 
3062 	return 0;
3063 }
3064 
3065 static int evergreen_cp_resume(struct radeon_device *rdev)
3066 {
3067 	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3068 	u32 tmp;
3069 	u32 rb_bufsz;
3070 	int r;
3071 
3072 	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
3073 	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
3074 				 SOFT_RESET_PA |
3075 				 SOFT_RESET_SH |
3076 				 SOFT_RESET_VGT |
3077 				 SOFT_RESET_SPI |
3078 				 SOFT_RESET_SX));
3079 	RREG32(GRBM_SOFT_RESET);
3080 	mdelay(15);
3081 	WREG32(GRBM_SOFT_RESET, 0);
3082 	RREG32(GRBM_SOFT_RESET);
3083 
3084 	/* Set ring buffer size */
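	/* RB_BUFSZ and RB_BLKSZ appear to be log2 values in 8-byte (qword)
	 * units; the block size is derived from the GPU page size
	 */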
3085 	rb_bufsz = order_base_2(ring->ring_size / 8);
3086 	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
3087 #ifdef __BIG_ENDIAN
3088 	tmp |= BUF_SWAP_32BIT;
3089 #endif
3090 	WREG32(CP_RB_CNTL, tmp);
3091 	WREG32(CP_SEM_WAIT_TIMER, 0x0);
3092 	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
3093 
3094 	/* Set the write pointer delay */
3095 	WREG32(CP_RB_WPTR_DELAY, 0);
3096 
3097 	/* Initialize the ring buffer's read and write pointers */
3098 	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
3099 	WREG32(CP_RB_RPTR_WR, 0);
3100 	ring->wptr = 0;
3101 	WREG32(CP_RB_WPTR, ring->wptr);
3102 
3103 	/* set the wb address whether it's enabled or not */
3104 	WREG32(CP_RB_RPTR_ADDR,
3105 	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
3106 	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
3107 	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
3108 
3109 	if (rdev->wb.enabled)
3110 		WREG32(SCRATCH_UMSK, 0xff);
3111 	else {
3112 		tmp |= RB_NO_UPDATE;
3113 		WREG32(SCRATCH_UMSK, 0);
3114 	}
3115 
3116 	mdelay(1);
3117 	WREG32(CP_RB_CNTL, tmp);
3118 
3119 	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
3120 	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
3121 
3122 	evergreen_cp_start(rdev);
3123 	ring->ready = true;
3124 	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
3125 	if (r) {
3126 		ring->ready = false;
3127 		return r;
3128 	}
3129 	return 0;
3130 }
3131 
3132 /*
3133  * Core functions
3134  */
3135 static void evergreen_gpu_init(struct radeon_device *rdev)
3136 {
3137 	u32 gb_addr_config;
3138 	u32 mc_arb_ramcfg;
3139 	u32 sx_debug_1;
3140 	u32 smx_dc_ctl0;
3141 	u32 sq_config;
3142 	u32 sq_lds_resource_mgmt;
3143 	u32 sq_gpr_resource_mgmt_1;
3144 	u32 sq_gpr_resource_mgmt_2;
3145 	u32 sq_gpr_resource_mgmt_3;
3146 	u32 sq_thread_resource_mgmt;
3147 	u32 sq_thread_resource_mgmt_2;
3148 	u32 sq_stack_resource_mgmt_1;
3149 	u32 sq_stack_resource_mgmt_2;
3150 	u32 sq_stack_resource_mgmt_3;
3151 	u32 vgt_cache_invalidation;
3152 	u32 hdp_host_path_cntl, tmp;
3153 	u32 disabled_rb_mask;
3154 	int i, j, ps_thread_count;
3155 
3156 	switch (rdev->family) {
3157 	case CHIP_CYPRESS:
3158 	case CHIP_HEMLOCK:
3159 		rdev->config.evergreen.num_ses = 2;
3160 		rdev->config.evergreen.max_pipes = 4;
3161 		rdev->config.evergreen.max_tile_pipes = 8;
3162 		rdev->config.evergreen.max_simds = 10;
3163 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3164 		rdev->config.evergreen.max_gprs = 256;
3165 		rdev->config.evergreen.max_threads = 248;
3166 		rdev->config.evergreen.max_gs_threads = 32;
3167 		rdev->config.evergreen.max_stack_entries = 512;
3168 		rdev->config.evergreen.sx_num_of_sets = 4;
3169 		rdev->config.evergreen.sx_max_export_size = 256;
3170 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3171 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3172 		rdev->config.evergreen.max_hw_contexts = 8;
3173 		rdev->config.evergreen.sq_num_cf_insts = 2;
3174 
3175 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3176 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3177 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3178 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3179 		break;
3180 	case CHIP_JUNIPER:
3181 		rdev->config.evergreen.num_ses = 1;
3182 		rdev->config.evergreen.max_pipes = 4;
3183 		rdev->config.evergreen.max_tile_pipes = 4;
3184 		rdev->config.evergreen.max_simds = 10;
3185 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3186 		rdev->config.evergreen.max_gprs = 256;
3187 		rdev->config.evergreen.max_threads = 248;
3188 		rdev->config.evergreen.max_gs_threads = 32;
3189 		rdev->config.evergreen.max_stack_entries = 512;
3190 		rdev->config.evergreen.sx_num_of_sets = 4;
3191 		rdev->config.evergreen.sx_max_export_size = 256;
3192 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3193 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3194 		rdev->config.evergreen.max_hw_contexts = 8;
3195 		rdev->config.evergreen.sq_num_cf_insts = 2;
3196 
3197 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3198 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3199 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3200 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3201 		break;
3202 	case CHIP_REDWOOD:
3203 		rdev->config.evergreen.num_ses = 1;
3204 		rdev->config.evergreen.max_pipes = 4;
3205 		rdev->config.evergreen.max_tile_pipes = 4;
3206 		rdev->config.evergreen.max_simds = 5;
3207 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3208 		rdev->config.evergreen.max_gprs = 256;
3209 		rdev->config.evergreen.max_threads = 248;
3210 		rdev->config.evergreen.max_gs_threads = 32;
3211 		rdev->config.evergreen.max_stack_entries = 256;
3212 		rdev->config.evergreen.sx_num_of_sets = 4;
3213 		rdev->config.evergreen.sx_max_export_size = 256;
3214 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3215 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3216 		rdev->config.evergreen.max_hw_contexts = 8;
3217 		rdev->config.evergreen.sq_num_cf_insts = 2;
3218 
3219 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3220 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3221 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3222 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3223 		break;
3224 	case CHIP_CEDAR:
3225 	default:
3226 		rdev->config.evergreen.num_ses = 1;
3227 		rdev->config.evergreen.max_pipes = 2;
3228 		rdev->config.evergreen.max_tile_pipes = 2;
3229 		rdev->config.evergreen.max_simds = 2;
3230 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3231 		rdev->config.evergreen.max_gprs = 256;
3232 		rdev->config.evergreen.max_threads = 192;
3233 		rdev->config.evergreen.max_gs_threads = 16;
3234 		rdev->config.evergreen.max_stack_entries = 256;
3235 		rdev->config.evergreen.sx_num_of_sets = 4;
3236 		rdev->config.evergreen.sx_max_export_size = 128;
3237 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3238 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3239 		rdev->config.evergreen.max_hw_contexts = 4;
3240 		rdev->config.evergreen.sq_num_cf_insts = 1;
3241 
3242 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3243 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3244 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3245 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3246 		break;
3247 	case CHIP_PALM:
3248 		rdev->config.evergreen.num_ses = 1;
3249 		rdev->config.evergreen.max_pipes = 2;
3250 		rdev->config.evergreen.max_tile_pipes = 2;
3251 		rdev->config.evergreen.max_simds = 2;
3252 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3253 		rdev->config.evergreen.max_gprs = 256;
3254 		rdev->config.evergreen.max_threads = 192;
3255 		rdev->config.evergreen.max_gs_threads = 16;
3256 		rdev->config.evergreen.max_stack_entries = 256;
3257 		rdev->config.evergreen.sx_num_of_sets = 4;
3258 		rdev->config.evergreen.sx_max_export_size = 128;
3259 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3260 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3261 		rdev->config.evergreen.max_hw_contexts = 4;
3262 		rdev->config.evergreen.sq_num_cf_insts = 1;
3263 
3264 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3265 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3266 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3267 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3268 		break;
3269 	case CHIP_SUMO:
3270 		rdev->config.evergreen.num_ses = 1;
3271 		rdev->config.evergreen.max_pipes = 4;
3272 		rdev->config.evergreen.max_tile_pipes = 4;
3273 		if (rdev->pdev->device == 0x9648)
3274 			rdev->config.evergreen.max_simds = 3;
3275 		else if ((rdev->pdev->device == 0x9647) ||
3276 			 (rdev->pdev->device == 0x964a))
3277 			rdev->config.evergreen.max_simds = 4;
3278 		else
3279 			rdev->config.evergreen.max_simds = 5;
3280 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3281 		rdev->config.evergreen.max_gprs = 256;
3282 		rdev->config.evergreen.max_threads = 248;
3283 		rdev->config.evergreen.max_gs_threads = 32;
3284 		rdev->config.evergreen.max_stack_entries = 256;
3285 		rdev->config.evergreen.sx_num_of_sets = 4;
3286 		rdev->config.evergreen.sx_max_export_size = 256;
3287 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3288 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3289 		rdev->config.evergreen.max_hw_contexts = 8;
3290 		rdev->config.evergreen.sq_num_cf_insts = 2;
3291 
3292 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3293 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3294 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3295 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3296 		break;
3297 	case CHIP_SUMO2:
3298 		rdev->config.evergreen.num_ses = 1;
3299 		rdev->config.evergreen.max_pipes = 4;
3300 		rdev->config.evergreen.max_tile_pipes = 4;
3301 		rdev->config.evergreen.max_simds = 2;
3302 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3303 		rdev->config.evergreen.max_gprs = 256;
3304 		rdev->config.evergreen.max_threads = 248;
3305 		rdev->config.evergreen.max_gs_threads = 32;
3306 		rdev->config.evergreen.max_stack_entries = 512;
3307 		rdev->config.evergreen.sx_num_of_sets = 4;
3308 		rdev->config.evergreen.sx_max_export_size = 256;
3309 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3310 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3311 		rdev->config.evergreen.max_hw_contexts = 4;
3312 		rdev->config.evergreen.sq_num_cf_insts = 2;
3313 
3314 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3315 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3316 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3317 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3318 		break;
3319 	case CHIP_BARTS:
3320 		rdev->config.evergreen.num_ses = 2;
3321 		rdev->config.evergreen.max_pipes = 4;
3322 		rdev->config.evergreen.max_tile_pipes = 8;
3323 		rdev->config.evergreen.max_simds = 7;
3324 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3325 		rdev->config.evergreen.max_gprs = 256;
3326 		rdev->config.evergreen.max_threads = 248;
3327 		rdev->config.evergreen.max_gs_threads = 32;
3328 		rdev->config.evergreen.max_stack_entries = 512;
3329 		rdev->config.evergreen.sx_num_of_sets = 4;
3330 		rdev->config.evergreen.sx_max_export_size = 256;
3331 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3332 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3333 		rdev->config.evergreen.max_hw_contexts = 8;
3334 		rdev->config.evergreen.sq_num_cf_insts = 2;
3335 
3336 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3337 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3338 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3339 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3340 		break;
3341 	case CHIP_TURKS:
3342 		rdev->config.evergreen.num_ses = 1;
3343 		rdev->config.evergreen.max_pipes = 4;
3344 		rdev->config.evergreen.max_tile_pipes = 4;
3345 		rdev->config.evergreen.max_simds = 6;
3346 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3347 		rdev->config.evergreen.max_gprs = 256;
3348 		rdev->config.evergreen.max_threads = 248;
3349 		rdev->config.evergreen.max_gs_threads = 32;
3350 		rdev->config.evergreen.max_stack_entries = 256;
3351 		rdev->config.evergreen.sx_num_of_sets = 4;
3352 		rdev->config.evergreen.sx_max_export_size = 256;
3353 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3354 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3355 		rdev->config.evergreen.max_hw_contexts = 8;
3356 		rdev->config.evergreen.sq_num_cf_insts = 2;
3357 
3358 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3359 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3360 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3361 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3362 		break;
3363 	case CHIP_CAICOS:
3364 		rdev->config.evergreen.num_ses = 1;
3365 		rdev->config.evergreen.max_pipes = 2;
3366 		rdev->config.evergreen.max_tile_pipes = 2;
3367 		rdev->config.evergreen.max_simds = 2;
3368 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3369 		rdev->config.evergreen.max_gprs = 256;
3370 		rdev->config.evergreen.max_threads = 192;
3371 		rdev->config.evergreen.max_gs_threads = 16;
3372 		rdev->config.evergreen.max_stack_entries = 256;
3373 		rdev->config.evergreen.sx_num_of_sets = 4;
3374 		rdev->config.evergreen.sx_max_export_size = 128;
3375 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3376 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3377 		rdev->config.evergreen.max_hw_contexts = 4;
3378 		rdev->config.evergreen.sq_num_cf_insts = 1;
3379 
3380 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3381 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3382 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3383 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3384 		break;
3385 	}
3386 
3387 	/* Initialize HDP */
3388 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3389 		WREG32((0x2c14 + j), 0x00000000);
3390 		WREG32((0x2c18 + j), 0x00000000);
3391 		WREG32((0x2c1c + j), 0x00000000);
3392 		WREG32((0x2c20 + j), 0x00000000);
3393 		WREG32((0x2c24 + j), 0x00000000);
3394 	}
3395 
3396 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3397 	WREG32(SRBM_INT_CNTL, 0x1);
3398 	WREG32(SRBM_INT_ACK, 0x1);
3399 
3400 	evergreen_fix_pci_max_read_req_size(rdev);
3401 
3402 	RREG32(MC_SHARED_CHMAP);
3403 	if ((rdev->family == CHIP_PALM) ||
3404 	    (rdev->family == CHIP_SUMO) ||
3405 	    (rdev->family == CHIP_SUMO2))
3406 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3407 	else
3408 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3409 
3410 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3411 	 * not have bank info, so create a custom tiling dword.
3412 	 * bits 3:0   num_pipes
3413 	 * bits 7:4   num_banks
3414 	 * bits 11:8  group_size
3415 	 * bits 15:12 row_size
3416 	 */
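	/* Worked example (illustrative only): 8 tile pipes (code 3), 8 banks
	 * (code 1), group_size code 0 and row_size code 2 would pack to
	 * tile_config = 0x2013. */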
3417 	rdev->config.evergreen.tile_config = 0;
3418 	switch (rdev->config.evergreen.max_tile_pipes) {
3419 	case 1:
3420 	default:
3421 		rdev->config.evergreen.tile_config |= (0 << 0);
3422 		break;
3423 	case 2:
3424 		rdev->config.evergreen.tile_config |= (1 << 0);
3425 		break;
3426 	case 4:
3427 		rdev->config.evergreen.tile_config |= (2 << 0);
3428 		break;
3429 	case 8:
3430 		rdev->config.evergreen.tile_config |= (3 << 0);
3431 		break;
3432 	}
3433 	/* num banks is 8 on all fusion ASICs; encoding: 0 = 4 banks, 1 = 8, 2 = 16 */
3434 	if (rdev->flags & RADEON_IS_IGP)
3435 		rdev->config.evergreen.tile_config |= 1 << 4;
3436 	else {
3437 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3438 		case 0: /* four banks */
3439 			rdev->config.evergreen.tile_config |= 0 << 4;
3440 			break;
3441 		case 1: /* eight banks */
3442 			rdev->config.evergreen.tile_config |= 1 << 4;
3443 			break;
3444 		case 2: /* sixteen banks */
3445 		default:
3446 			rdev->config.evergreen.tile_config |= 2 << 4;
3447 			break;
3448 		}
3449 	}
3450 	rdev->config.evergreen.tile_config |= 0 << 8;
3451 	rdev->config.evergreen.tile_config |=
3452 		((gb_addr_config & 0x30000000) >> 28) << 12;
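	/* row_size (bits 15:12) is taken from the ROW_SIZE field of
	 * gb_addr_config (bits 29:28); group_size (bits 11:8) is always
	 * written as 0 above */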
3453 
3454 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3455 		u32 efuse_straps_4;
3456 		u32 efuse_straps_3;
3457 
3458 		efuse_straps_4 = RREG32_RCU(0x204);
3459 		efuse_straps_3 = RREG32_RCU(0x203);
3460 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3461 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3462 	} else {
3463 		tmp = 0;
3464 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3465 			u32 rb_disable_bitmap;
3466 
3467 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3468 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3469 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3470 			tmp <<= 4;
3471 			tmp |= rb_disable_bitmap;
3472 		}
3473 	}
3474 	/* the enabled RBs are simply the ones not disabled */
3475 	disabled_rb_mask = tmp;
3476 	tmp = 0;
3477 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3478 		tmp |= (1 << i);
3479 	/* if all the backends are disabled, fix it up here */
3480 	if ((disabled_rb_mask & tmp) == tmp) {
3481 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3482 			disabled_rb_mask &= ~(1 << i);
3483 	}
3484 
3485 	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3486 		u32 simd_disable_bitmap;
3487 
3488 		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3489 		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3490 		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3491 		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3492 		tmp <<= 16;
3493 		tmp |= simd_disable_bitmap;
3494 	}
3495 	rdev->config.evergreen.active_simds = hweight32(~tmp);
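	/* bitmap positions at or above max_simds are padded with 1s in the
	 * loop above, so hweight32(~tmp) counts only SIMDs that exist and
	 * are not disabled */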
3496 
3497 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3498 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3499 
3500 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3501 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3502 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3503 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3504 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3505 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3506 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
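	/* the same address/tiling configuration is mirrored to the display
	 * (DMIF), HDP, DMA and UVD blocks so that every client decodes
	 * addresses identically */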
3507 
3508 	if ((rdev->config.evergreen.max_backends == 1) &&
3509 	    (rdev->flags & RADEON_IS_IGP)) {
3510 		if ((disabled_rb_mask & 3) == 1) {
3511 			/* RB0 disabled, RB1 enabled */
3512 			tmp = 0x11111111;
3513 		} else {
3514 			/* RB1 disabled, RB0 enabled */
3515 			tmp = 0x00000000;
3516 		}
3517 	} else {
3518 		tmp = gb_addr_config & NUM_PIPES_MASK;
3519 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3520 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3521 	}
3522 	rdev->config.evergreen.backend_map = tmp;
3523 	WREG32(GB_BACKEND_MAP, tmp);
3524 
3525 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3526 	WREG32(CGTS_TCC_DISABLE, 0);
3527 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3528 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3529 
3530 	/* set HW defaults for 3D engine */
3531 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3532 				     ROQ_IB2_START(0x2b)));
3533 
3534 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3535 
3536 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3537 			     SYNC_GRADIENT |
3538 			     SYNC_WALKER |
3539 			     SYNC_ALIGNER));
3540 
3541 	sx_debug_1 = RREG32(SX_DEBUG_1);
3542 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3543 	WREG32(SX_DEBUG_1, sx_debug_1);
3544 
3545 
3546 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3547 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3548 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3549 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3550 
3551 	if (rdev->family <= CHIP_SUMO2)
3552 		WREG32(SMX_SAR_CTL0, 0x00010000);
3553 
3554 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3555 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3556 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
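	/* the export buffer sizes appear to be encoded in units of four
	 * entries, minus one, hence the (size / 4) - 1 terms above */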
3557 
3558 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3559 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3560 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3561 
3562 	WREG32(VGT_NUM_INSTANCES, 1);
3563 	WREG32(SPI_CONFIG_CNTL, 0);
3564 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3565 	WREG32(CP_PERFMON_CNTL, 0);
3566 
3567 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3568 				  FETCH_FIFO_HIWATER(0x4) |
3569 				  DONE_FIFO_HIWATER(0xe0) |
3570 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3571 
3572 	sq_config = RREG32(SQ_CONFIG);
3573 	sq_config &= ~(PS_PRIO(3) |
3574 		       VS_PRIO(3) |
3575 		       GS_PRIO(3) |
3576 		       ES_PRIO(3));
3577 	sq_config |= (VC_ENABLE |
3578 		      EXPORT_SRC_C |
3579 		      PS_PRIO(0) |
3580 		      VS_PRIO(1) |
3581 		      GS_PRIO(2) |
3582 		      ES_PRIO(3));
3583 
3584 	switch (rdev->family) {
3585 	case CHIP_CEDAR:
3586 	case CHIP_PALM:
3587 	case CHIP_SUMO:
3588 	case CHIP_SUMO2:
3589 	case CHIP_CAICOS:
3590 		/* no vertex cache */
3591 		sq_config &= ~VC_ENABLE;
3592 		break;
3593 	default:
3594 		break;
3595 	}
3596 
3597 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3598 
3599 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 12 / 32);
3600 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3601 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3602 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3603 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3604 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3605 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
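	/* 8 GPRs (4 clause temps * 2) are reserved; the remainder is split
	 * 12/6/4/4/3/3 thirty-seconds across PS/VS/GS/ES/HS/LS */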
3606 
3607 	switch (rdev->family) {
3608 	case CHIP_CEDAR:
3609 	case CHIP_PALM:
3610 	case CHIP_SUMO:
3611 	case CHIP_SUMO2:
3612 		ps_thread_count = 96;
3613 		break;
3614 	default:
3615 		ps_thread_count = 128;
3616 		break;
3617 	}
3618 
3619 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3620 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3621 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3622 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3623 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3624 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
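	/* each remaining stage (VS/GS/ES/HS/LS) gets one sixth of the
	 * non-PS threads, rounded down to a multiple of 8 */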
3625 
3626 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3627 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3628 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3629 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3630 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3631 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
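	/* stack entries are split evenly, one sixth per shader stage */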
3632 
3633 	WREG32(SQ_CONFIG, sq_config);
3634 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3635 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3636 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3637 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3638 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3639 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3640 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3641 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3642 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3643 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3644 
3645 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3646 					  FORCE_EOV_MAX_REZ_CNT(255)));
3647 
3648 	switch (rdev->family) {
3649 	case CHIP_CEDAR:
3650 	case CHIP_PALM:
3651 	case CHIP_SUMO:
3652 	case CHIP_SUMO2:
3653 	case CHIP_CAICOS:
3654 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3655 		break;
3656 	default:
3657 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3658 		break;
3659 	}
3660 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3661 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3662 
3663 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3664 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3665 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3666 
3667 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3668 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3669 
3670 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3671 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3672 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3673 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3674 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3675 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3676 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3677 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3678 
3679 	/* clear render buffer base addresses */
3680 	WREG32(CB_COLOR0_BASE, 0);
3681 	WREG32(CB_COLOR1_BASE, 0);
3682 	WREG32(CB_COLOR2_BASE, 0);
3683 	WREG32(CB_COLOR3_BASE, 0);
3684 	WREG32(CB_COLOR4_BASE, 0);
3685 	WREG32(CB_COLOR5_BASE, 0);
3686 	WREG32(CB_COLOR6_BASE, 0);
3687 	WREG32(CB_COLOR7_BASE, 0);
3688 	WREG32(CB_COLOR8_BASE, 0);
3689 	WREG32(CB_COLOR9_BASE, 0);
3690 	WREG32(CB_COLOR10_BASE, 0);
3691 	WREG32(CB_COLOR11_BASE, 0);
3692 
3693 	/* set the shader const cache sizes to 0 */
3694 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3695 		WREG32(i, 0);
3696 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3697 		WREG32(i, 0);
3698 
3699 	tmp = RREG32(HDP_MISC_CNTL);
3700 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3701 	WREG32(HDP_MISC_CNTL, tmp);
3702 
3703 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3704 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3705 
3706 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3707 
3708 	udelay(50);
3709 
3710 }
3711 
3712 int evergreen_mc_init(struct radeon_device *rdev)
3713 {
3714 	u32 tmp;
3715 	int chansize, numchan;
3716 
3717 	/* Get VRAM information */
3718 	rdev->mc.vram_is_ddr = true;
3719 	if ((rdev->family == CHIP_PALM) ||
3720 	    (rdev->family == CHIP_SUMO) ||
3721 	    (rdev->family == CHIP_SUMO2))
3722 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3723 	else
3724 		tmp = RREG32(MC_ARB_RAMCFG);
3725 	if (tmp & CHANSIZE_OVERRIDE) {
3726 		chansize = 16;
3727 	} else if (tmp & CHANSIZE_MASK) {
3728 		chansize = 64;
3729 	} else {
3730 		chansize = 32;
3731 	}
3732 	tmp = RREG32(MC_SHARED_CHMAP);
3733 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3734 	case 0:
3735 	default:
3736 		numchan = 1;
3737 		break;
3738 	case 1:
3739 		numchan = 2;
3740 		break;
3741 	case 2:
3742 		numchan = 4;
3743 		break;
3744 	case 3:
3745 		numchan = 8;
3746 		break;
3747 	}
3748 	rdev->mc.vram_width = numchan * chansize;
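	/* e.g. 4 channels of 64-bit DRAM give a 256-bit memory interface */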
3749 	/* Could the aperture size report 0? */
3750 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3751 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3752 	/* Setup GPU memory space */
3753 	if ((rdev->family == CHIP_PALM) ||
3754 	    (rdev->family == CHIP_SUMO) ||
3755 	    (rdev->family == CHIP_SUMO2)) {
3756 		/* size in bytes on fusion */
3757 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3758 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3759 	} else {
3760 		/* size in MB on evergreen/cayman/tn */
3761 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3762 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3763 	}
3764 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3765 	r700_vram_gtt_location(rdev, &rdev->mc);
3766 	radeon_update_bandwidth_info(rdev);
3767 
3768 	return 0;
3769 }
3770 
3771 void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3772 {
3773 	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3774 		RREG32(GRBM_STATUS));
3775 	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3776 		RREG32(GRBM_STATUS_SE0));
3777 	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3778 		RREG32(GRBM_STATUS_SE1));
3779 	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3780 		RREG32(SRBM_STATUS));
3781 	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3782 		RREG32(SRBM_STATUS2));
3783 	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3784 		RREG32(CP_STALLED_STAT1));
3785 	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3786 		RREG32(CP_STALLED_STAT2));
3787 	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3788 		RREG32(CP_BUSY_STAT));
3789 	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3790 		RREG32(CP_STAT));
3791 	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3792 		RREG32(DMA_STATUS_REG));
3793 	if (rdev->family >= CHIP_CAYMAN) {
3794 		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3795 			 RREG32(DMA_STATUS_REG + 0x800));
3796 	}
3797 }
3798 
3799 bool evergreen_is_display_hung(struct radeon_device *rdev)
3800 {
3801 	u32 crtc_hung = 0;
3802 	u32 crtc_status[6];
3803 	u32 i, j, tmp;
3804 
3805 	for (i = 0; i < rdev->num_crtc; i++) {
3806 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3807 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3808 			crtc_hung |= (1 << i);
3809 		}
3810 	}
3811 
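	/* crtc_hung now flags every enabled CRTC; re-sample the HV counters
	 * up to ten times, 100us apart, and clear the flag for any CRTC
	 * whose counter is still advancing */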
3812 	for (j = 0; j < 10; j++) {
3813 		for (i = 0; i < rdev->num_crtc; i++) {
3814 			if (crtc_hung & (1 << i)) {
3815 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3816 				if (tmp != crtc_status[i])
3817 					crtc_hung &= ~(1 << i);
3818 			}
3819 		}
3820 		if (crtc_hung == 0)
3821 			return false;
3822 		udelay(100);
3823 	}
3824 
3825 	return true;
3826 }
3827 
3828 u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3829 {
3830 	u32 reset_mask = 0;
3831 	u32 tmp;
3832 
3833 	/* GRBM_STATUS */
3834 	tmp = RREG32(GRBM_STATUS);
3835 	if (tmp & (PA_BUSY | SC_BUSY |
3836 		   SH_BUSY | SX_BUSY |
3837 		   TA_BUSY | VGT_BUSY |
3838 		   DB_BUSY | CB_BUSY |
3839 		   SPI_BUSY | VGT_BUSY_NO_DMA))
3840 		reset_mask |= RADEON_RESET_GFX;
3841 
3842 	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3843 		   CP_BUSY | CP_COHERENCY_BUSY))
3844 		reset_mask |= RADEON_RESET_CP;
3845 
3846 	if (tmp & GRBM_EE_BUSY)
3847 		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3848 
3849 	/* DMA_STATUS_REG */
3850 	tmp = RREG32(DMA_STATUS_REG);
3851 	if (!(tmp & DMA_IDLE))
3852 		reset_mask |= RADEON_RESET_DMA;
3853 
3854 	/* SRBM_STATUS2 */
3855 	tmp = RREG32(SRBM_STATUS2);
3856 	if (tmp & DMA_BUSY)
3857 		reset_mask |= RADEON_RESET_DMA;
3858 
3859 	/* SRBM_STATUS */
3860 	tmp = RREG32(SRBM_STATUS);
3861 	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3862 		reset_mask |= RADEON_RESET_RLC;
3863 
3864 	if (tmp & IH_BUSY)
3865 		reset_mask |= RADEON_RESET_IH;
3866 
3867 	if (tmp & SEM_BUSY)
3868 		reset_mask |= RADEON_RESET_SEM;
3869 
3870 	if (tmp & GRBM_RQ_PENDING)
3871 		reset_mask |= RADEON_RESET_GRBM;
3872 
3873 	if (tmp & VMC_BUSY)
3874 		reset_mask |= RADEON_RESET_VMC;
3875 
3876 	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3877 		   MCC_BUSY | MCD_BUSY))
3878 		reset_mask |= RADEON_RESET_MC;
3879 
3880 	if (evergreen_is_display_hung(rdev))
3881 		reset_mask |= RADEON_RESET_DISPLAY;
3882 
3883 	/* VM_L2_STATUS */
3884 	tmp = RREG32(VM_L2_STATUS);
3885 	if (tmp & L2_BUSY)
3886 		reset_mask |= RADEON_RESET_VMC;
3887 
3888 	/* Skip MC reset as it's most likely not hung, just busy */
3889 	if (reset_mask & RADEON_RESET_MC) {
3890 		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3891 		reset_mask &= ~RADEON_RESET_MC;
3892 	}
3893 
3894 	return reset_mask;
3895 }
3896 
3897 static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3898 {
3899 	struct evergreen_mc_save save;
3900 	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3901 	u32 tmp;
3902 
3903 	if (reset_mask == 0)
3904 		return;
3905 
3906 	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3907 
3908 	evergreen_print_gpu_status_regs(rdev);
3909 
3910 	/* Disable CP parsing/prefetching */
3911 	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3912 
3913 	if (reset_mask & RADEON_RESET_DMA) {
3914 		/* Disable DMA */
3915 		tmp = RREG32(DMA_RB_CNTL);
3916 		tmp &= ~DMA_RB_ENABLE;
3917 		WREG32(DMA_RB_CNTL, tmp);
3918 	}
3919 
3920 	udelay(50);
3921 
3922 	evergreen_mc_stop(rdev, &save);
3923 	if (evergreen_mc_wait_for_idle(rdev)) {
3924 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3925 	}
3926 
3927 	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
3928 		grbm_soft_reset |= SOFT_RESET_DB |
3929 			SOFT_RESET_CB |
3930 			SOFT_RESET_PA |
3931 			SOFT_RESET_SC |
3932 			SOFT_RESET_SPI |
3933 			SOFT_RESET_SX |
3934 			SOFT_RESET_SH |
3935 			SOFT_RESET_TC |
3936 			SOFT_RESET_TA |
3937 			SOFT_RESET_VC |
3938 			SOFT_RESET_VGT;
3939 	}
3940 
3941 	if (reset_mask & RADEON_RESET_CP) {
3942 		grbm_soft_reset |= SOFT_RESET_CP |
3943 			SOFT_RESET_VGT;
3944 
3945 		srbm_soft_reset |= SOFT_RESET_GRBM;
3946 	}
3947 
3948 	if (reset_mask & RADEON_RESET_DMA)
3949 		srbm_soft_reset |= SOFT_RESET_DMA;
3950 
3951 	if (reset_mask & RADEON_RESET_DISPLAY)
3952 		srbm_soft_reset |= SOFT_RESET_DC;
3953 
3954 	if (reset_mask & RADEON_RESET_RLC)
3955 		srbm_soft_reset |= SOFT_RESET_RLC;
3956 
3957 	if (reset_mask & RADEON_RESET_SEM)
3958 		srbm_soft_reset |= SOFT_RESET_SEM;
3959 
3960 	if (reset_mask & RADEON_RESET_IH)
3961 		srbm_soft_reset |= SOFT_RESET_IH;
3962 
3963 	if (reset_mask & RADEON_RESET_GRBM)
3964 		srbm_soft_reset |= SOFT_RESET_GRBM;
3965 
3966 	if (reset_mask & RADEON_RESET_VMC)
3967 		srbm_soft_reset |= SOFT_RESET_VMC;
3968 
3969 	if (!(rdev->flags & RADEON_IS_IGP)) {
3970 		if (reset_mask & RADEON_RESET_MC)
3971 			srbm_soft_reset |= SOFT_RESET_MC;
3972 	}
3973 
3974 	if (grbm_soft_reset) {
3975 		tmp = RREG32(GRBM_SOFT_RESET);
3976 		tmp |= grbm_soft_reset;
3977 		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3978 		WREG32(GRBM_SOFT_RESET, tmp);
3979 		tmp = RREG32(GRBM_SOFT_RESET);
3980 
3981 		udelay(50);
3982 
3983 		tmp &= ~grbm_soft_reset;
3984 		WREG32(GRBM_SOFT_RESET, tmp);
3985 		tmp = RREG32(GRBM_SOFT_RESET);
3986 	}
3987 
3988 	if (srbm_soft_reset) {
3989 		tmp = RREG32(SRBM_SOFT_RESET);
3990 		tmp |= srbm_soft_reset;
3991 		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3992 		WREG32(SRBM_SOFT_RESET, tmp);
3993 		tmp = RREG32(SRBM_SOFT_RESET);
3994 
3995 		udelay(50);
3996 
3997 		tmp &= ~srbm_soft_reset;
3998 		WREG32(SRBM_SOFT_RESET, tmp);
3999 		tmp = RREG32(SRBM_SOFT_RESET);
4000 	}
4001 
4002 	/* Wait a little for things to settle down */
4003 	udelay(50);
4004 
4005 	evergreen_mc_resume(rdev, &save);
4006 	udelay(50);
4007 
4008 	evergreen_print_gpu_status_regs(rdev);
4009 }
4010 
4011 void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
4012 {
4013 	struct evergreen_mc_save save;
4014 	u32 tmp, i;
4015 
4016 	dev_info(rdev->dev, "GPU pci config reset\n");
4017 
4018 	/* disable dpm? */
4019 
4020 	/* Disable CP parsing/prefetching */
4021 	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
4022 	udelay(50);
4023 	/* Disable DMA */
4024 	tmp = RREG32(DMA_RB_CNTL);
4025 	tmp &= ~DMA_RB_ENABLE;
4026 	WREG32(DMA_RB_CNTL, tmp);
4027 	/* XXX other engines? */
4028 
4029 	/* halt the rlc */
4030 	r600_rlc_stop(rdev);
4031 
4032 	udelay(50);
4033 
4034 	/* set mclk/sclk to bypass */
4035 	rv770_set_clk_bypass_mode(rdev);
4036 	/* disable BM */
4037 	pci_clear_master(rdev->pdev);
4038 	/* disable mem access */
4039 	evergreen_mc_stop(rdev, &save);
4040 	if (evergreen_mc_wait_for_idle(rdev)) {
4041 		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
4042 	}
4043 	/* reset */
4044 	radeon_pci_config_reset(rdev);
4045 	/* wait for asic to come out of reset */
4046 	for (i = 0; i < rdev->usec_timeout; i++) {
4047 		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
4048 			break;
4049 		udelay(1);
4050 	}
4051 }
4052 
4053 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4054 {
4055 	u32 reset_mask;
4056 
4057 	if (hard) {
4058 		evergreen_gpu_pci_config_reset(rdev);
4059 		return 0;
4060 	}
4061 
4062 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4063 
4064 	if (reset_mask)
4065 		r600_set_bios_scratch_engine_hung(rdev, true);
4066 
4067 	/* try soft reset */
4068 	evergreen_gpu_soft_reset(rdev, reset_mask);
4069 
4070 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4071 
4072 	/* try pci config reset */
4073 	if (reset_mask && radeon_hard_reset)
4074 		evergreen_gpu_pci_config_reset(rdev);
4075 
4076 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
4077 
4078 	if (!reset_mask)
4079 		r600_set_bios_scratch_engine_hung(rdev, false);
4080 
4081 	return 0;
4082 }
4083 
4084 /**
4085  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4086  *
4087  * @rdev: radeon_device pointer
4088  * @ring: radeon_ring structure holding ring information
4089  *
4090  * Check if the GFX engine is locked up.
4091  * Returns true if the engine appears to be locked up, false if not.
4092  */
4093 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4094 {
4095 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4096 
4097 	if (!(reset_mask & (RADEON_RESET_GFX |
4098 			    RADEON_RESET_COMPUTE |
4099 			    RADEON_RESET_CP))) {
4100 		radeon_ring_lockup_update(rdev, ring);
4101 		return false;
4102 	}
4103 	return radeon_ring_test_lockup(rdev, ring);
4104 }
4105 
4106 /*
4107  * RLC
4108  */
4109 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
4110 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
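/* these markers terminate the save/restore and clear-state register lists
 * that sumo_rlc_init() writes into the RLC buffer objects below */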
4111 
4112 void sumo_rlc_fini(struct radeon_device *rdev)
4113 {
4114 	int r;
4115 
4116 	/* save restore block */
4117 	if (rdev->rlc.save_restore_obj) {
4118 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4119 		if (unlikely(r != 0))
4120 			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4121 		radeon_bo_unpin(rdev->rlc.save_restore_obj);
4122 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4123 
4124 		radeon_bo_unref(&rdev->rlc.save_restore_obj);
4125 		rdev->rlc.save_restore_obj = NULL;
4126 	}
4127 
4128 	/* clear state block */
4129 	if (rdev->rlc.clear_state_obj) {
4130 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4131 		if (unlikely(r != 0))
4132 			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4133 		radeon_bo_unpin(rdev->rlc.clear_state_obj);
4134 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4135 
4136 		radeon_bo_unref(&rdev->rlc.clear_state_obj);
4137 		rdev->rlc.clear_state_obj = NULL;
4138 	}
4139 
4140 	/* cp table block */
4141 	if (rdev->rlc.cp_table_obj) {
4142 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4143 		if (unlikely(r != 0))
4144 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4145 		radeon_bo_unpin(rdev->rlc.cp_table_obj);
4146 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4147 
4148 		radeon_bo_unref(&rdev->rlc.cp_table_obj);
4149 		rdev->rlc.cp_table_obj = NULL;
4150 	}
4151 }
4152 
4153 #define CP_ME_TABLE_SIZE    96
4154 
4155 int sumo_rlc_init(struct radeon_device *rdev)
4156 {
4157 	const u32 *src_ptr;
4158 	volatile u32 *dst_ptr;
4159 	u32 dws, data, i, j, k, reg_num;
4160 	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4161 	u64 reg_list_mc_addr;
4162 	const struct cs_section_def *cs_data;
4163 	int r;
4164 
4165 	src_ptr = rdev->rlc.reg_list;
4166 	dws = rdev->rlc.reg_list_size;
4167 	if (rdev->family >= CHIP_BONAIRE) {
4168 		dws += (5 * 16) + 48 + 48 + 64;
4169 	}
4170 	cs_data = rdev->rlc.cs_data;
4171 
4172 	if (src_ptr) {
4173 		/* save restore block */
4174 		if (rdev->rlc.save_restore_obj == NULL) {
4175 			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4176 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4177 					     NULL, &rdev->rlc.save_restore_obj);
4178 			if (r) {
4179 				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4180 				return r;
4181 			}
4182 		}
4183 
4184 		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4185 		if (unlikely(r != 0)) {
4186 			sumo_rlc_fini(rdev);
4187 			return r;
4188 		}
4189 		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4190 				  &rdev->rlc.save_restore_gpu_addr);
4191 		if (r) {
4192 			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4193 			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4194 			sumo_rlc_fini(rdev);
4195 			return r;
4196 		}
4197 
4198 		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4199 		if (r) {
4200 			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4201 			sumo_rlc_fini(rdev);
4202 			return r;
4203 		}
4204 		/* write the sr buffer */
4205 		dst_ptr = rdev->rlc.sr_ptr;
4206 		if (rdev->family >= CHIP_TAHITI) {
4207 			/* SI */
4208 			for (i = 0; i < rdev->rlc.reg_list_size; i++)
4209 				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4210 		} else {
4211 			/* ON/LN/TN */
4212 			/* format:
4213 			 * dw0: (reg2 << 16) | reg1
4214 			 * dw1: reg1 save space
4215 			 * dw2: reg2 save space
4216 			 */
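			/* each register pair thus occupies three dwords (one
			 * packed header plus two save slots), which is why
			 * the destination index is j = ((i - 1) * 3) / 2 */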
4217 			for (i = 0; i < dws; i++) {
4218 				data = src_ptr[i] >> 2;
4219 				i++;
4220 				if (i < dws)
4221 					data |= (src_ptr[i] >> 2) << 16;
4222 				j = (((i - 1) * 3) / 2);
4223 				dst_ptr[j] = cpu_to_le32(data);
4224 			}
4225 			j = ((i * 3) / 2);
4226 			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4227 		}
4228 		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4229 		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4230 	}
4231 
4232 	if (cs_data) {
4233 		/* clear state block */
4234 		if (rdev->family >= CHIP_BONAIRE) {
4235 			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4236 		} else if (rdev->family >= CHIP_TAHITI) {
4237 			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4238 			dws = rdev->rlc.clear_state_size + (256 / 4);
4239 		} else {
4240 			reg_list_num = 0;
4241 			dws = 0;
4242 			for (i = 0; cs_data[i].section != NULL; i++) {
4243 				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4244 					reg_list_num++;
4245 					dws += cs_data[i].section[j].reg_count;
4246 				}
4247 			}
4248 			reg_list_blk_index = (3 * reg_list_num + 2);
4249 			dws += reg_list_blk_index;
4250 			rdev->rlc.clear_state_size = dws;
4251 		}
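		/* for the pre-SI path the header block is one address dword,
		 * three dwords per register extent and one end marker, i.e.
		 * the 3 * reg_list_num + 2 computed above */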
4252 
4253 		if (rdev->rlc.clear_state_obj == NULL) {
4254 			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4255 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4256 					     NULL, &rdev->rlc.clear_state_obj);
4257 			if (r) {
4258 				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4259 				sumo_rlc_fini(rdev);
4260 				return r;
4261 			}
4262 		}
4263 		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4264 		if (unlikely(r != 0)) {
4265 			sumo_rlc_fini(rdev);
4266 			return r;
4267 		}
4268 		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4269 				  &rdev->rlc.clear_state_gpu_addr);
4270 		if (r) {
4271 			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4272 			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4273 			sumo_rlc_fini(rdev);
4274 			return r;
4275 		}
4276 
4277 		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4278 		if (r) {
4279 			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4280 			sumo_rlc_fini(rdev);
4281 			return r;
4282 		}
4283 		/* set up the cs buffer */
4284 		dst_ptr = rdev->rlc.cs_ptr;
4285 		if (rdev->family >= CHIP_BONAIRE) {
4286 			cik_get_csb_buffer(rdev, dst_ptr);
4287 		} else if (rdev->family >= CHIP_TAHITI) {
4288 			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4289 			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4290 			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4291 			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4292 			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4293 		} else {
4294 			reg_list_hdr_blk_index = 0;
4295 			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4296 			data = upper_32_bits(reg_list_mc_addr);
4297 			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4298 			reg_list_hdr_blk_index++;
4299 			for (i = 0; cs_data[i].section != NULL; i++) {
4300 				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4301 					reg_num = cs_data[i].section[j].reg_count;
4302 					data = reg_list_mc_addr & 0xffffffff;
4303 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4304 					reg_list_hdr_blk_index++;
4305 
4306 					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4307 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4308 					reg_list_hdr_blk_index++;
4309 
4310 					data = 0x08000000 | (reg_num * 4);
4311 					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4312 					reg_list_hdr_blk_index++;
4313 
4314 					for (k = 0; k < reg_num; k++) {
4315 						data = cs_data[i].section[j].extent[k];
4316 						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4317 					}
4318 					reg_list_mc_addr += reg_num * 4;
4319 					reg_list_blk_index += reg_num;
4320 				}
4321 			}
4322 			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4323 		}
4324 		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4325 		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4326 	}
4327 
4328 	if (rdev->rlc.cp_table_size) {
4329 		if (rdev->rlc.cp_table_obj == NULL) {
4330 			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
4331 					     PAGE_SIZE, true,
4332 					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
4333 					     NULL, &rdev->rlc.cp_table_obj);
4334 			if (r) {
4335 				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4336 				sumo_rlc_fini(rdev);
4337 				return r;
4338 			}
4339 		}
4340 
4341 		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4342 		if (unlikely(r != 0)) {
4343 			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4344 			sumo_rlc_fini(rdev);
4345 			return r;
4346 		}
4347 		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4348 				  &rdev->rlc.cp_table_gpu_addr);
4349 		if (r) {
4350 			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4351 			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4352 			sumo_rlc_fini(rdev);
4353 			return r;
4354 		}
4355 		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4356 		if (r) {
4357 			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4358 			sumo_rlc_fini(rdev);
4359 			return r;
4360 		}
4361 
4362 		cik_init_cp_pg_table(rdev);
4363 
4364 		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4365 		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4366 
4367 	}
4368 
4369 	return 0;
4370 }
4371 
4372 static void evergreen_rlc_start(struct radeon_device *rdev)
4373 {
4374 	u32 mask = RLC_ENABLE;
4375 
4376 	if (rdev->flags & RADEON_IS_IGP) {
4377 		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4378 	}
4379 
4380 	WREG32(RLC_CNTL, mask);
4381 }
4382 
4383 int evergreen_rlc_resume(struct radeon_device *rdev)
4384 {
4385 	u32 i;
4386 	const __be32 *fw_data;
4387 
4388 	if (!rdev->rlc_fw)
4389 		return -EINVAL;
4390 
4391 	r600_rlc_stop(rdev);
4392 
4393 	WREG32(RLC_HB_CNTL, 0);
4394 
4395 	if (rdev->flags & RADEON_IS_IGP) {
4396 		if (rdev->family == CHIP_ARUBA) {
4397 			u32 always_on_bitmap =
4398 				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4399 			/* find out the number of active simds */
4400 			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4401 			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4402 			tmp = hweight32(~tmp);
4403 			if (tmp == rdev->config.cayman.max_simds_per_se) {
4404 				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4405 				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4406 				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4407 				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4408 				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4409 			}
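			/* presumably the RLC load balancer is only safe to
			 * enable when no SIMDs are fused off, hence the
			 * check above */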
4410 		} else {
4411 			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4412 			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4413 		}
4414 		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4415 		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4416 	} else {
4417 		WREG32(RLC_HB_BASE, 0);
4418 		WREG32(RLC_HB_RPTR, 0);
4419 		WREG32(RLC_HB_WPTR, 0);
4420 		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4421 		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4422 	}
4423 	WREG32(RLC_MC_CNTL, 0);
4424 	WREG32(RLC_UCODE_CNTL, 0);
4425 
4426 	fw_data = (const __be32 *)rdev->rlc_fw->data;
4427 	if (rdev->family >= CHIP_ARUBA) {
4428 		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4429 			WREG32(RLC_UCODE_ADDR, i);
4430 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4431 		}
4432 	} else if (rdev->family >= CHIP_CAYMAN) {
4433 		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4434 			WREG32(RLC_UCODE_ADDR, i);
4435 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4436 		}
4437 	} else {
4438 		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4439 			WREG32(RLC_UCODE_ADDR, i);
4440 			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4441 		}
4442 	}
4443 	WREG32(RLC_UCODE_ADDR, 0);
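	/* restore the auto-incrementing ucode address to 0 after the upload
	 * (a guess based on the usual index/data upload pattern) so the RLC
	 * fetches from the first instruction */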
4444 
4445 	evergreen_rlc_start(rdev);
4446 
4447 	return 0;
4448 }
4449 
4450 /* Interrupts */
4451 
4452 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4453 {
4454 	if (crtc >= rdev->num_crtc)
4455 		return 0;
4456 	else
4457 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4458 }
4459 
4460 void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4461 {
4462 	int i;
4463 	u32 tmp;
4464 
4465 	if (rdev->family >= CHIP_CAYMAN) {
4466 		cayman_cp_int_cntl_setup(rdev, 0,
4467 					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4468 		cayman_cp_int_cntl_setup(rdev, 1, 0);
4469 		cayman_cp_int_cntl_setup(rdev, 2, 0);
4470 		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4471 		WREG32(CAYMAN_DMA1_CNTL, tmp);
4472 	} else
4473 		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4474 	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4475 	WREG32(DMA_CNTL, tmp);
4476 	WREG32(GRBM_INT_CNTL, 0);
4477 	WREG32(SRBM_INT_CNTL, 0);
4478 	for (i = 0; i < rdev->num_crtc; i++)
4479 		WREG32(INT_MASK + crtc_offsets[i], 0);
4480 	for (i = 0; i < rdev->num_crtc; i++)
4481 		WREG32(GRPH_INT_CONTROL + crtc_offsets[i], 0);
4482 
4483 	/* only one DAC on DCE5 */
4484 	if (!ASIC_IS_DCE5(rdev))
4485 		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4486 	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4487 
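	/* WREG32_AND masks with DC_HPDx_INT_POLARITY, i.e. it clears the HPD
	 * interrupt enables while preserving the configured pin polarity */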
4488 	for (i = 0; i < 6; i++)
4489 		WREG32_AND(DC_HPDx_INT_CONTROL(i), DC_HPDx_INT_POLARITY);
4490 }
4491 
4492 /* Note that the order we write back regs here is important */
4493 int evergreen_irq_set(struct radeon_device *rdev)
4494 {
4495 	int i;
4496 	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4497 	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4498 	u32 grbm_int_cntl = 0;
4499 	u32 dma_cntl, dma_cntl1 = 0;
4500 	u32 thermal_int = 0;
4501 
4502 	if (!rdev->irq.installed) {
4503 		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4504 		return -EINVAL;
4505 	}
4506 	/* don't enable anything if the ih is disabled */
4507 	if (!rdev->ih.enabled) {
4508 		r600_disable_interrupts(rdev);
4509 		/* force the active interrupt state to all disabled */
4510 		evergreen_disable_interrupt_state(rdev);
4511 		return 0;
4512 	}
4513 
4514 	if (rdev->family == CHIP_ARUBA)
4515 		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4516 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4517 	else
4518 		thermal_int = RREG32(CG_THERMAL_INT) &
4519 			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4520 
4521 	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4522 
4523 	if (rdev->family >= CHIP_CAYMAN) {
4524 		/* enable CP interrupts on all rings */
4525 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4526 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4527 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4528 		}
4529 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4530 			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4531 			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4532 		}
4533 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4534 			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4535 			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4536 		}
4537 	} else {
4538 		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4539 			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4540 			cp_int_cntl |= RB_INT_ENABLE;
4541 			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4542 		}
4543 	}
4544 
4545 	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4546 		DRM_DEBUG("r600_irq_set: sw int dma\n");
4547 		dma_cntl |= TRAP_ENABLE;
4548 	}
4549 
4550 	if (rdev->family >= CHIP_CAYMAN) {
4551 		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4552 		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4553 			DRM_DEBUG("r600_irq_set: sw int dma1\n");
4554 			dma_cntl1 |= TRAP_ENABLE;
4555 		}
4556 	}
4557 
4558 	if (rdev->irq.dpm_thermal) {
4559 		DRM_DEBUG("dpm thermal\n");
4560 		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4561 	}
4562 
4563 	if (rdev->family >= CHIP_CAYMAN) {
4564 		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4565 		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4566 		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4567 	} else
4568 		WREG32(CP_INT_CNTL, cp_int_cntl);
4569 
4570 	WREG32(DMA_CNTL, dma_cntl);
4571 
4572 	if (rdev->family >= CHIP_CAYMAN)
4573 		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4574 
4575 	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4576 
4577 	for (i = 0; i < rdev->num_crtc; i++) {
4578 		radeon_irq_kms_set_irq_n_enabled(
4579 		    rdev, INT_MASK + crtc_offsets[i],
4580 		    VBLANK_INT_MASK,
4581 		    rdev->irq.crtc_vblank_int[i] ||
4582 		    atomic_read(&rdev->irq.pflip[i]), "vblank", i);
4583 	}
4584 
4585 	for (i = 0; i < rdev->num_crtc; i++)
4586 		WREG32(GRPH_INT_CONTROL + crtc_offsets[i], GRPH_PFLIP_INT_MASK);
4587 
4588 	for (i = 0; i < 6; i++) {
4589 		radeon_irq_kms_set_irq_n_enabled(
4590 		    rdev, DC_HPDx_INT_CONTROL(i),
4591 		    DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN,
4592 		    rdev->irq.hpd[i], "HPD", i);
4593 	}
4594 
4595 	if (rdev->family == CHIP_ARUBA)
4596 		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4597 	else
4598 		WREG32(CG_THERMAL_INT, thermal_int);
4599 
4600 	for (i = 0; i < 6; i++) {
4601 		radeon_irq_kms_set_irq_n_enabled(
4602 		    rdev, AFMT_AUDIO_PACKET_CONTROL + crtc_offsets[i],
4603 		    AFMT_AZ_FORMAT_WTRIG_MASK,
4604 		    rdev->irq.afmt[i], "HDMI", i);
4605 	}
4606 
4607 	/* posting read */
4608 	RREG32(SRBM_STATUS);
4609 
4610 	return 0;
4611 }
4612 
4613 /* Note that the order we write back regs here is important */
4614 static void evergreen_irq_ack(struct radeon_device *rdev)
4615 {
4616 	int i, j;
4617 	u32 *grph_int = rdev->irq.stat_regs.evergreen.grph_int;
4618 	u32 *disp_int = rdev->irq.stat_regs.evergreen.disp_int;
4619 	u32 *afmt_status = rdev->irq.stat_regs.evergreen.afmt_status;
4620 
4621 	for (i = 0; i < 6; i++) {
4622 		disp_int[i] = RREG32(evergreen_disp_int_status[i]);
4623 		afmt_status[i] = RREG32(AFMT_STATUS + crtc_offsets[i]);
4624 		if (i < rdev->num_crtc)
4625 			grph_int[i] = RREG32(GRPH_INT_STATUS + crtc_offsets[i]);
4626 	}
4627 
4628 	/* We write back the interrupt registers in pairs, two CRTCs at a time */
4629 	for (i = 0; i < rdev->num_crtc; i += 2) {
4630 		for (j = i; j < (i + 2); j++) {
4631 			if (grph_int[j] & GRPH_PFLIP_INT_OCCURRED)
4632 				WREG32(GRPH_INT_STATUS + crtc_offsets[j],
4633 				       GRPH_PFLIP_INT_CLEAR);
4634 		}
4635 
4636 		for (j = i; j < (i + 2); j++) {
4637 			if (disp_int[j] & LB_D1_VBLANK_INTERRUPT)
4638 				WREG32(VBLANK_STATUS + crtc_offsets[j],
4639 				       VBLANK_ACK);
4640 			if (disp_int[j] & LB_D1_VLINE_INTERRUPT)
4641 				WREG32(VLINE_STATUS + crtc_offsets[j],
4642 				       VLINE_ACK);
4643 		}
4644 	}
4645 
4646 	for (i = 0; i < 6; i++) {
4647 		if (disp_int[i] & DC_HPD1_INTERRUPT)
4648 			WREG32_OR(DC_HPDx_INT_CONTROL(i), DC_HPDx_INT_ACK);
4649 	}
4650 
4651 	for (i = 0; i < 6; i++) {
4652 		if (disp_int[i] & DC_HPD1_RX_INTERRUPT)
4653 			WREG32_OR(DC_HPDx_INT_CONTROL(i), DC_HPDx_RX_INT_ACK);
4654 	}
4655 
4656 	for (i = 0; i < 6; i++) {
4657 		if (afmt_status[i] & AFMT_AZ_FORMAT_WTRIG)
4658 			WREG32_OR(AFMT_AUDIO_PACKET_CONTROL + crtc_offsets[i],
4659 				  AFMT_AZ_FORMAT_WTRIG_ACK);
4660 	}
4661 }
4662 
4663 static void evergreen_irq_disable(struct radeon_device *rdev)
4664 {
4665 	r600_disable_interrupts(rdev);
4666 	/* Wait and acknowledge irq */
4667 	mdelay(1);
4668 	evergreen_irq_ack(rdev);
4669 	evergreen_disable_interrupt_state(rdev);
4670 }
4671 
4672 void evergreen_irq_suspend(struct radeon_device *rdev)
4673 {
4674 	evergreen_irq_disable(rdev);
4675 	r600_rlc_stop(rdev);
4676 }
4677 
4678 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4679 {
4680 	u32 wptr, tmp;
4681 
4682 	if (rdev->wb.enabled)
4683 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4684 	else
4685 		wptr = RREG32(IH_RB_WPTR);
4686 
4687 	if (wptr & RB_OVERFLOW) {
4688 		wptr &= ~RB_OVERFLOW;
4689 		/* When a ring buffer overflow happens, start parsing interrupts
4690 		 * from the last not-overwritten vector (wptr + 16). Hopefully
4691 		 * this should allow us to catch up.
4692 		 */
4693 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
4694 			 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4695 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4696 		tmp = RREG32(IH_RB_CNTL);
4697 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
4698 		WREG32(IH_RB_CNTL, tmp);
4699 	}
4700 	return (wptr & rdev->ih.ptr_mask);
4701 }
4702 
4703 int evergreen_irq_process(struct radeon_device *rdev)
4704 {
4705 	u32 *disp_int = rdev->irq.stat_regs.evergreen.disp_int;
4706 	u32 *afmt_status = rdev->irq.stat_regs.evergreen.afmt_status;
4707 	u32 crtc_idx, hpd_idx, afmt_idx;
4708 	u32 mask;
4709 	u32 wptr;
4710 	u32 rptr;
4711 	u32 src_id, src_data;
4712 	u32 ring_index;
4713 	bool queue_hotplug = false;
4714 	bool queue_hdmi = false;
4715 	bool queue_dp = false;
4716 	bool queue_thermal = false;
4717 	u32 status, addr;
4718 	const char *event_name;
4719 
4720 	if (!rdev->ih.enabled || rdev->shutdown)
4721 		return IRQ_NONE;
4722 
4723 	wptr = evergreen_get_ih_wptr(rdev);
4724 
4725 restart_ih:
4726 	/* is somebody else already processing irqs? */
4727 	if (atomic_xchg(&rdev->ih.lock, 1))
4728 		return IRQ_NONE;
4729 
4730 	rptr = rdev->ih.rptr;
4731 	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4732 
4733 	/* Order reading of wptr vs. reading of IH ring data */
4734 	rmb();
4735 
4736 	/* display interrupts */
4737 	evergreen_irq_ack(rdev);
4738 
4739 	while (rptr != wptr) {
4740 		/* wptr/rptr are in bytes! */
4741 		ring_index = rptr / 4;
4742 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4743 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
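		/* each IH ring entry is 16 bytes (four dwords): dword 0
		 * carries the source id and dword 1 the source data, hence
		 * the rptr += 16 at the bottom of the loop */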
4744 
4745 		switch (src_id) {
4746 		case 1: /* D1 vblank/vline */
4747 		case 2: /* D2 vblank/vline */
4748 		case 3: /* D3 vblank/vline */
4749 		case 4: /* D4 vblank/vline */
4750 		case 5: /* D5 vblank/vline */
4751 		case 6: /* D6 vblank/vline */
4752 			crtc_idx = src_id - 1;
4753 
4754 			if (src_data == 0) { /* vblank */
4755 				mask = LB_D1_VBLANK_INTERRUPT;
4756 				event_name = "vblank";
4757 
4758 				if (rdev->irq.crtc_vblank_int[crtc_idx]) {
4759 					drm_handle_vblank(rdev->ddev, crtc_idx);
4760 					rdev->pm.vblank_sync = true;
4761 					wake_up(&rdev->irq.vblank_queue);
4762 				}
4763 				if (atomic_read(&rdev->irq.pflip[crtc_idx])) {
4764 					radeon_crtc_handle_vblank(rdev,
4765 								  crtc_idx);
4766 				}
4767 
4768 			} else if (src_data == 1) { /* vline */
4769 				mask = LB_D1_VLINE_INTERRUPT;
4770 				event_name = "vline";
4771 			} else {
4772 				DRM_DEBUG("Unhandled interrupt: %d %d\n",
4773 					  src_id, src_data);
4774 				break;
4775 			}
4776 
4777 			if (!(disp_int[crtc_idx] & mask)) {
4778 				DRM_DEBUG("IH: D%d %s - IH event w/o asserted irq bit?\n",
4779 					  crtc_idx + 1, event_name);
4780 			}
4781 
4782 			disp_int[crtc_idx] &= ~mask;
4783 			DRM_DEBUG("IH: D%d %s\n", crtc_idx + 1, event_name);
4784 
4785 			break;
4786 		case 8: /* D1 page flip */
4787 		case 10: /* D2 page flip */
4788 		case 12: /* D3 page flip */
4789 		case 14: /* D4 page flip */
4790 		case 16: /* D5 page flip */
4791 		case 18: /* D6 page flip */
4792 			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
4793 			if (radeon_use_pflipirq > 0)
4794 				radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
4795 			break;
4796 		case 42: /* HPD hotplug */
4797 			if (src_data <= 5) {
4798 				hpd_idx = src_data;
4799 				mask = DC_HPD1_INTERRUPT;
4800 				queue_hotplug = true;
4801 				event_name = "HPD";
4802 
4803 			} else if (src_data <= 11) {
4804 				hpd_idx = src_data - 6;
4805 				mask = DC_HPD1_RX_INTERRUPT;
4806 				queue_dp = true;
4807 				event_name = "HPD_RX";
4808 
4809 			} else {
4810 				DRM_DEBUG("Unhandled interrupt: %d %d\n",
4811 					  src_id, src_data);
4812 				break;
4813 			}
4814 
4815 			if (!(disp_int[hpd_idx] & mask))
4816 				DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
4817 
4818 			disp_int[hpd_idx] &= ~mask;
4819 			DRM_DEBUG("IH: %s%d\n", event_name, hpd_idx + 1);
4820 
4821 			break;
4822 		case 44: /* hdmi */
4823 			afmt_idx = src_data;
4824 			/* bounds-check src_data before indexing afmt_status */
4825 			if (afmt_idx > 5) {
4826 				DRM_ERROR("Unhandled interrupt: %d %d\n",
4827 					  src_id, src_data);
4828 				break;
4829 			}
4830 			if (!(afmt_status[afmt_idx] & AFMT_AZ_FORMAT_WTRIG))
4831 				DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
4832 			afmt_status[afmt_idx] &= ~AFMT_AZ_FORMAT_WTRIG;
4833 			queue_hdmi = true;
4834 			DRM_DEBUG("IH: HDMI%d\n", afmt_idx + 1);
4835 			break;
		case 96:
			DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
			WREG32(SRBM_INT_ACK, 0x1);
			break;
		case 124: /* UVD */
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
			break;
		case 146:
		case 147:
			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
			/* reset addr and status */
			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
			if (addr == 0x0 && status == 0x0)
				break;
			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
				addr);
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
				status);
			cayman_vm_decode_fault(rdev, status, addr);
			break;
		case 176: /* CP_INT in ring buffer */
		case 177: /* CP_INT in IB1 */
		case 178: /* CP_INT in IB2 */
			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 181: /* CP EOP event */
			DRM_DEBUG("IH: CP EOP\n");
			if (rdev->family >= CHIP_CAYMAN) {
				switch (src_data) {
				case 0:
					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
					break;
				case 1:
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
					break;
				case 2:
					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
					break;
				}
			} else
				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 224: /* DMA trap event */
			DRM_DEBUG("IH: DMA trap\n");
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
			break;
		case 230: /* thermal low to high */
			DRM_DEBUG("IH: thermal low to high\n");
			rdev->pm.dpm.thermal.high_to_low = false;
			queue_thermal = true;
			break;
		case 231: /* thermal high to low */
			DRM_DEBUG("IH: thermal high to low\n");
			rdev->pm.dpm.thermal.high_to_low = true;
			queue_thermal = true;
			break;
		case 233: /* GUI IDLE */
			DRM_DEBUG("IH: GUI idle\n");
			break;
		case 244: /* DMA trap event */
			if (rdev->family >= CHIP_CAYMAN) {
				DRM_DEBUG("IH: DMA1 trap\n");
				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
			}
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}

		/* each IH ring entry is 128 bits (4 dwords); wptr/rptr are in bytes! */
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
		WREG32(IH_RB_RPTR, rptr);
	}
	if (queue_dp)
		schedule_work(&rdev->dp_work);
	if (queue_hotplug)
		schedule_delayed_work(&rdev->hotplug_work, 0);
	if (queue_hdmi)
		schedule_work(&rdev->audio_work);
	if (queue_thermal && rdev->pm.dpm_enabled)
		schedule_work(&rdev->pm.dpm.thermal.work);
	rdev->ih.rptr = rptr;
	atomic_set(&rdev->ih.lock, 0);

	/* make sure wptr hasn't changed while processing */
	wptr = evergreen_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}

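/**
 * evergreen_uvd_init - init the UVD block (evergreen)
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the UVD engine and set up its ring descriptor.
 * On failure, UVD is disabled entirely so that the later
 * resume/init paths are skipped.
 */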
static void evergreen_uvd_init(struct radeon_device *rdev)
{
	int r;

	if (!rdev->has_uvd)
		return;

	r = radeon_uvd_init(rdev);
	if (r) {
		dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
		/*
		 * At this point rdev->uvd.vcpu_bo is NULL, which makes
		 * uvd_v2_2_resume() fail early, so nothing would happen
		 * there anyway. Going through that code path is pointless,
		 * which is why we disable UVD here.
		 */
		rdev->has_uvd = false;
		return;
	}
	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
}

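/**
 * evergreen_uvd_start - resume UVD and start its fence ring (evergreen)
 *
 * @rdev: radeon_device pointer
 *
 * Resume the UVD block and start its fence driver ring. On error the
 * UVD ring size is zeroed so evergreen_uvd_resume() will skip it.
 */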
static void evergreen_uvd_start(struct radeon_device *rdev)
{
	int r;

	if (!rdev->has_uvd)
		return;

	r = uvd_v2_2_resume(rdev);
	if (r) {
		dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
		goto error;
	}
	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
		goto error;
	}
	return;

error:
	rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
}

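/**
 * evergreen_uvd_resume - bring up the UVD ring (evergreen)
 *
 * @rdev: radeon_device pointer
 *
 * Initialize the UVD ring and the UVD block itself; does nothing if
 * UVD is disabled or if evergreen_uvd_start() failed earlier.
 */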
static void evergreen_uvd_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
		return;

	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
	if (r) {
		dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
		return;
	}
	r = uvd_v1_0_init(rdev);
	if (r) {
		dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
		return;
	}
}

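/**
 * evergreen_startup - program the asic to a functional state (evergreen)
 *
 * @rdev: radeon_device pointer
 *
 * Program the GPU into a functional state: MC, GART, rings, IRQs,
 * CP/DMA microcode, UVD, the IB pool and audio.
 * Returns 0 on success, negative error code on failure.
 */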
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	evergreen_uvd_start(rdev);

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	evergreen_uvd_resume(rdev);

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}

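/**
 * evergreen_resume - resume the asic (evergreen)
 *
 * @rdev: radeon_device pointer
 *
 * Reset and re-post the GPU, restore the golden registers and run
 * evergreen_startup() to bring the chip back up after suspend.
 * Returns 0 on success, negative error code on failure.
 */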
int evergreen_resume(struct radeon_device *rdev)
{
	int r;

	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed!\n");
	/* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
	 * posting performs the tasks necessary to bring the GPU back into
	 * good shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	evergreen_init_golden_registers(rdev);

	if (rdev->pm.pm_method == PM_METHOD_DPM)
		radeon_pm_resume(rdev);

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		DRM_ERROR("evergreen startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;
}

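/**
 * evergreen_suspend - suspend the asic (evergreen)
 *
 * @rdev: radeon_device pointer
 *
 * Tear down the rings, IRQs, writeback and GART in preparation
 * for suspend. Always returns 0.
 */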
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	if (rdev->has_uvd) {
		radeon_uvd_suspend(rdev);
		uvd_v1_0_fini(rdev);
	}
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}

/* The plan is to move initialization into this function and use helper
 * functions so that radeon_device_init does little more than call
 * ASIC-specific functions. That should also allow us to remove a bunch
 * of callbacks like vram_info.
 */
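/**
 * evergreen_init - asic specific driver and hw init (evergreen)
 *
 * @rdev: radeon_device pointer
 *
 * One-time driver init: read the BIOS, post the card if necessary,
 * set up clocks, memory, rings and firmware, then call
 * evergreen_startup(). Returns 0 on success, negative error code
 * on failure.
 */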
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed!\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	radeon_fence_driver_init(rdev);
	/* initialize AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	evergreen_uvd_init(rdev);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}

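/**
 * evergreen_fini - asic specific driver and hw teardown (evergreen)
 *
 * @rdev: radeon_device pointer
 *
 * Tear down everything set up by evergreen_init() in roughly the
 * reverse order, and free the BIOS image.
 */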
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}

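/**
 * evergreen_pcie_gen2_enable - enable PCIE gen 2 link speeds
 *
 * @rdev: radeon_device pointer
 *
 * Switch the PCIE link to gen 2 speeds where both the chip and the
 * bridge support it; bails out early for IGPs, non-PCIE parts, X2
 * boards and links already running at gen 2.
 */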
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {

		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}

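/**
 * evergreen_program_aspm - program ASPM (active state power management)
 *
 * @rdev: radeon_device pointer
 *
 * Configure the L0s/L1 link power states and the PLL power-down
 * behavior in L1, with per-family exceptions. Disabled with
 * radeon.aspm=0.
 */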
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform should be true if the system is a fusion
	 * system (an APU, or a dGPU in a fusion system).
	 * TODO: check whether the system really is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}