xref: /openbmc/linux/drivers/gpu/drm/radeon/evergreen.c (revision 24b1944f)
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/platform_device.h>
26 #include <linux/slab.h>
27 #include <drm/drmP.h>
28 #include "radeon.h"
29 #include "radeon_asic.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 
37 #define EVERGREEN_PFP_UCODE_SIZE 1120
38 #define EVERGREEN_PM4_UCODE_SIZE 1376
39 
/* MMIO register base offset for each of the six Evergreen display controllers (CRTCs). */
40 static const u32 crtc_offsets[6] =
41 {
42 	EVERGREEN_CRTC0_REGISTER_OFFSET,
43 	EVERGREEN_CRTC1_REGISTER_OFFSET,
44 	EVERGREEN_CRTC2_REGISTER_OFFSET,
45 	EVERGREEN_CRTC3_REGISTER_OFFSET,
46 	EVERGREEN_CRTC4_REGISTER_OFFSET,
47 	EVERGREEN_CRTC5_REGISTER_OFFSET
48 };
49 
50 static void evergreen_gpu_init(struct radeon_device *rdev);
51 void evergreen_fini(struct radeon_device *rdev);
52 void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
53 extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
54 				     int ring, u32 cp_int_cntl);
55 
/*
 * "Golden" register fixups shared by Cypress/Hemlock, Juniper and Redwood.
 * Layout: {register offset, mask, value} triples, handed to
 * radeon_program_register_sequence() from evergreen_init_golden_registers().
 * (Exact mask semantics live in radeon_program_register_sequence — not
 * visible in this file.)
 */
56 static const u32 evergreen_golden_registers[] =
57 {
58 	0x3f90, 0xffff0000, 0xff000000,
59 	0x9148, 0xffff0000, 0xff000000,
60 	0x3f94, 0xffff0000, 0xff000000,
61 	0x914c, 0xffff0000, 0xff000000,
62 	0x9b7c, 0xffffffff, 0x00000000,
63 	0x8a14, 0xffffffff, 0x00000007,
64 	0x8b10, 0xffffffff, 0x00000000,
65 	0x960c, 0xffffffff, 0x54763210,
66 	0x88c4, 0xffffffff, 0x000000c2,
67 	0x88d4, 0xffffffff, 0x00000010,
68 	0x8974, 0xffffffff, 0x00000000,
69 	0xc78, 0x00000080, 0x00000080,
70 	0x5eb4, 0xffffffff, 0x00000002,
71 	0x5e78, 0xffffffff, 0x001000f0,
72 	0x6104, 0x01000300, 0x00000000,
73 	0x5bc0, 0x00300000, 0x00000000,
74 	0x7030, 0xffffffff, 0x00000011,
75 	0x7c30, 0xffffffff, 0x00000011,
76 	0x10830, 0xffffffff, 0x00000011,
77 	0x11430, 0xffffffff, 0x00000011,
78 	0x12030, 0xffffffff, 0x00000011,
79 	0x12c30, 0xffffffff, 0x00000011,
80 	0xd02c, 0xffffffff, 0x08421000,
81 	0x240c, 0xffffffff, 0x00000380,
82 	0x8b24, 0xffffffff, 0x00ff0fff,
83 	0x28a4c, 0x06000000, 0x06000000,
84 	0x10c, 0x00000001, 0x00000001,
85 	0x8d00, 0xffffffff, 0x100e4848,
86 	0x8d04, 0xffffffff, 0x00164745,
87 	0x8c00, 0xffffffff, 0xe4000003,
88 	0x8c04, 0xffffffff, 0x40600060,
89 	0x8c08, 0xffffffff, 0x001c001c,
90 	0x8cf0, 0xffffffff, 0x08e00620,
91 	0x8c20, 0xffffffff, 0x00800080,
92 	0x8c24, 0xffffffff, 0x00800080,
93 	0x8c18, 0xffffffff, 0x20202078,
94 	0x8c1c, 0xffffffff, 0x00001010,
95 	0x28350, 0xffffffff, 0x00000000,
96 	0xa008, 0xffffffff, 0x00010000,
97 	0x5cc, 0xffffffff, 0x00000001,
98 	0x9508, 0xffffffff, 0x00000002,
99 	0x913c, 0x0000000f, 0x0000000a
100 };
101 
/*
 * Second golden-register table, applied after the per-chip golden table for
 * Cypress/Hemlock, Juniper, Redwood and Cedar. Same {offset, mask, value}
 * triple format; all entries zero the full register.
 */
102 static const u32 evergreen_golden_registers2[] =
103 {
104 	0x2f4c, 0xffffffff, 0x00000000,
105 	0x54f4, 0xffffffff, 0x00000000,
106 	0x54f0, 0xffffffff, 0x00000000,
107 	0x5498, 0xffffffff, 0x00000000,
108 	0x549c, 0xffffffff, 0x00000000,
109 	0x5494, 0xffffffff, 0x00000000,
110 	0x53cc, 0xffffffff, 0x00000000,
111 	0x53c8, 0xffffffff, 0x00000000,
112 	0x53c4, 0xffffffff, 0x00000000,
113 	0x53c0, 0xffffffff, 0x00000000,
114 	0x53bc, 0xffffffff, 0x00000000,
115 	0x53b8, 0xffffffff, 0x00000000,
116 	0x53b4, 0xffffffff, 0x00000000,
117 	0x53b0, 0xffffffff, 0x00000000
118 };
119 
/*
 * Clock-gating init sequence for Cypress/Hemlock ("mgcg" — presumably
 * medium-grain clock gating; confirm against AMD register docs).
 * Same {register offset, mask, value} triple format as the golden tables;
 * note 0x802c is rewritten several times to sequence the programming.
 */
120 static const u32 cypress_mgcg_init[] =
121 {
122 	0x802c, 0xffffffff, 0xc0000000,
123 	0x5448, 0xffffffff, 0x00000100,
124 	0x55e4, 0xffffffff, 0x00000100,
125 	0x160c, 0xffffffff, 0x00000100,
126 	0x5644, 0xffffffff, 0x00000100,
127 	0xc164, 0xffffffff, 0x00000100,
128 	0x8a18, 0xffffffff, 0x00000100,
129 	0x897c, 0xffffffff, 0x06000100,
130 	0x8b28, 0xffffffff, 0x00000100,
131 	0x9144, 0xffffffff, 0x00000100,
132 	0x9a60, 0xffffffff, 0x00000100,
133 	0x9868, 0xffffffff, 0x00000100,
134 	0x8d58, 0xffffffff, 0x00000100,
135 	0x9510, 0xffffffff, 0x00000100,
136 	0x949c, 0xffffffff, 0x00000100,
137 	0x9654, 0xffffffff, 0x00000100,
138 	0x9030, 0xffffffff, 0x00000100,
139 	0x9034, 0xffffffff, 0x00000100,
140 	0x9038, 0xffffffff, 0x00000100,
141 	0x903c, 0xffffffff, 0x00000100,
142 	0x9040, 0xffffffff, 0x00000100,
143 	0xa200, 0xffffffff, 0x00000100,
144 	0xa204, 0xffffffff, 0x00000100,
145 	0xa208, 0xffffffff, 0x00000100,
146 	0xa20c, 0xffffffff, 0x00000100,
147 	0x971c, 0xffffffff, 0x00000100,
148 	0x977c, 0xffffffff, 0x00000100,
149 	0x3f80, 0xffffffff, 0x00000100,
150 	0xa210, 0xffffffff, 0x00000100,
151 	0xa214, 0xffffffff, 0x00000100,
152 	0x4d8, 0xffffffff, 0x00000100,
153 	0x9784, 0xffffffff, 0x00000100,
154 	0x9698, 0xffffffff, 0x00000100,
155 	0x4d4, 0xffffffff, 0x00000200,
156 	0x30cc, 0xffffffff, 0x00000100,
157 	0xd0c0, 0xffffffff, 0xff000100,
158 	0x802c, 0xffffffff, 0x40000000,
159 	0x915c, 0xffffffff, 0x00010000,
160 	0x9160, 0xffffffff, 0x00030002,
161 	0x9178, 0xffffffff, 0x00070000,
162 	0x917c, 0xffffffff, 0x00030002,
163 	0x9180, 0xffffffff, 0x00050004,
164 	0x918c, 0xffffffff, 0x00010006,
165 	0x9190, 0xffffffff, 0x00090008,
166 	0x9194, 0xffffffff, 0x00070000,
167 	0x9198, 0xffffffff, 0x00030002,
168 	0x919c, 0xffffffff, 0x00050004,
169 	0x91a8, 0xffffffff, 0x00010006,
170 	0x91ac, 0xffffffff, 0x00090008,
171 	0x91b0, 0xffffffff, 0x00070000,
172 	0x91b4, 0xffffffff, 0x00030002,
173 	0x91b8, 0xffffffff, 0x00050004,
174 	0x91c4, 0xffffffff, 0x00010006,
175 	0x91c8, 0xffffffff, 0x00090008,
176 	0x91cc, 0xffffffff, 0x00070000,
177 	0x91d0, 0xffffffff, 0x00030002,
178 	0x91d4, 0xffffffff, 0x00050004,
179 	0x91e0, 0xffffffff, 0x00010006,
180 	0x91e4, 0xffffffff, 0x00090008,
181 	0x91e8, 0xffffffff, 0x00000000,
182 	0x91ec, 0xffffffff, 0x00070000,
183 	0x91f0, 0xffffffff, 0x00030002,
184 	0x91f4, 0xffffffff, 0x00050004,
185 	0x9200, 0xffffffff, 0x00010006,
186 	0x9204, 0xffffffff, 0x00090008,
187 	0x9208, 0xffffffff, 0x00070000,
188 	0x920c, 0xffffffff, 0x00030002,
189 	0x9210, 0xffffffff, 0x00050004,
190 	0x921c, 0xffffffff, 0x00010006,
191 	0x9220, 0xffffffff, 0x00090008,
192 	0x9224, 0xffffffff, 0x00070000,
193 	0x9228, 0xffffffff, 0x00030002,
194 	0x922c, 0xffffffff, 0x00050004,
195 	0x9238, 0xffffffff, 0x00010006,
196 	0x923c, 0xffffffff, 0x00090008,
197 	0x9240, 0xffffffff, 0x00070000,
198 	0x9244, 0xffffffff, 0x00030002,
199 	0x9248, 0xffffffff, 0x00050004,
200 	0x9254, 0xffffffff, 0x00010006,
201 	0x9258, 0xffffffff, 0x00090008,
202 	0x925c, 0xffffffff, 0x00070000,
203 	0x9260, 0xffffffff, 0x00030002,
204 	0x9264, 0xffffffff, 0x00050004,
205 	0x9270, 0xffffffff, 0x00010006,
206 	0x9274, 0xffffffff, 0x00090008,
207 	0x9278, 0xffffffff, 0x00070000,
208 	0x927c, 0xffffffff, 0x00030002,
209 	0x9280, 0xffffffff, 0x00050004,
210 	0x928c, 0xffffffff, 0x00010006,
211 	0x9290, 0xffffffff, 0x00090008,
212 	0x9294, 0xffffffff, 0x00000000,
213 	0x929c, 0xffffffff, 0x00000001,
214 	0x802c, 0xffffffff, 0x40010000,
215 	0x915c, 0xffffffff, 0x00010000,
216 	0x9160, 0xffffffff, 0x00030002,
217 	0x9178, 0xffffffff, 0x00070000,
218 	0x917c, 0xffffffff, 0x00030002,
219 	0x9180, 0xffffffff, 0x00050004,
220 	0x918c, 0xffffffff, 0x00010006,
221 	0x9190, 0xffffffff, 0x00090008,
222 	0x9194, 0xffffffff, 0x00070000,
223 	0x9198, 0xffffffff, 0x00030002,
224 	0x919c, 0xffffffff, 0x00050004,
225 	0x91a8, 0xffffffff, 0x00010006,
226 	0x91ac, 0xffffffff, 0x00090008,
227 	0x91b0, 0xffffffff, 0x00070000,
228 	0x91b4, 0xffffffff, 0x00030002,
229 	0x91b8, 0xffffffff, 0x00050004,
230 	0x91c4, 0xffffffff, 0x00010006,
231 	0x91c8, 0xffffffff, 0x00090008,
232 	0x91cc, 0xffffffff, 0x00070000,
233 	0x91d0, 0xffffffff, 0x00030002,
234 	0x91d4, 0xffffffff, 0x00050004,
235 	0x91e0, 0xffffffff, 0x00010006,
236 	0x91e4, 0xffffffff, 0x00090008,
237 	0x91e8, 0xffffffff, 0x00000000,
238 	0x91ec, 0xffffffff, 0x00070000,
239 	0x91f0, 0xffffffff, 0x00030002,
240 	0x91f4, 0xffffffff, 0x00050004,
241 	0x9200, 0xffffffff, 0x00010006,
242 	0x9204, 0xffffffff, 0x00090008,
243 	0x9208, 0xffffffff, 0x00070000,
244 	0x920c, 0xffffffff, 0x00030002,
245 	0x9210, 0xffffffff, 0x00050004,
246 	0x921c, 0xffffffff, 0x00010006,
247 	0x9220, 0xffffffff, 0x00090008,
248 	0x9224, 0xffffffff, 0x00070000,
249 	0x9228, 0xffffffff, 0x00030002,
250 	0x922c, 0xffffffff, 0x00050004,
251 	0x9238, 0xffffffff, 0x00010006,
252 	0x923c, 0xffffffff, 0x00090008,
253 	0x9240, 0xffffffff, 0x00070000,
254 	0x9244, 0xffffffff, 0x00030002,
255 	0x9248, 0xffffffff, 0x00050004,
256 	0x9254, 0xffffffff, 0x00010006,
257 	0x9258, 0xffffffff, 0x00090008,
258 	0x925c, 0xffffffff, 0x00070000,
259 	0x9260, 0xffffffff, 0x00030002,
260 	0x9264, 0xffffffff, 0x00050004,
261 	0x9270, 0xffffffff, 0x00010006,
262 	0x9274, 0xffffffff, 0x00090008,
263 	0x9278, 0xffffffff, 0x00070000,
264 	0x927c, 0xffffffff, 0x00030002,
265 	0x9280, 0xffffffff, 0x00050004,
266 	0x928c, 0xffffffff, 0x00010006,
267 	0x9290, 0xffffffff, 0x00090008,
268 	0x9294, 0xffffffff, 0x00000000,
269 	0x929c, 0xffffffff, 0x00000001,
270 	0x802c, 0xffffffff, 0xc0000000
271 };
272 
/*
 * Clock-gating ("mgcg") init sequence for Redwood. Same {offset, mask,
 * value} triple format as the golden tables; shorter than the Cypress
 * sequence (fewer 0x91xx entries, single pass).
 */
273 static const u32 redwood_mgcg_init[] =
274 {
275 	0x802c, 0xffffffff, 0xc0000000,
276 	0x5448, 0xffffffff, 0x00000100,
277 	0x55e4, 0xffffffff, 0x00000100,
278 	0x160c, 0xffffffff, 0x00000100,
279 	0x5644, 0xffffffff, 0x00000100,
280 	0xc164, 0xffffffff, 0x00000100,
281 	0x8a18, 0xffffffff, 0x00000100,
282 	0x897c, 0xffffffff, 0x06000100,
283 	0x8b28, 0xffffffff, 0x00000100,
284 	0x9144, 0xffffffff, 0x00000100,
285 	0x9a60, 0xffffffff, 0x00000100,
286 	0x9868, 0xffffffff, 0x00000100,
287 	0x8d58, 0xffffffff, 0x00000100,
288 	0x9510, 0xffffffff, 0x00000100,
289 	0x949c, 0xffffffff, 0x00000100,
290 	0x9654, 0xffffffff, 0x00000100,
291 	0x9030, 0xffffffff, 0x00000100,
292 	0x9034, 0xffffffff, 0x00000100,
293 	0x9038, 0xffffffff, 0x00000100,
294 	0x903c, 0xffffffff, 0x00000100,
295 	0x9040, 0xffffffff, 0x00000100,
296 	0xa200, 0xffffffff, 0x00000100,
297 	0xa204, 0xffffffff, 0x00000100,
298 	0xa208, 0xffffffff, 0x00000100,
299 	0xa20c, 0xffffffff, 0x00000100,
300 	0x971c, 0xffffffff, 0x00000100,
301 	0x977c, 0xffffffff, 0x00000100,
302 	0x3f80, 0xffffffff, 0x00000100,
303 	0xa210, 0xffffffff, 0x00000100,
304 	0xa214, 0xffffffff, 0x00000100,
305 	0x4d8, 0xffffffff, 0x00000100,
306 	0x9784, 0xffffffff, 0x00000100,
307 	0x9698, 0xffffffff, 0x00000100,
308 	0x4d4, 0xffffffff, 0x00000200,
309 	0x30cc, 0xffffffff, 0x00000100,
310 	0xd0c0, 0xffffffff, 0xff000100,
311 	0x802c, 0xffffffff, 0x40000000,
312 	0x915c, 0xffffffff, 0x00010000,
313 	0x9160, 0xffffffff, 0x00030002,
314 	0x9178, 0xffffffff, 0x00070000,
315 	0x917c, 0xffffffff, 0x00030002,
316 	0x9180, 0xffffffff, 0x00050004,
317 	0x918c, 0xffffffff, 0x00010006,
318 	0x9190, 0xffffffff, 0x00090008,
319 	0x9194, 0xffffffff, 0x00070000,
320 	0x9198, 0xffffffff, 0x00030002,
321 	0x919c, 0xffffffff, 0x00050004,
322 	0x91a8, 0xffffffff, 0x00010006,
323 	0x91ac, 0xffffffff, 0x00090008,
324 	0x91b0, 0xffffffff, 0x00070000,
325 	0x91b4, 0xffffffff, 0x00030002,
326 	0x91b8, 0xffffffff, 0x00050004,
327 	0x91c4, 0xffffffff, 0x00010006,
328 	0x91c8, 0xffffffff, 0x00090008,
329 	0x91cc, 0xffffffff, 0x00070000,
330 	0x91d0, 0xffffffff, 0x00030002,
331 	0x91d4, 0xffffffff, 0x00050004,
332 	0x91e0, 0xffffffff, 0x00010006,
333 	0x91e4, 0xffffffff, 0x00090008,
334 	0x91e8, 0xffffffff, 0x00000000,
335 	0x91ec, 0xffffffff, 0x00070000,
336 	0x91f0, 0xffffffff, 0x00030002,
337 	0x91f4, 0xffffffff, 0x00050004,
338 	0x9200, 0xffffffff, 0x00010006,
339 	0x9204, 0xffffffff, 0x00090008,
340 	0x9294, 0xffffffff, 0x00000000,
341 	0x929c, 0xffffffff, 0x00000001,
342 	0x802c, 0xffffffff, 0xc0000000
343 };
344 
/*
 * Golden register fixups for Cedar. Same {offset, mask, value} triple
 * format; differs from evergreen_golden_registers mainly in 0x88d4,
 * 0x8cf0 and the trailing entries (no 0x913c fixup).
 */
345 static const u32 cedar_golden_registers[] =
346 {
347 	0x3f90, 0xffff0000, 0xff000000,
348 	0x9148, 0xffff0000, 0xff000000,
349 	0x3f94, 0xffff0000, 0xff000000,
350 	0x914c, 0xffff0000, 0xff000000,
351 	0x9b7c, 0xffffffff, 0x00000000,
352 	0x8a14, 0xffffffff, 0x00000007,
353 	0x8b10, 0xffffffff, 0x00000000,
354 	0x960c, 0xffffffff, 0x54763210,
355 	0x88c4, 0xffffffff, 0x000000c2,
356 	0x88d4, 0xffffffff, 0x00000000,
357 	0x8974, 0xffffffff, 0x00000000,
358 	0xc78, 0x00000080, 0x00000080,
359 	0x5eb4, 0xffffffff, 0x00000002,
360 	0x5e78, 0xffffffff, 0x001000f0,
361 	0x6104, 0x01000300, 0x00000000,
362 	0x5bc0, 0x00300000, 0x00000000,
363 	0x7030, 0xffffffff, 0x00000011,
364 	0x7c30, 0xffffffff, 0x00000011,
365 	0x10830, 0xffffffff, 0x00000011,
366 	0x11430, 0xffffffff, 0x00000011,
367 	0xd02c, 0xffffffff, 0x08421000,
368 	0x240c, 0xffffffff, 0x00000380,
369 	0x8b24, 0xffffffff, 0x00ff0fff,
370 	0x28a4c, 0x06000000, 0x06000000,
371 	0x10c, 0x00000001, 0x00000001,
372 	0x8d00, 0xffffffff, 0x100e4848,
373 	0x8d04, 0xffffffff, 0x00164745,
374 	0x8c00, 0xffffffff, 0xe4000003,
375 	0x8c04, 0xffffffff, 0x40600060,
376 	0x8c08, 0xffffffff, 0x001c001c,
377 	0x8cf0, 0xffffffff, 0x08e00410,
378 	0x8c20, 0xffffffff, 0x00800080,
379 	0x8c24, 0xffffffff, 0x00800080,
380 	0x8c18, 0xffffffff, 0x20202078,
381 	0x8c1c, 0xffffffff, 0x00001010,
382 	0x28350, 0xffffffff, 0x00000000,
383 	0xa008, 0xffffffff, 0x00010000,
384 	0x5cc, 0xffffffff, 0x00000001,
385 	0x9508, 0xffffffff, 0x00000002
386 };
387 
/*
 * Clock-gating ("mgcg") init sequence for Cedar. Same {offset, mask,
 * value} triple format as the other tables in this file.
 */
388 static const u32 cedar_mgcg_init[] =
389 {
390 	0x802c, 0xffffffff, 0xc0000000,
391 	0x5448, 0xffffffff, 0x00000100,
392 	0x55e4, 0xffffffff, 0x00000100,
393 	0x160c, 0xffffffff, 0x00000100,
394 	0x5644, 0xffffffff, 0x00000100,
395 	0xc164, 0xffffffff, 0x00000100,
396 	0x8a18, 0xffffffff, 0x00000100,
397 	0x897c, 0xffffffff, 0x06000100,
398 	0x8b28, 0xffffffff, 0x00000100,
399 	0x9144, 0xffffffff, 0x00000100,
400 	0x9a60, 0xffffffff, 0x00000100,
401 	0x9868, 0xffffffff, 0x00000100,
402 	0x8d58, 0xffffffff, 0x00000100,
403 	0x9510, 0xffffffff, 0x00000100,
404 	0x949c, 0xffffffff, 0x00000100,
405 	0x9654, 0xffffffff, 0x00000100,
406 	0x9030, 0xffffffff, 0x00000100,
407 	0x9034, 0xffffffff, 0x00000100,
408 	0x9038, 0xffffffff, 0x00000100,
409 	0x903c, 0xffffffff, 0x00000100,
410 	0x9040, 0xffffffff, 0x00000100,
411 	0xa200, 0xffffffff, 0x00000100,
412 	0xa204, 0xffffffff, 0x00000100,
413 	0xa208, 0xffffffff, 0x00000100,
414 	0xa20c, 0xffffffff, 0x00000100,
415 	0x971c, 0xffffffff, 0x00000100,
416 	0x977c, 0xffffffff, 0x00000100,
417 	0x3f80, 0xffffffff, 0x00000100,
418 	0xa210, 0xffffffff, 0x00000100,
419 	0xa214, 0xffffffff, 0x00000100,
420 	0x4d8, 0xffffffff, 0x00000100,
421 	0x9784, 0xffffffff, 0x00000100,
422 	0x9698, 0xffffffff, 0x00000100,
423 	0x4d4, 0xffffffff, 0x00000200,
424 	0x30cc, 0xffffffff, 0x00000100,
425 	0xd0c0, 0xffffffff, 0xff000100,
426 	0x802c, 0xffffffff, 0x40000000,
427 	0x915c, 0xffffffff, 0x00010000,
428 	0x9178, 0xffffffff, 0x00050000,
429 	0x917c, 0xffffffff, 0x00030002,
430 	0x918c, 0xffffffff, 0x00010004,
431 	0x9190, 0xffffffff, 0x00070006,
432 	0x9194, 0xffffffff, 0x00050000,
433 	0x9198, 0xffffffff, 0x00030002,
434 	0x91a8, 0xffffffff, 0x00010004,
435 	0x91ac, 0xffffffff, 0x00070006,
436 	0x91e8, 0xffffffff, 0x00000000,
437 	0x9294, 0xffffffff, 0x00000000,
438 	0x929c, 0xffffffff, 0x00000001,
439 	0x802c, 0xffffffff, 0xc0000000
440 };
441 
/*
 * Clock-gating ("mgcg") init sequence for Juniper. Same {offset, mask,
 * value} triple format; unlike the Cypress table, several 0x00000100
 * enable writes (0x977c, 0x3f80, 0xa210, ...) are appended at the end.
 */
442 static const u32 juniper_mgcg_init[] =
443 {
444 	0x802c, 0xffffffff, 0xc0000000,
445 	0x5448, 0xffffffff, 0x00000100,
446 	0x55e4, 0xffffffff, 0x00000100,
447 	0x160c, 0xffffffff, 0x00000100,
448 	0x5644, 0xffffffff, 0x00000100,
449 	0xc164, 0xffffffff, 0x00000100,
450 	0x8a18, 0xffffffff, 0x00000100,
451 	0x897c, 0xffffffff, 0x06000100,
452 	0x8b28, 0xffffffff, 0x00000100,
453 	0x9144, 0xffffffff, 0x00000100,
454 	0x9a60, 0xffffffff, 0x00000100,
455 	0x9868, 0xffffffff, 0x00000100,
456 	0x8d58, 0xffffffff, 0x00000100,
457 	0x9510, 0xffffffff, 0x00000100,
458 	0x949c, 0xffffffff, 0x00000100,
459 	0x9654, 0xffffffff, 0x00000100,
460 	0x9030, 0xffffffff, 0x00000100,
461 	0x9034, 0xffffffff, 0x00000100,
462 	0x9038, 0xffffffff, 0x00000100,
463 	0x903c, 0xffffffff, 0x00000100,
464 	0x9040, 0xffffffff, 0x00000100,
465 	0xa200, 0xffffffff, 0x00000100,
466 	0xa204, 0xffffffff, 0x00000100,
467 	0xa208, 0xffffffff, 0x00000100,
468 	0xa20c, 0xffffffff, 0x00000100,
469 	0x971c, 0xffffffff, 0x00000100,
470 	0xd0c0, 0xffffffff, 0xff000100,
471 	0x802c, 0xffffffff, 0x40000000,
472 	0x915c, 0xffffffff, 0x00010000,
473 	0x9160, 0xffffffff, 0x00030002,
474 	0x9178, 0xffffffff, 0x00070000,
475 	0x917c, 0xffffffff, 0x00030002,
476 	0x9180, 0xffffffff, 0x00050004,
477 	0x918c, 0xffffffff, 0x00010006,
478 	0x9190, 0xffffffff, 0x00090008,
479 	0x9194, 0xffffffff, 0x00070000,
480 	0x9198, 0xffffffff, 0x00030002,
481 	0x919c, 0xffffffff, 0x00050004,
482 	0x91a8, 0xffffffff, 0x00010006,
483 	0x91ac, 0xffffffff, 0x00090008,
484 	0x91b0, 0xffffffff, 0x00070000,
485 	0x91b4, 0xffffffff, 0x00030002,
486 	0x91b8, 0xffffffff, 0x00050004,
487 	0x91c4, 0xffffffff, 0x00010006,
488 	0x91c8, 0xffffffff, 0x00090008,
489 	0x91cc, 0xffffffff, 0x00070000,
490 	0x91d0, 0xffffffff, 0x00030002,
491 	0x91d4, 0xffffffff, 0x00050004,
492 	0x91e0, 0xffffffff, 0x00010006,
493 	0x91e4, 0xffffffff, 0x00090008,
494 	0x91e8, 0xffffffff, 0x00000000,
495 	0x91ec, 0xffffffff, 0x00070000,
496 	0x91f0, 0xffffffff, 0x00030002,
497 	0x91f4, 0xffffffff, 0x00050004,
498 	0x9200, 0xffffffff, 0x00010006,
499 	0x9204, 0xffffffff, 0x00090008,
500 	0x9208, 0xffffffff, 0x00070000,
501 	0x920c, 0xffffffff, 0x00030002,
502 	0x9210, 0xffffffff, 0x00050004,
503 	0x921c, 0xffffffff, 0x00010006,
504 	0x9220, 0xffffffff, 0x00090008,
505 	0x9224, 0xffffffff, 0x00070000,
506 	0x9228, 0xffffffff, 0x00030002,
507 	0x922c, 0xffffffff, 0x00050004,
508 	0x9238, 0xffffffff, 0x00010006,
509 	0x923c, 0xffffffff, 0x00090008,
510 	0x9240, 0xffffffff, 0x00070000,
511 	0x9244, 0xffffffff, 0x00030002,
512 	0x9248, 0xffffffff, 0x00050004,
513 	0x9254, 0xffffffff, 0x00010006,
514 	0x9258, 0xffffffff, 0x00090008,
515 	0x925c, 0xffffffff, 0x00070000,
516 	0x9260, 0xffffffff, 0x00030002,
517 	0x9264, 0xffffffff, 0x00050004,
518 	0x9270, 0xffffffff, 0x00010006,
519 	0x9274, 0xffffffff, 0x00090008,
520 	0x9278, 0xffffffff, 0x00070000,
521 	0x927c, 0xffffffff, 0x00030002,
522 	0x9280, 0xffffffff, 0x00050004,
523 	0x928c, 0xffffffff, 0x00010006,
524 	0x9290, 0xffffffff, 0x00090008,
525 	0x9294, 0xffffffff, 0x00000000,
526 	0x929c, 0xffffffff, 0x00000001,
527 	0x802c, 0xffffffff, 0xc0000000,
528 	0x977c, 0xffffffff, 0x00000100,
529 	0x3f80, 0xffffffff, 0x00000100,
530 	0xa210, 0xffffffff, 0x00000100,
531 	0xa214, 0xffffffff, 0x00000100,
532 	0x4d8, 0xffffffff, 0x00000100,
533 	0x9784, 0xffffffff, 0x00000100,
534 	0x9698, 0xffffffff, 0x00000100,
535 	0x4d4, 0xffffffff, 0x00000200,
536 	0x30cc, 0xffffffff, 0x00000100,
537 	0x802c, 0xffffffff, 0xc0000000
538 };
539 
/*
 * Golden register fixups for the Sumo APU family (used for both
 * CHIP_SUMO and CHIP_SUMO2; SUMO2 additionally applies
 * sumo_golden_registers afterwards). {offset, mask, value} triples.
 */
540 static const u32 supersumo_golden_registers[] =
541 {
542 	0x5eb4, 0xffffffff, 0x00000002,
543 	0x5cc, 0xffffffff, 0x00000001,
544 	0x7030, 0xffffffff, 0x00000011,
545 	0x7c30, 0xffffffff, 0x00000011,
546 	0x6104, 0x01000300, 0x00000000,
547 	0x5bc0, 0x00300000, 0x00000000,
548 	0x8c04, 0xffffffff, 0x40600060,
549 	0x8c08, 0xffffffff, 0x001c001c,
550 	0x8c20, 0xffffffff, 0x00800080,
551 	0x8c24, 0xffffffff, 0x00800080,
552 	0x8c18, 0xffffffff, 0x20202078,
553 	0x8c1c, 0xffffffff, 0x00001010,
554 	0x918c, 0xffffffff, 0x00010006,
555 	0x91a8, 0xffffffff, 0x00010006,
556 	0x91c4, 0xffffffff, 0x00010006,
557 	0x91e0, 0xffffffff, 0x00010006,
558 	0x9200, 0xffffffff, 0x00010006,
559 	0x9150, 0xffffffff, 0x6e944040,
560 	0x917c, 0xffffffff, 0x00030002,
561 	0x9180, 0xffffffff, 0x00050004,
562 	0x9198, 0xffffffff, 0x00030002,
563 	0x919c, 0xffffffff, 0x00050004,
564 	0x91b4, 0xffffffff, 0x00030002,
565 	0x91b8, 0xffffffff, 0x00050004,
566 	0x91d0, 0xffffffff, 0x00030002,
567 	0x91d4, 0xffffffff, 0x00050004,
568 	0x91f0, 0xffffffff, 0x00030002,
569 	0x91f4, 0xffffffff, 0x00050004,
570 	0x915c, 0xffffffff, 0x00010000,
571 	0x9160, 0xffffffff, 0x00030002,
572 	0x3f90, 0xffff0000, 0xff000000,
573 	0x9178, 0xffffffff, 0x00070000,
574 	0x9194, 0xffffffff, 0x00070000,
575 	0x91b0, 0xffffffff, 0x00070000,
576 	0x91cc, 0xffffffff, 0x00070000,
577 	0x91ec, 0xffffffff, 0x00070000,
578 	0x9148, 0xffff0000, 0xff000000,
579 	0x9190, 0xffffffff, 0x00090008,
580 	0x91ac, 0xffffffff, 0x00090008,
581 	0x91c8, 0xffffffff, 0x00090008,
582 	0x91e4, 0xffffffff, 0x00090008,
583 	0x9204, 0xffffffff, 0x00090008,
584 	0x3f94, 0xffff0000, 0xff000000,
585 	0x914c, 0xffff0000, 0xff000000,
586 	0x929c, 0xffffffff, 0x00000001,
587 	0x8a18, 0xffffffff, 0x00000100,
588 	0x8b28, 0xffffffff, 0x00000100,
589 	0x9144, 0xffffffff, 0x00000100,
590 	0x5644, 0xffffffff, 0x00000100,
591 	0x9b7c, 0xffffffff, 0x00000000,
592 	0x8030, 0xffffffff, 0x0000100a,
593 	0x8a14, 0xffffffff, 0x00000007,
594 	0x8b24, 0xffffffff, 0x00ff0fff,
595 	0x8b10, 0xffffffff, 0x00000000,
596 	0x28a4c, 0x06000000, 0x06000000,
597 	0x4d8, 0xffffffff, 0x00000100,
598 	0x913c, 0xffff000f, 0x0100000a,
599 	0x960c, 0xffffffff, 0x54763210,
600 	0x88c4, 0xffffffff, 0x000000c2,
601 	0x88d4, 0xffffffff, 0x00000010,
602 	0x8974, 0xffffffff, 0x00000000,
603 	0xc78, 0x00000080, 0x00000080,
604 	0x5e78, 0xffffffff, 0x001000f0,
605 	0xd02c, 0xffffffff, 0x08421000,
606 	0xa008, 0xffffffff, 0x00010000,
607 	0x8d00, 0xffffffff, 0x100e4848,
608 	0x8d04, 0xffffffff, 0x00164745,
609 	0x8c00, 0xffffffff, 0xe4000003,
610 	0x8cf0, 0x1fffffff, 0x08e00620,
611 	0x28350, 0xffffffff, 0x00000000,
612 	0x9508, 0xffffffff, 0x00000002
613 };
614 
/*
 * Extra golden register fixups for CHIP_SUMO2 only, applied after
 * supersumo_golden_registers. {offset, mask, value} triples.
 */
615 static const u32 sumo_golden_registers[] =
616 {
617 	0x900c, 0x00ffffff, 0x0017071f,
618 	0x8c18, 0xffffffff, 0x10101060,
619 	0x8c1c, 0xffffffff, 0x00001010,
620 	0x8c30, 0x0000000f, 0x00000005,
621 	0x9688, 0x0000000f, 0x00000007
622 };
623 
/*
 * Golden register fixups for Wrestler (CHIP_PALM APU).
 * {offset, mask, value} triples.
 */
624 static const u32 wrestler_golden_registers[] =
625 {
626 	0x5eb4, 0xffffffff, 0x00000002,
627 	0x5cc, 0xffffffff, 0x00000001,
628 	0x7030, 0xffffffff, 0x00000011,
629 	0x7c30, 0xffffffff, 0x00000011,
630 	0x6104, 0x01000300, 0x00000000,
631 	0x5bc0, 0x00300000, 0x00000000,
632 	0x918c, 0xffffffff, 0x00010006,
633 	0x91a8, 0xffffffff, 0x00010006,
634 	0x9150, 0xffffffff, 0x6e944040,
635 	0x917c, 0xffffffff, 0x00030002,
636 	0x9198, 0xffffffff, 0x00030002,
637 	0x915c, 0xffffffff, 0x00010000,
638 	0x3f90, 0xffff0000, 0xff000000,
639 	0x9178, 0xffffffff, 0x00070000,
640 	0x9194, 0xffffffff, 0x00070000,
641 	0x9148, 0xffff0000, 0xff000000,
642 	0x9190, 0xffffffff, 0x00090008,
643 	0x91ac, 0xffffffff, 0x00090008,
644 	0x3f94, 0xffff0000, 0xff000000,
645 	0x914c, 0xffff0000, 0xff000000,
646 	0x929c, 0xffffffff, 0x00000001,
647 	0x8a18, 0xffffffff, 0x00000100,
648 	0x8b28, 0xffffffff, 0x00000100,
649 	0x9144, 0xffffffff, 0x00000100,
650 	0x9b7c, 0xffffffff, 0x00000000,
651 	0x8030, 0xffffffff, 0x0000100a,
652 	0x8a14, 0xffffffff, 0x00000001,
653 	0x8b24, 0xffffffff, 0x00ff0fff,
654 	0x8b10, 0xffffffff, 0x00000000,
655 	0x28a4c, 0x06000000, 0x06000000,
656 	0x4d8, 0xffffffff, 0x00000100,
657 	0x913c, 0xffff000f, 0x0100000a,
658 	0x960c, 0xffffffff, 0x54763210,
659 	0x88c4, 0xffffffff, 0x000000c2,
660 	0x88d4, 0xffffffff, 0x00000010,
661 	0x8974, 0xffffffff, 0x00000000,
662 	0xc78, 0x00000080, 0x00000080,
663 	0x5e78, 0xffffffff, 0x001000f0,
664 	0xd02c, 0xffffffff, 0x08421000,
665 	0xa008, 0xffffffff, 0x00010000,
666 	0x8d00, 0xffffffff, 0x100e4848,
667 	0x8d04, 0xffffffff, 0x00164745,
668 	0x8c00, 0xffffffff, 0xe4000003,
669 	0x8cf0, 0x1fffffff, 0x08e00410,
670 	0x28350, 0xffffffff, 0x00000000,
671 	0x9508, 0xffffffff, 0x00000002,
672 	0x900c, 0xffffffff, 0x0017071f,
673 	0x8c18, 0xffffffff, 0x10101060,
674 	0x8c1c, 0xffffffff, 0x00001010
675 };
676 
/*
 * Golden register fixups for Barts (Northern Islands).
 * {offset, mask, value} triples; note the masks here are mostly
 * partial (not 0xffffffff) unlike the Evergreen tables above.
 */
677 static const u32 barts_golden_registers[] =
678 {
679 	0x5eb4, 0xffffffff, 0x00000002,
680 	0x5e78, 0x8f311ff1, 0x001000f0,
681 	0x3f90, 0xffff0000, 0xff000000,
682 	0x9148, 0xffff0000, 0xff000000,
683 	0x3f94, 0xffff0000, 0xff000000,
684 	0x914c, 0xffff0000, 0xff000000,
685 	0xc78, 0x00000080, 0x00000080,
686 	0xbd4, 0x70073777, 0x00010001,
687 	0xd02c, 0xbfffff1f, 0x08421000,
688 	0xd0b8, 0x03773777, 0x02011003,
689 	0x5bc0, 0x00200000, 0x50100000,
690 	0x98f8, 0x33773777, 0x02011003,
691 	0x98fc, 0xffffffff, 0x76543210,
692 	0x7030, 0x31000311, 0x00000011,
693 	0x2f48, 0x00000007, 0x02011003,
694 	0x6b28, 0x00000010, 0x00000012,
695 	0x7728, 0x00000010, 0x00000012,
696 	0x10328, 0x00000010, 0x00000012,
697 	0x10f28, 0x00000010, 0x00000012,
698 	0x11b28, 0x00000010, 0x00000012,
699 	0x12728, 0x00000010, 0x00000012,
700 	0x240c, 0x000007ff, 0x00000380,
701 	0x8a14, 0xf000001f, 0x00000007,
702 	0x8b24, 0x3fff3fff, 0x00ff0fff,
703 	0x8b10, 0x0000ff0f, 0x00000000,
704 	0x28a4c, 0x07ffffff, 0x06000000,
705 	0x10c, 0x00000001, 0x00010003,
706 	0xa02c, 0xffffffff, 0x0000009b,
707 	0x913c, 0x0000000f, 0x0100000a,
708 	0x8d00, 0xffff7f7f, 0x100e4848,
709 	0x8d04, 0x00ffffff, 0x00164745,
710 	0x8c00, 0xfffc0003, 0xe4000003,
711 	0x8c04, 0xf8ff00ff, 0x40600060,
712 	0x8c08, 0x00ff00ff, 0x001c001c,
713 	0x8cf0, 0x1fff1fff, 0x08e00620,
714 	0x8c20, 0x0fff0fff, 0x00800080,
715 	0x8c24, 0x0fff0fff, 0x00800080,
716 	0x8c18, 0xffffffff, 0x20202078,
717 	0x8c1c, 0x0000ffff, 0x00001010,
718 	0x28350, 0x00000f01, 0x00000000,
719 	0x9508, 0x3700001f, 0x00000002,
720 	0x960c, 0xffffffff, 0x54763210,
721 	0x88c4, 0x001f3ae3, 0x000000c2,
722 	0x88d4, 0x0000001f, 0x00000010,
723 	0x8974, 0xffffffff, 0x00000000
724 };
725 
/*
 * Golden register fixups for Turks (Northern Islands).
 * {offset, mask, value} triples.
 */
726 static const u32 turks_golden_registers[] =
727 {
728 	0x5eb4, 0xffffffff, 0x00000002,
729 	0x5e78, 0x8f311ff1, 0x001000f0,
730 	0x8c8, 0x00003000, 0x00001070,
731 	0x8cc, 0x000fffff, 0x00040035,
732 	0x3f90, 0xffff0000, 0xfff00000,
733 	0x9148, 0xffff0000, 0xfff00000,
734 	0x3f94, 0xffff0000, 0xfff00000,
735 	0x914c, 0xffff0000, 0xfff00000,
736 	0xc78, 0x00000080, 0x00000080,
737 	0xbd4, 0x00073007, 0x00010002,
738 	0xd02c, 0xbfffff1f, 0x08421000,
739 	0xd0b8, 0x03773777, 0x02010002,
740 	0x5bc0, 0x00200000, 0x50100000,
741 	0x98f8, 0x33773777, 0x00010002,
742 	0x98fc, 0xffffffff, 0x33221100,
743 	0x7030, 0x31000311, 0x00000011,
744 	0x2f48, 0x33773777, 0x00010002,
745 	0x6b28, 0x00000010, 0x00000012,
746 	0x7728, 0x00000010, 0x00000012,
747 	0x10328, 0x00000010, 0x00000012,
748 	0x10f28, 0x00000010, 0x00000012,
749 	0x11b28, 0x00000010, 0x00000012,
750 	0x12728, 0x00000010, 0x00000012,
751 	0x240c, 0x000007ff, 0x00000380,
752 	0x8a14, 0xf000001f, 0x00000007,
753 	0x8b24, 0x3fff3fff, 0x00ff0fff,
754 	0x8b10, 0x0000ff0f, 0x00000000,
755 	0x28a4c, 0x07ffffff, 0x06000000,
756 	0x10c, 0x00000001, 0x00010003,
757 	0xa02c, 0xffffffff, 0x0000009b,
758 	0x913c, 0x0000000f, 0x0100000a,
759 	0x8d00, 0xffff7f7f, 0x100e4848,
760 	0x8d04, 0x00ffffff, 0x00164745,
761 	0x8c00, 0xfffc0003, 0xe4000003,
762 	0x8c04, 0xf8ff00ff, 0x40600060,
763 	0x8c08, 0x00ff00ff, 0x001c001c,
764 	0x8cf0, 0x1fff1fff, 0x08e00410,
765 	0x8c20, 0x0fff0fff, 0x00800080,
766 	0x8c24, 0x0fff0fff, 0x00800080,
767 	0x8c18, 0xffffffff, 0x20202078,
768 	0x8c1c, 0x0000ffff, 0x00001010,
769 	0x28350, 0x00000f01, 0x00000000,
770 	0x9508, 0x3700001f, 0x00000002,
771 	0x960c, 0xffffffff, 0x54763210,
772 	0x88c4, 0x001f3ae3, 0x000000c2,
773 	0x88d4, 0x0000001f, 0x00000010,
774 	0x8974, 0xffffffff, 0x00000000
775 };
776 
/*
 * Golden register fixups for Caicos (Northern Islands).
 * {offset, mask, value} triples.
 */
777 static const u32 caicos_golden_registers[] =
778 {
779 	0x5eb4, 0xffffffff, 0x00000002,
780 	0x5e78, 0x8f311ff1, 0x001000f0,
781 	0x8c8, 0x00003420, 0x00001450,
782 	0x8cc, 0x000fffff, 0x00040035,
783 	0x3f90, 0xffff0000, 0xfffc0000,
784 	0x9148, 0xffff0000, 0xfffc0000,
785 	0x3f94, 0xffff0000, 0xfffc0000,
786 	0x914c, 0xffff0000, 0xfffc0000,
787 	0xc78, 0x00000080, 0x00000080,
788 	0xbd4, 0x00073007, 0x00010001,
789 	0xd02c, 0xbfffff1f, 0x08421000,
790 	0xd0b8, 0x03773777, 0x02010001,
791 	0x5bc0, 0x00200000, 0x50100000,
792 	0x98f8, 0x33773777, 0x02010001,
793 	0x98fc, 0xffffffff, 0x33221100,
794 	0x7030, 0x31000311, 0x00000011,
795 	0x2f48, 0x33773777, 0x02010001,
796 	0x6b28, 0x00000010, 0x00000012,
797 	0x7728, 0x00000010, 0x00000012,
798 	0x10328, 0x00000010, 0x00000012,
799 	0x10f28, 0x00000010, 0x00000012,
800 	0x11b28, 0x00000010, 0x00000012,
801 	0x12728, 0x00000010, 0x00000012,
802 	0x240c, 0x000007ff, 0x00000380,
803 	0x8a14, 0xf000001f, 0x00000001,
804 	0x8b24, 0x3fff3fff, 0x00ff0fff,
805 	0x8b10, 0x0000ff0f, 0x00000000,
806 	0x28a4c, 0x07ffffff, 0x06000000,
807 	0x10c, 0x00000001, 0x00010003,
808 	0xa02c, 0xffffffff, 0x0000009b,
809 	0x913c, 0x0000000f, 0x0100000a,
810 	0x8d00, 0xffff7f7f, 0x100e4848,
811 	0x8d04, 0x00ffffff, 0x00164745,
812 	0x8c00, 0xfffc0003, 0xe4000003,
813 	0x8c04, 0xf8ff00ff, 0x40600060,
814 	0x8c08, 0x00ff00ff, 0x001c001c,
815 	0x8cf0, 0x1fff1fff, 0x08e00410,
816 	0x8c20, 0x0fff0fff, 0x00800080,
817 	0x8c24, 0x0fff0fff, 0x00800080,
818 	0x8c18, 0xffffffff, 0x20202078,
819 	0x8c1c, 0x0000ffff, 0x00001010,
820 	0x28350, 0x00000f01, 0x00000000,
821 	0x9508, 0x3700001f, 0x00000002,
822 	0x960c, 0xffffffff, 0x54763210,
823 	0x88c4, 0x001f3ae3, 0x000000c2,
824 	0x88d4, 0x0000001f, 0x00000010,
825 	0x8974, 0xffffffff, 0x00000000
826 };
827 
828 static void evergreen_init_golden_registers(struct radeon_device *rdev)
829 {
830 	switch (rdev->family) {
831 	case CHIP_CYPRESS:
832 	case CHIP_HEMLOCK:
833 		radeon_program_register_sequence(rdev,
834 						 evergreen_golden_registers,
835 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
836 		radeon_program_register_sequence(rdev,
837 						 evergreen_golden_registers2,
838 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
839 		radeon_program_register_sequence(rdev,
840 						 cypress_mgcg_init,
841 						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
842 		break;
843 	case CHIP_JUNIPER:
844 		radeon_program_register_sequence(rdev,
845 						 evergreen_golden_registers,
846 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
847 		radeon_program_register_sequence(rdev,
848 						 evergreen_golden_registers2,
849 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
850 		radeon_program_register_sequence(rdev,
851 						 juniper_mgcg_init,
852 						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
853 		break;
854 	case CHIP_REDWOOD:
855 		radeon_program_register_sequence(rdev,
856 						 evergreen_golden_registers,
857 						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
858 		radeon_program_register_sequence(rdev,
859 						 evergreen_golden_registers2,
860 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
861 		radeon_program_register_sequence(rdev,
862 						 redwood_mgcg_init,
863 						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
864 		break;
865 	case CHIP_CEDAR:
866 		radeon_program_register_sequence(rdev,
867 						 cedar_golden_registers,
868 						 (const u32)ARRAY_SIZE(cedar_golden_registers));
869 		radeon_program_register_sequence(rdev,
870 						 evergreen_golden_registers2,
871 						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
872 		radeon_program_register_sequence(rdev,
873 						 cedar_mgcg_init,
874 						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
875 		break;
876 	case CHIP_PALM:
877 		radeon_program_register_sequence(rdev,
878 						 wrestler_golden_registers,
879 						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
880 		break;
881 	case CHIP_SUMO:
882 		radeon_program_register_sequence(rdev,
883 						 supersumo_golden_registers,
884 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
885 		break;
886 	case CHIP_SUMO2:
887 		radeon_program_register_sequence(rdev,
888 						 supersumo_golden_registers,
889 						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
890 		radeon_program_register_sequence(rdev,
891 						 sumo_golden_registers,
892 						 (const u32)ARRAY_SIZE(sumo_golden_registers));
893 		break;
894 	case CHIP_BARTS:
895 		radeon_program_register_sequence(rdev,
896 						 barts_golden_registers,
897 						 (const u32)ARRAY_SIZE(barts_golden_registers));
898 		break;
899 	case CHIP_TURKS:
900 		radeon_program_register_sequence(rdev,
901 						 turks_golden_registers,
902 						 (const u32)ARRAY_SIZE(turks_golden_registers));
903 		break;
904 	case CHIP_CAICOS:
905 		radeon_program_register_sequence(rdev,
906 						 caicos_golden_registers,
907 						 (const u32)ARRAY_SIZE(caicos_golden_registers));
908 		break;
909 	default:
910 		break;
911 	}
912 }
913 
914 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
915 			     unsigned *bankh, unsigned *mtaspect,
916 			     unsigned *tile_split)
917 {
918 	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
919 	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
920 	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
921 	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
922 	switch (*bankw) {
923 	default:
924 	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
925 	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
926 	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
927 	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
928 	}
929 	switch (*bankh) {
930 	default:
931 	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
932 	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
933 	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
934 	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
935 	}
936 	switch (*mtaspect) {
937 	default:
938 	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
939 	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
940 	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
941 	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
942 	}
943 }
944 
945 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
946 			      u32 cntl_reg, u32 status_reg)
947 {
948 	int r, i;
949 	struct atom_clock_dividers dividers;
950 
951         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
952 					   clock, false, &dividers);
953 	if (r)
954 		return r;
955 
956 	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
957 
958 	for (i = 0; i < 100; i++) {
959 		if (RREG32(status_reg) & DCLK_STATUS)
960 			break;
961 		mdelay(10);
962 	}
963 	if (i == 100)
964 		return -ETIMEDOUT;
965 
966 	return 0;
967 }
968 
969 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
970 {
971 	int r = 0;
972 	u32 cg_scratch = RREG32(CG_SCRATCH1);
973 
974 	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
975 	if (r)
976 		goto done;
977 	cg_scratch &= 0xffff0000;
978 	cg_scratch |= vclk / 100; /* Mhz */
979 
980 	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
981 	if (r)
982 		goto done;
983 	cg_scratch &= 0x0000ffff;
984 	cg_scratch |= (dclk / 100) << 16; /* Mhz */
985 
986 done:
987 	WREG32(CG_SCRATCH1, cg_scratch);
988 
989 	return r;
990 }
991 
/**
 * evergreen_set_uvd_clocks - program the UVD PLL for the requested clocks
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD VCLK (0 puts the PLL to sleep)
 * @dclk: requested UVD DCLK (0 puts the PLL to sleep)
 *
 * Reprograms the UPLL: switches VCLK/DCLK to the bypass clock, puts
 * the PLL in bypass, computes and programs the dividers, brings the
 * PLL back up and finally switches VCLK/DCLK onto it.  The register
 * write ordering and the delays below follow the required hardware
 * programming sequence — do not reorder.
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* dividers computed by radeon_uvd_calc_upll_dividers() below */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* find fb/post dividers within the allowed VCO and divider ranges */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* ISPARE9 selection depends on the feedback divider magnitude;
	 * NOTE(review): threshold taken as-is from the programming sequence
	 */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1080 
1081 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1082 {
1083 	u16 ctl, v;
1084 	int err;
1085 
1086 	err = pcie_capability_read_word(rdev->pdev, PCI_EXP_DEVCTL, &ctl);
1087 	if (err)
1088 		return;
1089 
1090 	v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
1091 
1092 	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1093 	 * to avoid hangs or perfomance issues
1094 	 */
1095 	if ((v == 0) || (v == 6) || (v == 7)) {
1096 		ctl &= ~PCI_EXP_DEVCTL_READRQ;
1097 		ctl |= (2 << 12);
1098 		pcie_capability_write_word(rdev->pdev, PCI_EXP_DEVCTL, ctl);
1099 	}
1100 }
1101 
1102 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1103 {
1104 	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1105 		return true;
1106 	else
1107 		return false;
1108 }
1109 
1110 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1111 {
1112 	u32 pos1, pos2;
1113 
1114 	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1115 	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1116 
1117 	if (pos1 != pos2)
1118 		return true;
1119 	else
1120 		return false;
1121 }
1122 
1123 /**
1124  * dce4_wait_for_vblank - vblank wait asic callback.
1125  *
1126  * @rdev: radeon_device pointer
1127  * @crtc: crtc to wait for vblank on
1128  *
1129  * Wait for vblank on the requested crtc (evergreen+).
1130  */
1131 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1132 {
1133 	unsigned i = 0;
1134 
1135 	if (crtc >= rdev->num_crtc)
1136 		return;
1137 
1138 	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1139 		return;
1140 
1141 	/* depending on when we hit vblank, we may be close to active; if so,
1142 	 * wait for another frame.
1143 	 */
1144 	while (dce4_is_in_vblank(rdev, crtc)) {
1145 		if (i++ % 100 == 0) {
1146 			if (!dce4_is_counter_moving(rdev, crtc))
1147 				break;
1148 		}
1149 	}
1150 
1151 	while (!dce4_is_in_vblank(rdev, crtc)) {
1152 		if (i++ % 100 == 0) {
1153 			if (!dce4_is_counter_moving(rdev, crtc))
1154 				break;
1155 		}
1156 	}
1157 }
1158 
1159 /**
1160  * radeon_irq_kms_pflip_irq_get - pre-pageflip callback.
1161  *
1162  * @rdev: radeon_device pointer
1163  * @crtc: crtc to prepare for pageflip on
1164  *
1165  * Pre-pageflip callback (evergreen+).
1166  * Enables the pageflip irq (vblank irq).
1167  */
1168 void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1169 {
1170 	/* enable the pflip int */
1171 	radeon_irq_kms_pflip_irq_get(rdev, crtc);
1172 }
1173 
1174 /**
1175  * evergreen_post_page_flip - pos-pageflip callback.
1176  *
1177  * @rdev: radeon_device pointer
1178  * @crtc: crtc to cleanup pageflip on
1179  *
1180  * Post-pageflip callback (evergreen+).
1181  * Disables the pageflip irq (vblank irq).
1182  */
1183 void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1184 {
1185 	/* disable the pflip int */
1186 	radeon_irq_kms_pflip_irq_put(rdev, crtc);
1187 }
1188 
1189 /**
1190  * evergreen_page_flip - pageflip callback.
1191  *
1192  * @rdev: radeon_device pointer
1193  * @crtc_id: crtc to cleanup pageflip on
1194  * @crtc_base: new address of the crtc (GPU MC address)
1195  *
1196  * Does the actual pageflip (evergreen+).
1197  * During vblank we take the crtc lock and wait for the update_pending
1198  * bit to go high, when it does, we release the lock, and allow the
1199  * double buffered update to take place.
1200  * Returns the current update pending status.
1201  */
1202 u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1203 {
1204 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1205 	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1206 	int i;
1207 
1208 	/* Lock the graphics update lock */
1209 	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1210 	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1211 
1212 	/* update the scanout addresses */
1213 	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1214 	       upper_32_bits(crtc_base));
1215 	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1216 	       (u32)crtc_base);
1217 
1218 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1219 	       upper_32_bits(crtc_base));
1220 	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1221 	       (u32)crtc_base);
1222 
1223 	/* Wait for update_pending to go high. */
1224 	for (i = 0; i < rdev->usec_timeout; i++) {
1225 		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1226 			break;
1227 		udelay(1);
1228 	}
1229 	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1230 
1231 	/* Unlock the lock, so double-buffering can take place inside vblank */
1232 	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1233 	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1234 
1235 	/* Return current update_pending status: */
1236 	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1237 }
1238 
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper: combine the raw sensor reading with a signed
		 * 9-bit calibration offset from CG_THERMAL_CTRL
		 */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* bit 8 is the sign of the offset: negative offsets are
		 * stored in two's complement within the 9-bit field
		 */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees -> millidegrees */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			/* out-of-range low: clamp */
			actual_temp = -256;
		else if (temp & 0x200)
			/* out-of-range high: clamp */
			actual_temp = 255;
		else if (temp & 0x100) {
			/* negative 9-bit reading: sign-extend into the int */
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		/* reading is in half-degree units -> millidegrees */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1277 
1278 int sumo_get_temp(struct radeon_device *rdev)
1279 {
1280 	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1281 	int actual_temp = temp - 49;
1282 
1283 	return actual_temp * 1000;
1284 }
1285 
1286 /**
1287  * sumo_pm_init_profile - Initialize power profiles callback.
1288  *
1289  * @rdev: radeon_device pointer
1290  *
1291  * Initialize the power states used in profile mode
1292  * (sumo, trinity, SI).
1293  * Used for profile mode only.
1294  */
1295 void sumo_pm_init_profile(struct radeon_device *rdev)
1296 {
1297 	int idx;
1298 
1299 	/* default */
1300 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1301 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1302 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1303 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1304 
1305 	/* low,mid sh/mh */
1306 	if (rdev->flags & RADEON_IS_MOBILITY)
1307 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1308 	else
1309 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1310 
1311 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1312 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1313 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1314 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1315 
1316 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1317 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1318 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1319 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1320 
1321 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1322 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1323 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1324 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1325 
1326 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1327 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1328 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1329 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1330 
1331 	/* high sh/mh */
1332 	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1333 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1334 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1335 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1336 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1337 		rdev->pm.power_state[idx].num_clock_modes - 1;
1338 
1339 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1340 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1341 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1342 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1343 		rdev->pm.power_state[idx].num_clock_modes - 1;
1344 }
1345 
1346 /**
1347  * btc_pm_init_profile - Initialize power profiles callback.
1348  *
1349  * @rdev: radeon_device pointer
1350  *
1351  * Initialize the power states used in profile mode
1352  * (BTC, cayman).
1353  * Used for profile mode only.
1354  */
1355 void btc_pm_init_profile(struct radeon_device *rdev)
1356 {
1357 	int idx;
1358 
1359 	/* default */
1360 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1361 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1362 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1363 	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1364 	/* starting with BTC, there is one state that is used for both
1365 	 * MH and SH.  Difference is that we always use the high clock index for
1366 	 * mclk.
1367 	 */
1368 	if (rdev->flags & RADEON_IS_MOBILITY)
1369 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1370 	else
1371 		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1372 	/* low sh */
1373 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1374 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1375 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1376 	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1377 	/* mid sh */
1378 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1379 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1380 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1381 	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1382 	/* high sh */
1383 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1384 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1385 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1386 	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1387 	/* low mh */
1388 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1389 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1390 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1391 	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1392 	/* mid mh */
1393 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1394 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1395 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1396 	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1397 	/* high mh */
1398 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1399 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1400 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1401 	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1402 }
1403 
1404 /**
1405  * evergreen_pm_misc - set additional pm hw parameters callback.
1406  *
1407  * @rdev: radeon_device pointer
1408  *
1409  * Set non-clock parameters associated with a power state
1410  * (voltage, etc.) (evergreen+).
1411  */
1412 void evergreen_pm_misc(struct radeon_device *rdev)
1413 {
1414 	int req_ps_idx = rdev->pm.requested_power_state_index;
1415 	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1416 	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1417 	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1418 
1419 	if (voltage->type == VOLTAGE_SW) {
1420 		/* 0xff01 is a flag rather then an actual voltage */
1421 		if (voltage->voltage == 0xff01)
1422 			return;
1423 		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1424 			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1425 			rdev->pm.current_vddc = voltage->voltage;
1426 			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1427 		}
1428 
1429 		/* starting with BTC, there is one state that is used for both
1430 		 * MH and SH.  Difference is that we always use the high clock index for
1431 		 * mclk and vddci.
1432 		 */
1433 		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1434 		    (rdev->family >= CHIP_BARTS) &&
1435 		    rdev->pm.active_crtc_count &&
1436 		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1437 		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1438 			voltage = &rdev->pm.power_state[req_ps_idx].
1439 				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1440 
1441 		/* 0xff01 is a flag rather then an actual voltage */
1442 		if (voltage->vddci == 0xff01)
1443 			return;
1444 		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1445 			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1446 			rdev->pm.current_vddci = voltage->vddci;
1447 			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1448 		}
1449 	}
1450 }
1451 
1452 /**
1453  * evergreen_pm_prepare - pre-power state change callback.
1454  *
1455  * @rdev: radeon_device pointer
1456  *
1457  * Prepare for a power state change (evergreen+).
1458  */
1459 void evergreen_pm_prepare(struct radeon_device *rdev)
1460 {
1461 	struct drm_device *ddev = rdev->ddev;
1462 	struct drm_crtc *crtc;
1463 	struct radeon_crtc *radeon_crtc;
1464 	u32 tmp;
1465 
1466 	/* disable any active CRTCs */
1467 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1468 		radeon_crtc = to_radeon_crtc(crtc);
1469 		if (radeon_crtc->enabled) {
1470 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1471 			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1472 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1473 		}
1474 	}
1475 }
1476 
1477 /**
1478  * evergreen_pm_finish - post-power state change callback.
1479  *
1480  * @rdev: radeon_device pointer
1481  *
1482  * Clean up after a power state change (evergreen+).
1483  */
1484 void evergreen_pm_finish(struct radeon_device *rdev)
1485 {
1486 	struct drm_device *ddev = rdev->ddev;
1487 	struct drm_crtc *crtc;
1488 	struct radeon_crtc *radeon_crtc;
1489 	u32 tmp;
1490 
1491 	/* enable any active CRTCs */
1492 	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1493 		radeon_crtc = to_radeon_crtc(crtc);
1494 		if (radeon_crtc->enabled) {
1495 			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1496 			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1497 			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1498 		}
1499 	}
1500 }
1501 
1502 /**
1503  * evergreen_hpd_sense - hpd sense callback.
1504  *
1505  * @rdev: radeon_device pointer
1506  * @hpd: hpd (hotplug detect) pin
1507  *
1508  * Checks if a digital monitor is connected (evergreen+).
1509  * Returns true if connected, false if not connected.
1510  */
1511 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1512 {
1513 	bool connected = false;
1514 
1515 	switch (hpd) {
1516 	case RADEON_HPD_1:
1517 		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1518 			connected = true;
1519 		break;
1520 	case RADEON_HPD_2:
1521 		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1522 			connected = true;
1523 		break;
1524 	case RADEON_HPD_3:
1525 		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1526 			connected = true;
1527 		break;
1528 	case RADEON_HPD_4:
1529 		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1530 			connected = true;
1531 		break;
1532 	case RADEON_HPD_5:
1533 		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1534 			connected = true;
1535 		break;
1536 	case RADEON_HPD_6:
1537 		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1538 			connected = true;
1539 			break;
1540 	default:
1541 		break;
1542 	}
1543 
1544 	return connected;
1545 }
1546 
1547 /**
1548  * evergreen_hpd_set_polarity - hpd set polarity callback.
1549  *
1550  * @rdev: radeon_device pointer
1551  * @hpd: hpd (hotplug detect) pin
1552  *
1553  * Set the polarity of the hpd pin (evergreen+).
1554  */
1555 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1556 				enum radeon_hpd_id hpd)
1557 {
1558 	u32 tmp;
1559 	bool connected = evergreen_hpd_sense(rdev, hpd);
1560 
1561 	switch (hpd) {
1562 	case RADEON_HPD_1:
1563 		tmp = RREG32(DC_HPD1_INT_CONTROL);
1564 		if (connected)
1565 			tmp &= ~DC_HPDx_INT_POLARITY;
1566 		else
1567 			tmp |= DC_HPDx_INT_POLARITY;
1568 		WREG32(DC_HPD1_INT_CONTROL, tmp);
1569 		break;
1570 	case RADEON_HPD_2:
1571 		tmp = RREG32(DC_HPD2_INT_CONTROL);
1572 		if (connected)
1573 			tmp &= ~DC_HPDx_INT_POLARITY;
1574 		else
1575 			tmp |= DC_HPDx_INT_POLARITY;
1576 		WREG32(DC_HPD2_INT_CONTROL, tmp);
1577 		break;
1578 	case RADEON_HPD_3:
1579 		tmp = RREG32(DC_HPD3_INT_CONTROL);
1580 		if (connected)
1581 			tmp &= ~DC_HPDx_INT_POLARITY;
1582 		else
1583 			tmp |= DC_HPDx_INT_POLARITY;
1584 		WREG32(DC_HPD3_INT_CONTROL, tmp);
1585 		break;
1586 	case RADEON_HPD_4:
1587 		tmp = RREG32(DC_HPD4_INT_CONTROL);
1588 		if (connected)
1589 			tmp &= ~DC_HPDx_INT_POLARITY;
1590 		else
1591 			tmp |= DC_HPDx_INT_POLARITY;
1592 		WREG32(DC_HPD4_INT_CONTROL, tmp);
1593 		break;
1594 	case RADEON_HPD_5:
1595 		tmp = RREG32(DC_HPD5_INT_CONTROL);
1596 		if (connected)
1597 			tmp &= ~DC_HPDx_INT_POLARITY;
1598 		else
1599 			tmp |= DC_HPDx_INT_POLARITY;
1600 		WREG32(DC_HPD5_INT_CONTROL, tmp);
1601 			break;
1602 	case RADEON_HPD_6:
1603 		tmp = RREG32(DC_HPD6_INT_CONTROL);
1604 		if (connected)
1605 			tmp &= ~DC_HPDx_INT_POLARITY;
1606 		else
1607 			tmp |= DC_HPDx_INT_POLARITY;
1608 		WREG32(DC_HPD6_INT_CONTROL, tmp);
1609 		break;
1610 	default:
1611 		break;
1612 	}
1613 }
1614 
1615 /**
1616  * evergreen_hpd_init - hpd setup callback.
1617  *
1618  * @rdev: radeon_device pointer
1619  *
1620  * Setup the hpd pins used by the card (evergreen+).
1621  * Enable the pin, set the polarity, and enable the hpd interrupts.
1622  */
1623 void evergreen_hpd_init(struct radeon_device *rdev)
1624 {
1625 	struct drm_device *dev = rdev->ddev;
1626 	struct drm_connector *connector;
1627 	unsigned enabled = 0;
1628 	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1629 		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1630 
1631 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1632 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1633 
1634 		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1635 		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1636 			/* don't try to enable hpd on eDP or LVDS avoid breaking the
1637 			 * aux dp channel on imac and help (but not completely fix)
1638 			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1639 			 * also avoid interrupt storms during dpms.
1640 			 */
1641 			continue;
1642 		}
1643 		switch (radeon_connector->hpd.hpd) {
1644 		case RADEON_HPD_1:
1645 			WREG32(DC_HPD1_CONTROL, tmp);
1646 			break;
1647 		case RADEON_HPD_2:
1648 			WREG32(DC_HPD2_CONTROL, tmp);
1649 			break;
1650 		case RADEON_HPD_3:
1651 			WREG32(DC_HPD3_CONTROL, tmp);
1652 			break;
1653 		case RADEON_HPD_4:
1654 			WREG32(DC_HPD4_CONTROL, tmp);
1655 			break;
1656 		case RADEON_HPD_5:
1657 			WREG32(DC_HPD5_CONTROL, tmp);
1658 			break;
1659 		case RADEON_HPD_6:
1660 			WREG32(DC_HPD6_CONTROL, tmp);
1661 			break;
1662 		default:
1663 			break;
1664 		}
1665 		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1666 		enabled |= 1 << radeon_connector->hpd.hpd;
1667 	}
1668 	radeon_irq_kms_enable_hpd(rdev, enabled);
1669 }
1670 
1671 /**
1672  * evergreen_hpd_fini - hpd tear down callback.
1673  *
1674  * @rdev: radeon_device pointer
1675  *
1676  * Tear down the hpd pins used by the card (evergreen+).
1677  * Disable the hpd interrupts.
1678  */
1679 void evergreen_hpd_fini(struct radeon_device *rdev)
1680 {
1681 	struct drm_device *dev = rdev->ddev;
1682 	struct drm_connector *connector;
1683 	unsigned disabled = 0;
1684 
1685 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1686 		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1687 		switch (radeon_connector->hpd.hpd) {
1688 		case RADEON_HPD_1:
1689 			WREG32(DC_HPD1_CONTROL, 0);
1690 			break;
1691 		case RADEON_HPD_2:
1692 			WREG32(DC_HPD2_CONTROL, 0);
1693 			break;
1694 		case RADEON_HPD_3:
1695 			WREG32(DC_HPD3_CONTROL, 0);
1696 			break;
1697 		case RADEON_HPD_4:
1698 			WREG32(DC_HPD4_CONTROL, 0);
1699 			break;
1700 		case RADEON_HPD_5:
1701 			WREG32(DC_HPD5_CONTROL, 0);
1702 			break;
1703 		case RADEON_HPD_6:
1704 			WREG32(DC_HPD6_CONTROL, 0);
1705 			break;
1706 		default:
1707 			break;
1708 		}
1709 		disabled |= 1 << radeon_connector->hpd.hpd;
1710 	}
1711 	radeon_irq_kms_disable_hpd(rdev, disabled);
1712 }
1713 
1714 /* watermark setup */
1715 
1716 static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1717 					struct radeon_crtc *radeon_crtc,
1718 					struct drm_display_mode *mode,
1719 					struct drm_display_mode *other_mode)
1720 {
1721 	u32 tmp;
1722 	/*
1723 	 * Line Buffer Setup
1724 	 * There are 3 line buffers, each one shared by 2 display controllers.
1725 	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1726 	 * the display controllers.  The paritioning is done via one of four
1727 	 * preset allocations specified in bits 2:0:
1728 	 * first display controller
1729 	 *  0 - first half of lb (3840 * 2)
1730 	 *  1 - first 3/4 of lb (5760 * 2)
1731 	 *  2 - whole lb (7680 * 2), other crtc must be disabled
1732 	 *  3 - first 1/4 of lb (1920 * 2)
1733 	 * second display controller
1734 	 *  4 - second half of lb (3840 * 2)
1735 	 *  5 - second 3/4 of lb (5760 * 2)
1736 	 *  6 - whole lb (7680 * 2), other crtc must be disabled
1737 	 *  7 - last 1/4 of lb (1920 * 2)
1738 	 */
1739 	/* this can get tricky if we have two large displays on a paired group
1740 	 * of crtcs.  Ideally for multiple large displays we'd assign them to
1741 	 * non-linked crtcs for maximum line buffer allocation.
1742 	 */
1743 	if (radeon_crtc->base.enabled && mode) {
1744 		if (other_mode)
1745 			tmp = 0; /* 1/2 */
1746 		else
1747 			tmp = 2; /* whole */
1748 	} else
1749 		tmp = 0;
1750 
1751 	/* second controller of the pair uses second half of the lb */
1752 	if (radeon_crtc->crtc_id % 2)
1753 		tmp += 4;
1754 	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1755 
1756 	if (radeon_crtc->base.enabled && mode) {
1757 		switch (tmp) {
1758 		case 0:
1759 		case 4:
1760 		default:
1761 			if (ASIC_IS_DCE5(rdev))
1762 				return 4096 * 2;
1763 			else
1764 				return 3840 * 2;
1765 		case 1:
1766 		case 5:
1767 			if (ASIC_IS_DCE5(rdev))
1768 				return 6144 * 2;
1769 			else
1770 				return 5760 * 2;
1771 		case 2:
1772 		case 6:
1773 			if (ASIC_IS_DCE5(rdev))
1774 				return 8192 * 2;
1775 			else
1776 				return 7680 * 2;
1777 		case 3:
1778 		case 7:
1779 			if (ASIC_IS_DCE5(rdev))
1780 				return 2048 * 2;
1781 			else
1782 				return 1920 * 2;
1783 		}
1784 	}
1785 
1786 	/* controller not enabled, so no lb used */
1787 	return 0;
1788 }
1789 
1790 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1791 {
1792 	u32 tmp = RREG32(MC_SHARED_CHMAP);
1793 
1794 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1795 	case 0:
1796 	default:
1797 		return 1;
1798 	case 1:
1799 		return 2;
1800 	case 2:
1801 		return 4;
1802 	case 3:
1803 		return 8;
1804 	}
1805 }
1806 
/* inputs for the display watermark calculations below; filled in per
 * crtc from the current mode and clock state
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1822 
1823 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1824 {
1825 	/* Calculate DRAM Bandwidth and the part allocated to display. */
1826 	fixed20_12 dram_efficiency; /* 0.7 */
1827 	fixed20_12 yclk, dram_channels, bandwidth;
1828 	fixed20_12 a;
1829 
1830 	a.full = dfixed_const(1000);
1831 	yclk.full = dfixed_const(wm->yclk);
1832 	yclk.full = dfixed_div(yclk, a);
1833 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1834 	a.full = dfixed_const(10);
1835 	dram_efficiency.full = dfixed_const(7);
1836 	dram_efficiency.full = dfixed_div(dram_efficiency, a);
1837 	bandwidth.full = dfixed_mul(dram_channels, yclk);
1838 	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1839 
1840 	return dfixed_trunc(bandwidth);
1841 }
1842 
1843 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1844 {
1845 	/* Calculate DRAM Bandwidth and the part allocated to display. */
1846 	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1847 	fixed20_12 yclk, dram_channels, bandwidth;
1848 	fixed20_12 a;
1849 
1850 	a.full = dfixed_const(1000);
1851 	yclk.full = dfixed_const(wm->yclk);
1852 	yclk.full = dfixed_div(yclk, a);
1853 	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1854 	a.full = dfixed_const(10);
1855 	disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1856 	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1857 	bandwidth.full = dfixed_mul(dram_channels, yclk);
1858 	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1859 
1860 	return dfixed_trunc(bandwidth);
1861 }
1862 
1863 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
1864 {
1865 	/* Calculate the display Data return Bandwidth */
1866 	fixed20_12 return_efficiency; /* 0.8 */
1867 	fixed20_12 sclk, bandwidth;
1868 	fixed20_12 a;
1869 
1870 	a.full = dfixed_const(1000);
1871 	sclk.full = dfixed_const(wm->sclk);
1872 	sclk.full = dfixed_div(sclk, a);
1873 	a.full = dfixed_const(10);
1874 	return_efficiency.full = dfixed_const(8);
1875 	return_efficiency.full = dfixed_div(return_efficiency, a);
1876 	a.full = dfixed_const(32);
1877 	bandwidth.full = dfixed_mul(a, sclk);
1878 	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1879 
1880 	return dfixed_trunc(bandwidth);
1881 }
1882 
1883 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
1884 {
1885 	/* Calculate the DMIF Request Bandwidth */
1886 	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1887 	fixed20_12 disp_clk, bandwidth;
1888 	fixed20_12 a;
1889 
1890 	a.full = dfixed_const(1000);
1891 	disp_clk.full = dfixed_const(wm->disp_clk);
1892 	disp_clk.full = dfixed_div(disp_clk, a);
1893 	a.full = dfixed_const(10);
1894 	disp_clk_request_efficiency.full = dfixed_const(8);
1895 	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1896 	a.full = dfixed_const(32);
1897 	bandwidth.full = dfixed_mul(a, disp_clk);
1898 	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
1899 
1900 	return dfixed_trunc(bandwidth);
1901 }
1902 
1903 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
1904 {
1905 	/* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1906 	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
1907 	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
1908 	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
1909 
1910 	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
1911 }
1912 
1913 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
1914 {
1915 	/* Calculate the display mode Average Bandwidth
1916 	 * DisplayMode should contain the source and destination dimensions,
1917 	 * timing, etc.
1918 	 */
1919 	fixed20_12 bpp;
1920 	fixed20_12 line_time;
1921 	fixed20_12 src_width;
1922 	fixed20_12 bandwidth;
1923 	fixed20_12 a;
1924 
1925 	a.full = dfixed_const(1000);
1926 	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1927 	line_time.full = dfixed_div(line_time, a);
1928 	bpp.full = dfixed_const(wm->bytes_per_pixel);
1929 	src_width.full = dfixed_const(wm->src_width);
1930 	bandwidth.full = dfixed_mul(src_width, bpp);
1931 	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1932 	bandwidth.full = dfixed_div(bandwidth, line_time);
1933 
1934 	return dfixed_trunc(bandwidth);
1935 }
1936 
/* Worst-case latency this head must tolerate, combining memory
 * controller latency, data returned on behalf of the other active heads
 * and the dc pipe latency.  If the line buffer cannot be refilled within
 * the active display time, the shortfall is added on top.
 */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* time to return one 512-byte chunk at the available bandwidth */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	/* time to return a cursor line pair */
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavy downscaling or interlacing needs up to 4 source lines
	 * per destination line, otherwise 2 suffice */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* line buffer fill bandwidth: the lesser of our per-head share of
	 * the available bandwidth and what the display pipe can consume */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time needed to refill the line buffer for one output line */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
1989 
1990 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1991 {
1992 	if (evergreen_average_bandwidth(wm) <=
1993 	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
1994 		return true;
1995 	else
1996 		return false;
1997 };
1998 
1999 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2000 {
2001 	if (evergreen_average_bandwidth(wm) <=
2002 	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2003 		return true;
2004 	else
2005 		return false;
2006 };
2007 
2008 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2009 {
2010 	u32 lb_partitions = wm->lb_size / wm->src_width;
2011 	u32 line_time = wm->active_time + wm->blank_time;
2012 	u32 latency_tolerant_lines;
2013 	u32 latency_hiding;
2014 	fixed20_12 a;
2015 
2016 	a.full = dfixed_const(1);
2017 	if (wm->vsc.full > a.full)
2018 		latency_tolerant_lines = 1;
2019 	else {
2020 		if (lb_partitions <= (wm->vtaps + 1))
2021 			latency_tolerant_lines = 1;
2022 		else
2023 			latency_tolerant_lines = 2;
2024 	}
2025 
2026 	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2027 
2028 	if (evergreen_latency_watermark(wm) <= latency_hiding)
2029 		return true;
2030 	else
2031 		return false;
2032 }
2033 
/**
 * evergreen_program_watermarks - program display watermarks for one crtc
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: the crtc to program
 * @lb_size: line buffer space allocated to this crtc
 * @num_heads: number of active display heads
 *
 * Derives latency watermarks A/B and the priority marks from the current
 * mode, clocks and DRAM configuration, then writes them to the pipe's
 * arbitration/latency registers and the crtc's priority registers.
 */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;

		/* fill in the watermark parameters from the current state */
		wm.yclk = rdev->pm.current_mclk * 10;
		wm.sclk = rdev->pm.current_sclk * 10;
		wm.disp_clk = mode->clock;
		wm.src_width = mode->crtc_hdisplay;
		wm.active_time = mode->crtc_hdisplay * pixel_period;
		wm.blank_time = line_time - wm.active_time;
		wm.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm.interlaced = true;
		wm.vsc = radeon_crtc->vsc;
		wm.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm.vtaps = 2;
		wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm.lb_size = lb_size;
		wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
		wm.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
		/* set for low clocks */
		/* wm.yclk = low clk; wm.sclk = low clk */
		latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
		    !evergreen_check_latency_hiding(&wm) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A: watermark scaled by pixel clock and the
		 * horizontal scale ratio, in units of 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B: same computation with watermark B */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

}
2141 
2142 /**
2143  * evergreen_bandwidth_update - update display watermarks callback.
2144  *
2145  * @rdev: radeon_device pointer
2146  *
2147  * Update the display watermarks based on the requested mode(s)
2148  * (evergreen+).
2149  */
2150 void evergreen_bandwidth_update(struct radeon_device *rdev)
2151 {
2152 	struct drm_display_mode *mode0 = NULL;
2153 	struct drm_display_mode *mode1 = NULL;
2154 	u32 num_heads = 0, lb_size;
2155 	int i;
2156 
2157 	radeon_update_display_priority(rdev);
2158 
2159 	for (i = 0; i < rdev->num_crtc; i++) {
2160 		if (rdev->mode_info.crtcs[i]->base.enabled)
2161 			num_heads++;
2162 	}
2163 	for (i = 0; i < rdev->num_crtc; i += 2) {
2164 		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2165 		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2166 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2167 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2168 		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2169 		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2170 	}
2171 }
2172 
2173 /**
2174  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2175  *
2176  * @rdev: radeon_device pointer
2177  *
2178  * Wait for the MC (memory controller) to be idle.
2179  * (evergreen+).
2180  * Returns 0 if the MC is idle, -1 if not.
2181  */
2182 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2183 {
2184 	unsigned i;
2185 	u32 tmp;
2186 
2187 	for (i = 0; i < rdev->usec_timeout; i++) {
2188 		/* read MC_STATUS */
2189 		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2190 		if (!tmp)
2191 			return 0;
2192 		udelay(1);
2193 	}
2194 	return -1;
2195 }
2196 
2197 /*
2198  * GART
2199  */
2200 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2201 {
2202 	unsigned i;
2203 	u32 tmp;
2204 
2205 	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2206 
2207 	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2208 	for (i = 0; i < rdev->usec_timeout; i++) {
2209 		/* read MC_STATUS */
2210 		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2211 		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2212 		if (tmp == 2) {
2213 			printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2214 			return;
2215 		}
2216 		if (tmp) {
2217 			return;
2218 		}
2219 		udelay(1);
2220 	}
2221 }
2222 
/* Pin the GART page table in VRAM and program the VM L1/L2 caches and
 * VM context 0 so GPU accesses in the GTT range are translated through
 * the page table.  Returns 0 on success or a negative error code.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* IGPs (fusion) use a different set of MD TLB registers */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these chips have an extra MD TLB bank */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* point VM context 0 at the GTT range and the page table */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* faulting accesses are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2281 
/* Disable GART translation: turn off both VM contexts, drop the L2/TLB
 * enable bits, then unpin the page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control (no ENABLE_L1_TLB: translation off) */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2306 
/* Tear down the GART completely: disable translation, free the page
 * table VRAM object and release the gart bookkeeping.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2313 
2314 
/* Configure the VM caches/TLBs for AGP operation: caching enabled but
 * both VM contexts disabled, so no page table translation takes place.
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page table translation in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2340 
/* Stop all display access to the framebuffer so the MC can be safely
 * reprogrammed: disable VGA rendering, blank/disable every crtc, put the
 * MC into blackout and lock the double-buffered display registers.  The
 * pre-existing state is recorded in @save for evergreen_mc_resume().
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank the crtc via BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* NOTE: deliberately recorded as disabled after the
			 * EFI workaround above turned the crtc off */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2426 
/* Undo evergreen_mc_stop(): repoint the crtcs at the (possibly moved)
 * VRAM base, unlock the double-buffered registers, lift the MC blackout,
 * re-enable the displays and restore the saved VGA state from @save.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* put MASTER_UPDATE_MODE back to immediate updates */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x3) != 0) {
				tmp &= ~0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the surface update to latch */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2514 
/* Program the memory controller's VRAM/GTT apertures.  Displays are
 * stopped around the update (evergreen_mc_stop/resume) because the MC
 * must be idle while the FB location registers change.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* system aperture must cover both VRAM and AGP/GTT */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start/end packed in 16MB units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* disable the AGP aperture (BOT > TOP) */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2591 
2592 /*
2593  * CP.
2594  */
/* Schedule an indirect buffer on the ring: switch to DX10/11 mode, emit
 * an optional next-rptr update (via scratch register or memory write),
 * then the INDIRECT_BUFFER packet pointing at the IB.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* record the rptr expected after this submission in the
		 * save register (wptr + remaining packet dwords) */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* write the expected rptr to the writeback buffer instead */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2628 
2629 
2630 static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2631 {
2632 	const __be32 *fw_data;
2633 	int i;
2634 
2635 	if (!rdev->me_fw || !rdev->pfp_fw)
2636 		return -EINVAL;
2637 
2638 	r700_cp_stop(rdev);
2639 	WREG32(CP_RB_CNTL,
2640 #ifdef __BIG_ENDIAN
2641 	       BUF_SWAP_32BIT |
2642 #endif
2643 	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2644 
2645 	fw_data = (const __be32 *)rdev->pfp_fw->data;
2646 	WREG32(CP_PFP_UCODE_ADDR, 0);
2647 	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2648 		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2649 	WREG32(CP_PFP_UCODE_ADDR, 0);
2650 
2651 	fw_data = (const __be32 *)rdev->me_fw->data;
2652 	WREG32(CP_ME_RAM_WADDR, 0);
2653 	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2654 		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2655 
2656 	WREG32(CP_PFP_UCODE_ADDR, 0);
2657 	WREG32(CP_ME_RAM_WADDR, 0);
2658 	WREG32(CP_ME_RAM_RADDR, 0);
2659 	return 0;
2660 }
2661 
/* Initialize the CP after microcode load: emit ME_INITIALIZE, un-halt
 * the ME, then emit the golden clear-state/default register stream.
 * Returns 0 on success or the ring-lock error code.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring);

	/* un-halt the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	/* emit the golden register defaults */
	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring);

	return 0;
}
2727 
/**
 * evergreen_cp_resume - set up and start the gfx ring
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP (and the blocks that must be reset along with it),
 * programs the ring buffer registers (size, rptr/wptr, base, writeback
 * addresses), kicks the CP off via evergreen_cp_start() and finishes
 * with a ring test.
 *
 * Returns 0 on success, negative error code if the ring test fails.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size (log2 encoded in the low bits of CP_RB_CNTL) */
	rb_bufsz = drm_order(ring->ring_size / 8);
	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback buffer: keep the CP from writing rptr updates */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	/* drop RB_RPTR_WR_ENA again now that the pointers are initialized */
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	ring->rptr = RREG32(CP_RB_RPTR);

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
2796 
2797 /*
2798  * Core functions
2799  */
2800 static void evergreen_gpu_init(struct radeon_device *rdev)
2801 {
2802 	u32 gb_addr_config;
2803 	u32 mc_shared_chmap, mc_arb_ramcfg;
2804 	u32 sx_debug_1;
2805 	u32 smx_dc_ctl0;
2806 	u32 sq_config;
2807 	u32 sq_lds_resource_mgmt;
2808 	u32 sq_gpr_resource_mgmt_1;
2809 	u32 sq_gpr_resource_mgmt_2;
2810 	u32 sq_gpr_resource_mgmt_3;
2811 	u32 sq_thread_resource_mgmt;
2812 	u32 sq_thread_resource_mgmt_2;
2813 	u32 sq_stack_resource_mgmt_1;
2814 	u32 sq_stack_resource_mgmt_2;
2815 	u32 sq_stack_resource_mgmt_3;
2816 	u32 vgt_cache_invalidation;
2817 	u32 hdp_host_path_cntl, tmp;
2818 	u32 disabled_rb_mask;
2819 	int i, j, num_shader_engines, ps_thread_count;
2820 
2821 	switch (rdev->family) {
2822 	case CHIP_CYPRESS:
2823 	case CHIP_HEMLOCK:
2824 		rdev->config.evergreen.num_ses = 2;
2825 		rdev->config.evergreen.max_pipes = 4;
2826 		rdev->config.evergreen.max_tile_pipes = 8;
2827 		rdev->config.evergreen.max_simds = 10;
2828 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2829 		rdev->config.evergreen.max_gprs = 256;
2830 		rdev->config.evergreen.max_threads = 248;
2831 		rdev->config.evergreen.max_gs_threads = 32;
2832 		rdev->config.evergreen.max_stack_entries = 512;
2833 		rdev->config.evergreen.sx_num_of_sets = 4;
2834 		rdev->config.evergreen.sx_max_export_size = 256;
2835 		rdev->config.evergreen.sx_max_export_pos_size = 64;
2836 		rdev->config.evergreen.sx_max_export_smx_size = 192;
2837 		rdev->config.evergreen.max_hw_contexts = 8;
2838 		rdev->config.evergreen.sq_num_cf_insts = 2;
2839 
2840 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2841 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2842 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2843 		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
2844 		break;
2845 	case CHIP_JUNIPER:
2846 		rdev->config.evergreen.num_ses = 1;
2847 		rdev->config.evergreen.max_pipes = 4;
2848 		rdev->config.evergreen.max_tile_pipes = 4;
2849 		rdev->config.evergreen.max_simds = 10;
2850 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2851 		rdev->config.evergreen.max_gprs = 256;
2852 		rdev->config.evergreen.max_threads = 248;
2853 		rdev->config.evergreen.max_gs_threads = 32;
2854 		rdev->config.evergreen.max_stack_entries = 512;
2855 		rdev->config.evergreen.sx_num_of_sets = 4;
2856 		rdev->config.evergreen.sx_max_export_size = 256;
2857 		rdev->config.evergreen.sx_max_export_pos_size = 64;
2858 		rdev->config.evergreen.sx_max_export_smx_size = 192;
2859 		rdev->config.evergreen.max_hw_contexts = 8;
2860 		rdev->config.evergreen.sq_num_cf_insts = 2;
2861 
2862 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2863 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2864 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2865 		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
2866 		break;
2867 	case CHIP_REDWOOD:
2868 		rdev->config.evergreen.num_ses = 1;
2869 		rdev->config.evergreen.max_pipes = 4;
2870 		rdev->config.evergreen.max_tile_pipes = 4;
2871 		rdev->config.evergreen.max_simds = 5;
2872 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2873 		rdev->config.evergreen.max_gprs = 256;
2874 		rdev->config.evergreen.max_threads = 248;
2875 		rdev->config.evergreen.max_gs_threads = 32;
2876 		rdev->config.evergreen.max_stack_entries = 256;
2877 		rdev->config.evergreen.sx_num_of_sets = 4;
2878 		rdev->config.evergreen.sx_max_export_size = 256;
2879 		rdev->config.evergreen.sx_max_export_pos_size = 64;
2880 		rdev->config.evergreen.sx_max_export_smx_size = 192;
2881 		rdev->config.evergreen.max_hw_contexts = 8;
2882 		rdev->config.evergreen.sq_num_cf_insts = 2;
2883 
2884 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
2885 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2886 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2887 		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
2888 		break;
2889 	case CHIP_CEDAR:
2890 	default:
2891 		rdev->config.evergreen.num_ses = 1;
2892 		rdev->config.evergreen.max_pipes = 2;
2893 		rdev->config.evergreen.max_tile_pipes = 2;
2894 		rdev->config.evergreen.max_simds = 2;
2895 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2896 		rdev->config.evergreen.max_gprs = 256;
2897 		rdev->config.evergreen.max_threads = 192;
2898 		rdev->config.evergreen.max_gs_threads = 16;
2899 		rdev->config.evergreen.max_stack_entries = 256;
2900 		rdev->config.evergreen.sx_num_of_sets = 4;
2901 		rdev->config.evergreen.sx_max_export_size = 128;
2902 		rdev->config.evergreen.sx_max_export_pos_size = 32;
2903 		rdev->config.evergreen.sx_max_export_smx_size = 96;
2904 		rdev->config.evergreen.max_hw_contexts = 4;
2905 		rdev->config.evergreen.sq_num_cf_insts = 1;
2906 
2907 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2908 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2909 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2910 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
2911 		break;
2912 	case CHIP_PALM:
2913 		rdev->config.evergreen.num_ses = 1;
2914 		rdev->config.evergreen.max_pipes = 2;
2915 		rdev->config.evergreen.max_tile_pipes = 2;
2916 		rdev->config.evergreen.max_simds = 2;
2917 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2918 		rdev->config.evergreen.max_gprs = 256;
2919 		rdev->config.evergreen.max_threads = 192;
2920 		rdev->config.evergreen.max_gs_threads = 16;
2921 		rdev->config.evergreen.max_stack_entries = 256;
2922 		rdev->config.evergreen.sx_num_of_sets = 4;
2923 		rdev->config.evergreen.sx_max_export_size = 128;
2924 		rdev->config.evergreen.sx_max_export_pos_size = 32;
2925 		rdev->config.evergreen.sx_max_export_smx_size = 96;
2926 		rdev->config.evergreen.max_hw_contexts = 4;
2927 		rdev->config.evergreen.sq_num_cf_insts = 1;
2928 
2929 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2930 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2931 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2932 		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
2933 		break;
2934 	case CHIP_SUMO:
2935 		rdev->config.evergreen.num_ses = 1;
2936 		rdev->config.evergreen.max_pipes = 4;
2937 		rdev->config.evergreen.max_tile_pipes = 4;
2938 		if (rdev->pdev->device == 0x9648)
2939 			rdev->config.evergreen.max_simds = 3;
2940 		else if ((rdev->pdev->device == 0x9647) ||
2941 			 (rdev->pdev->device == 0x964a))
2942 			rdev->config.evergreen.max_simds = 4;
2943 		else
2944 			rdev->config.evergreen.max_simds = 5;
2945 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
2946 		rdev->config.evergreen.max_gprs = 256;
2947 		rdev->config.evergreen.max_threads = 248;
2948 		rdev->config.evergreen.max_gs_threads = 32;
2949 		rdev->config.evergreen.max_stack_entries = 256;
2950 		rdev->config.evergreen.sx_num_of_sets = 4;
2951 		rdev->config.evergreen.sx_max_export_size = 256;
2952 		rdev->config.evergreen.sx_max_export_pos_size = 64;
2953 		rdev->config.evergreen.sx_max_export_smx_size = 192;
2954 		rdev->config.evergreen.max_hw_contexts = 8;
2955 		rdev->config.evergreen.sq_num_cf_insts = 2;
2956 
2957 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2958 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2959 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2960 		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
2961 		break;
2962 	case CHIP_SUMO2:
2963 		rdev->config.evergreen.num_ses = 1;
2964 		rdev->config.evergreen.max_pipes = 4;
2965 		rdev->config.evergreen.max_tile_pipes = 4;
2966 		rdev->config.evergreen.max_simds = 2;
2967 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
2968 		rdev->config.evergreen.max_gprs = 256;
2969 		rdev->config.evergreen.max_threads = 248;
2970 		rdev->config.evergreen.max_gs_threads = 32;
2971 		rdev->config.evergreen.max_stack_entries = 512;
2972 		rdev->config.evergreen.sx_num_of_sets = 4;
2973 		rdev->config.evergreen.sx_max_export_size = 256;
2974 		rdev->config.evergreen.sx_max_export_pos_size = 64;
2975 		rdev->config.evergreen.sx_max_export_smx_size = 192;
2976 		rdev->config.evergreen.max_hw_contexts = 8;
2977 		rdev->config.evergreen.sq_num_cf_insts = 2;
2978 
2979 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
2980 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
2981 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
2982 		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
2983 		break;
2984 	case CHIP_BARTS:
2985 		rdev->config.evergreen.num_ses = 2;
2986 		rdev->config.evergreen.max_pipes = 4;
2987 		rdev->config.evergreen.max_tile_pipes = 8;
2988 		rdev->config.evergreen.max_simds = 7;
2989 		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
2990 		rdev->config.evergreen.max_gprs = 256;
2991 		rdev->config.evergreen.max_threads = 248;
2992 		rdev->config.evergreen.max_gs_threads = 32;
2993 		rdev->config.evergreen.max_stack_entries = 512;
2994 		rdev->config.evergreen.sx_num_of_sets = 4;
2995 		rdev->config.evergreen.sx_max_export_size = 256;
2996 		rdev->config.evergreen.sx_max_export_pos_size = 64;
2997 		rdev->config.evergreen.sx_max_export_smx_size = 192;
2998 		rdev->config.evergreen.max_hw_contexts = 8;
2999 		rdev->config.evergreen.sq_num_cf_insts = 2;
3000 
3001 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3002 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3003 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3004 		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3005 		break;
3006 	case CHIP_TURKS:
3007 		rdev->config.evergreen.num_ses = 1;
3008 		rdev->config.evergreen.max_pipes = 4;
3009 		rdev->config.evergreen.max_tile_pipes = 4;
3010 		rdev->config.evergreen.max_simds = 6;
3011 		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3012 		rdev->config.evergreen.max_gprs = 256;
3013 		rdev->config.evergreen.max_threads = 248;
3014 		rdev->config.evergreen.max_gs_threads = 32;
3015 		rdev->config.evergreen.max_stack_entries = 256;
3016 		rdev->config.evergreen.sx_num_of_sets = 4;
3017 		rdev->config.evergreen.sx_max_export_size = 256;
3018 		rdev->config.evergreen.sx_max_export_pos_size = 64;
3019 		rdev->config.evergreen.sx_max_export_smx_size = 192;
3020 		rdev->config.evergreen.max_hw_contexts = 8;
3021 		rdev->config.evergreen.sq_num_cf_insts = 2;
3022 
3023 		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3024 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3025 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3026 		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3027 		break;
3028 	case CHIP_CAICOS:
3029 		rdev->config.evergreen.num_ses = 1;
3030 		rdev->config.evergreen.max_pipes = 2;
3031 		rdev->config.evergreen.max_tile_pipes = 2;
3032 		rdev->config.evergreen.max_simds = 2;
3033 		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3034 		rdev->config.evergreen.max_gprs = 256;
3035 		rdev->config.evergreen.max_threads = 192;
3036 		rdev->config.evergreen.max_gs_threads = 16;
3037 		rdev->config.evergreen.max_stack_entries = 256;
3038 		rdev->config.evergreen.sx_num_of_sets = 4;
3039 		rdev->config.evergreen.sx_max_export_size = 128;
3040 		rdev->config.evergreen.sx_max_export_pos_size = 32;
3041 		rdev->config.evergreen.sx_max_export_smx_size = 96;
3042 		rdev->config.evergreen.max_hw_contexts = 4;
3043 		rdev->config.evergreen.sq_num_cf_insts = 1;
3044 
3045 		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3046 		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3047 		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3048 		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3049 		break;
3050 	}
3051 
3052 	/* Initialize HDP */
3053 	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3054 		WREG32((0x2c14 + j), 0x00000000);
3055 		WREG32((0x2c18 + j), 0x00000000);
3056 		WREG32((0x2c1c + j), 0x00000000);
3057 		WREG32((0x2c20 + j), 0x00000000);
3058 		WREG32((0x2c24 + j), 0x00000000);
3059 	}
3060 
3061 	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3062 
3063 	evergreen_fix_pci_max_read_req_size(rdev);
3064 
3065 	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3066 	if ((rdev->family == CHIP_PALM) ||
3067 	    (rdev->family == CHIP_SUMO) ||
3068 	    (rdev->family == CHIP_SUMO2))
3069 		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3070 	else
3071 		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3072 
3073 	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3074 	 * not have bank info, so create a custom tiling dword.
3075 	 * bits 3:0   num_pipes
3076 	 * bits 7:4   num_banks
3077 	 * bits 11:8  group_size
3078 	 * bits 15:12 row_size
3079 	 */
3080 	rdev->config.evergreen.tile_config = 0;
3081 	switch (rdev->config.evergreen.max_tile_pipes) {
3082 	case 1:
3083 	default:
3084 		rdev->config.evergreen.tile_config |= (0 << 0);
3085 		break;
3086 	case 2:
3087 		rdev->config.evergreen.tile_config |= (1 << 0);
3088 		break;
3089 	case 4:
3090 		rdev->config.evergreen.tile_config |= (2 << 0);
3091 		break;
3092 	case 8:
3093 		rdev->config.evergreen.tile_config |= (3 << 0);
3094 		break;
3095 	}
3096 	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3097 	if (rdev->flags & RADEON_IS_IGP)
3098 		rdev->config.evergreen.tile_config |= 1 << 4;
3099 	else {
3100 		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3101 		case 0: /* four banks */
3102 			rdev->config.evergreen.tile_config |= 0 << 4;
3103 			break;
3104 		case 1: /* eight banks */
3105 			rdev->config.evergreen.tile_config |= 1 << 4;
3106 			break;
3107 		case 2: /* sixteen banks */
3108 		default:
3109 			rdev->config.evergreen.tile_config |= 2 << 4;
3110 			break;
3111 		}
3112 	}
3113 	rdev->config.evergreen.tile_config |= 0 << 8;
3114 	rdev->config.evergreen.tile_config |=
3115 		((gb_addr_config & 0x30000000) >> 28) << 12;
3116 
3117 	num_shader_engines = (gb_addr_config & NUM_SHADER_ENGINES(3) >> 12) + 1;
3118 
3119 	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3120 		u32 efuse_straps_4;
3121 		u32 efuse_straps_3;
3122 
3123 		WREG32(RCU_IND_INDEX, 0x204);
3124 		efuse_straps_4 = RREG32(RCU_IND_DATA);
3125 		WREG32(RCU_IND_INDEX, 0x203);
3126 		efuse_straps_3 = RREG32(RCU_IND_DATA);
3127 		tmp = (((efuse_straps_4 & 0xf) << 4) |
3128 		      ((efuse_straps_3 & 0xf0000000) >> 28));
3129 	} else {
3130 		tmp = 0;
3131 		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3132 			u32 rb_disable_bitmap;
3133 
3134 			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3135 			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3136 			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3137 			tmp <<= 4;
3138 			tmp |= rb_disable_bitmap;
3139 		}
3140 	}
3141 	/* enabled rb are just the one not disabled :) */
3142 	disabled_rb_mask = tmp;
3143 	tmp = 0;
3144 	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3145 		tmp |= (1 << i);
3146 	/* if all the backends are disabled, fix it up here */
3147 	if ((disabled_rb_mask & tmp) == tmp) {
3148 		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3149 			disabled_rb_mask &= ~(1 << i);
3150 	}
3151 
3152 	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3153 	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3154 
3155 	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3156 	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3157 	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3158 	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3159 	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3160 	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3161 	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3162 
3163 	if ((rdev->config.evergreen.max_backends == 1) &&
3164 	    (rdev->flags & RADEON_IS_IGP)) {
3165 		if ((disabled_rb_mask & 3) == 1) {
3166 			/* RB0 disabled, RB1 enabled */
3167 			tmp = 0x11111111;
3168 		} else {
3169 			/* RB1 disabled, RB0 enabled */
3170 			tmp = 0x00000000;
3171 		}
3172 	} else {
3173 		tmp = gb_addr_config & NUM_PIPES_MASK;
3174 		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3175 						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3176 	}
3177 	WREG32(GB_BACKEND_MAP, tmp);
3178 
3179 	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3180 	WREG32(CGTS_TCC_DISABLE, 0);
3181 	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3182 	WREG32(CGTS_USER_TCC_DISABLE, 0);
3183 
3184 	/* set HW defaults for 3D engine */
3185 	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3186 				     ROQ_IB2_START(0x2b)));
3187 
3188 	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3189 
3190 	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3191 			     SYNC_GRADIENT |
3192 			     SYNC_WALKER |
3193 			     SYNC_ALIGNER));
3194 
3195 	sx_debug_1 = RREG32(SX_DEBUG_1);
3196 	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3197 	WREG32(SX_DEBUG_1, sx_debug_1);
3198 
3199 
3200 	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3201 	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3202 	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3203 	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3204 
3205 	if (rdev->family <= CHIP_SUMO2)
3206 		WREG32(SMX_SAR_CTL0, 0x00010000);
3207 
3208 	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3209 					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3210 					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3211 
3212 	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3213 				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3214 				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3215 
3216 	WREG32(VGT_NUM_INSTANCES, 1);
3217 	WREG32(SPI_CONFIG_CNTL, 0);
3218 	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3219 	WREG32(CP_PERFMON_CNTL, 0);
3220 
3221 	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3222 				  FETCH_FIFO_HIWATER(0x4) |
3223 				  DONE_FIFO_HIWATER(0xe0) |
3224 				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3225 
3226 	sq_config = RREG32(SQ_CONFIG);
3227 	sq_config &= ~(PS_PRIO(3) |
3228 		       VS_PRIO(3) |
3229 		       GS_PRIO(3) |
3230 		       ES_PRIO(3));
3231 	sq_config |= (VC_ENABLE |
3232 		      EXPORT_SRC_C |
3233 		      PS_PRIO(0) |
3234 		      VS_PRIO(1) |
3235 		      GS_PRIO(2) |
3236 		      ES_PRIO(3));
3237 
3238 	switch (rdev->family) {
3239 	case CHIP_CEDAR:
3240 	case CHIP_PALM:
3241 	case CHIP_SUMO:
3242 	case CHIP_SUMO2:
3243 	case CHIP_CAICOS:
3244 		/* no vertex cache */
3245 		sq_config &= ~VC_ENABLE;
3246 		break;
3247 	default:
3248 		break;
3249 	}
3250 
3251 	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3252 
3253 	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3254 	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3255 	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3256 	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3257 	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3258 	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3259 	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3260 
3261 	switch (rdev->family) {
3262 	case CHIP_CEDAR:
3263 	case CHIP_PALM:
3264 	case CHIP_SUMO:
3265 	case CHIP_SUMO2:
3266 		ps_thread_count = 96;
3267 		break;
3268 	default:
3269 		ps_thread_count = 128;
3270 		break;
3271 	}
3272 
3273 	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3274 	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3275 	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3276 	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3277 	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3278 	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3279 
3280 	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3281 	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3282 	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3283 	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3284 	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3285 	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3286 
3287 	WREG32(SQ_CONFIG, sq_config);
3288 	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3289 	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3290 	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3291 	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3292 	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3293 	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3294 	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3295 	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3296 	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3297 	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3298 
3299 	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3300 					  FORCE_EOV_MAX_REZ_CNT(255)));
3301 
3302 	switch (rdev->family) {
3303 	case CHIP_CEDAR:
3304 	case CHIP_PALM:
3305 	case CHIP_SUMO:
3306 	case CHIP_SUMO2:
3307 	case CHIP_CAICOS:
3308 		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3309 		break;
3310 	default:
3311 		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3312 		break;
3313 	}
3314 	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3315 	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3316 
3317 	WREG32(VGT_GS_VERTEX_REUSE, 16);
3318 	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3319 	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3320 
3321 	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3322 	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3323 
3324 	WREG32(CB_PERF_CTR0_SEL_0, 0);
3325 	WREG32(CB_PERF_CTR0_SEL_1, 0);
3326 	WREG32(CB_PERF_CTR1_SEL_0, 0);
3327 	WREG32(CB_PERF_CTR1_SEL_1, 0);
3328 	WREG32(CB_PERF_CTR2_SEL_0, 0);
3329 	WREG32(CB_PERF_CTR2_SEL_1, 0);
3330 	WREG32(CB_PERF_CTR3_SEL_0, 0);
3331 	WREG32(CB_PERF_CTR3_SEL_1, 0);
3332 
3333 	/* clear render buffer base addresses */
3334 	WREG32(CB_COLOR0_BASE, 0);
3335 	WREG32(CB_COLOR1_BASE, 0);
3336 	WREG32(CB_COLOR2_BASE, 0);
3337 	WREG32(CB_COLOR3_BASE, 0);
3338 	WREG32(CB_COLOR4_BASE, 0);
3339 	WREG32(CB_COLOR5_BASE, 0);
3340 	WREG32(CB_COLOR6_BASE, 0);
3341 	WREG32(CB_COLOR7_BASE, 0);
3342 	WREG32(CB_COLOR8_BASE, 0);
3343 	WREG32(CB_COLOR9_BASE, 0);
3344 	WREG32(CB_COLOR10_BASE, 0);
3345 	WREG32(CB_COLOR11_BASE, 0);
3346 
3347 	/* set the shader const cache sizes to 0 */
3348 	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3349 		WREG32(i, 0);
3350 	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3351 		WREG32(i, 0);
3352 
3353 	tmp = RREG32(HDP_MISC_CNTL);
3354 	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3355 	WREG32(HDP_MISC_CNTL, tmp);
3356 
3357 	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3358 	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3359 
3360 	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3361 
3362 	udelay(50);
3363 
3364 }
3365 
3366 int evergreen_mc_init(struct radeon_device *rdev)
3367 {
3368 	u32 tmp;
3369 	int chansize, numchan;
3370 
3371 	/* Get VRAM informations */
3372 	rdev->mc.vram_is_ddr = true;
3373 	if ((rdev->family == CHIP_PALM) ||
3374 	    (rdev->family == CHIP_SUMO) ||
3375 	    (rdev->family == CHIP_SUMO2))
3376 		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3377 	else
3378 		tmp = RREG32(MC_ARB_RAMCFG);
3379 	if (tmp & CHANSIZE_OVERRIDE) {
3380 		chansize = 16;
3381 	} else if (tmp & CHANSIZE_MASK) {
3382 		chansize = 64;
3383 	} else {
3384 		chansize = 32;
3385 	}
3386 	tmp = RREG32(MC_SHARED_CHMAP);
3387 	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3388 	case 0:
3389 	default:
3390 		numchan = 1;
3391 		break;
3392 	case 1:
3393 		numchan = 2;
3394 		break;
3395 	case 2:
3396 		numchan = 4;
3397 		break;
3398 	case 3:
3399 		numchan = 8;
3400 		break;
3401 	}
3402 	rdev->mc.vram_width = numchan * chansize;
3403 	/* Could aper size report 0 ? */
3404 	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3405 	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3406 	/* Setup GPU memory space */
3407 	if ((rdev->family == CHIP_PALM) ||
3408 	    (rdev->family == CHIP_SUMO) ||
3409 	    (rdev->family == CHIP_SUMO2)) {
3410 		/* size in bytes on fusion */
3411 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3412 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3413 	} else {
3414 		/* size in MB on evergreen/cayman/tn */
3415 		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3416 		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3417 	}
3418 	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3419 	r700_vram_gtt_location(rdev, &rdev->mc);
3420 	radeon_update_bandwidth_info(rdev);
3421 
3422 	return 0;
3423 }
3424 
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM/CP/DMA status registers to the kernel log; used
 * when diagnosing a GPU lockup before attempting a soft reset.
 * Cayman and newer have a second DMA engine (at +0x800) which is
 * dumped as well.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3452 
3453 bool evergreen_is_display_hung(struct radeon_device *rdev)
3454 {
3455 	u32 crtc_hung = 0;
3456 	u32 crtc_status[6];
3457 	u32 i, j, tmp;
3458 
3459 	for (i = 0; i < rdev->num_crtc; i++) {
3460 		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3461 			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3462 			crtc_hung |= (1 << i);
3463 		}
3464 	}
3465 
3466 	for (j = 0; j < 10; j++) {
3467 		for (i = 0; i < rdev->num_crtc; i++) {
3468 			if (crtc_hung & (1 << i)) {
3469 				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3470 				if (tmp != crtc_status[i])
3471 					crtc_hung &= ~(1 << i);
3472 			}
3473 		}
3474 		if (crtc_hung == 0)
3475 			return false;
3476 		udelay(100);
3477 	}
3478 
3479 	return true;
3480 }
3481 
/**
 * evergreen_gpu_check_soft_reset - determine which blocks need a reset
 * @rdev: radeon_device pointer
 *
 * Inspects the GRBM/SRBM/DMA/VM status registers and translates busy
 * bits into RADEON_RESET_* flags.  An MC busy indication is dropped
 * again at the end, since the memory controller is most likely just
 * busy rather than hung.
 *
 * Returns a mask of RADEON_RESET_* flags; 0 if nothing appears hung.
 */
static u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	/* a stuck event engine takes the CP and gfx down with it */
	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3550 
/**
 * evergreen_gpu_soft_reset - soft-reset the engines named in @reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: RADEON_RESET_* bitmask of engines to reset
 *
 * Halts the CP (and DMA if requested), stops the MC while saving display
 * state, translates @reset_mask into GRBM/SRBM soft-reset bits, pulses
 * those reset bits, then restores the MC.  The exact register sequence
 * and delays follow the hardware programming requirements - do not
 * reorder.  No-op if @reset_mask is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop the MC (saves display state) and wait for it to drain */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* map the requested resets onto GRBM soft-reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	/* ... and onto SRBM soft-reset bits */
	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* MC reset is only valid on discrete parts */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* pulse the GRBM reset bits: set, read back to post, wait, clear */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	/* same pulse sequence for the SRBM reset bits */
	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3664 
3665 int evergreen_asic_reset(struct radeon_device *rdev)
3666 {
3667 	u32 reset_mask;
3668 
3669 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3670 
3671 	if (reset_mask)
3672 		r600_set_bios_scratch_engine_hung(rdev, true);
3673 
3674 	evergreen_gpu_soft_reset(rdev, reset_mask);
3675 
3676 	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3677 
3678 	if (!reset_mask)
3679 		r600_set_bios_scratch_engine_hung(rdev, false);
3680 
3681 	return 0;
3682 }
3683 
3684 /**
3685  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3686  *
3687  * @rdev: radeon_device pointer
3688  * @ring: radeon_ring structure holding ring information
3689  *
3690  * Check if the GFX engine is locked up.
3691  * Returns true if the engine appears to be locked up, false if not.
3692  */
3693 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3694 {
3695 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3696 
3697 	if (!(reset_mask & (RADEON_RESET_GFX |
3698 			    RADEON_RESET_COMPUTE |
3699 			    RADEON_RESET_CP))) {
3700 		radeon_ring_lockup_update(ring);
3701 		return false;
3702 	}
3703 	/* force CP activities */
3704 	radeon_ring_force_activity(rdev, ring);
3705 	return radeon_ring_test_lockup(rdev, ring);
3706 }
3707 
3708 /**
3709  * evergreen_dma_is_lockup - Check if the DMA engine is locked up
3710  *
3711  * @rdev: radeon_device pointer
3712  * @ring: radeon_ring structure holding ring information
3713  *
3714  * Check if the async DMA engine is locked up.
3715  * Returns true if the engine appears to be locked up, false if not.
3716  */
3717 bool evergreen_dma_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3718 {
3719 	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3720 
3721 	if (!(reset_mask & RADEON_RESET_DMA)) {
3722 		radeon_ring_lockup_update(ring);
3723 		return false;
3724 	}
3725 	/* force ring activities */
3726 	radeon_ring_force_activity(rdev, ring);
3727 	return radeon_ring_test_lockup(rdev, ring);
3728 }
3729 
3730 /* Interrupts */
3731 
3732 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
3733 {
3734 	if (crtc >= rdev->num_crtc)
3735 		return 0;
3736 	else
3737 		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
3738 }
3739 
/**
 * evergreen_disable_interrupt_state - force all interrupt enables off
 *
 * @rdev: radeon_device pointer
 *
 * Clears every interrupt-enable bit this driver programs: CP (per-ring on
 * Cayman+), DMA trap, GRBM, per-CRTC vblank/vline and pageflip, DAC
 * autodetect and HPD.  HPD polarity bits are preserved while the enable
 * bits are dropped.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* Cayman+ has three CP rings, each with its own int cntl */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	/* vblank/vline interrupt masks, guarded by how many CRTCs exist */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* pageflip (GRPH) interrupt controls */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE6 */
	if (!ASIC_IS_DCE6(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* clear HPD int enables but keep the programmed polarity bits */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
3797 
/**
 * evergreen_irq_set - program interrupt enables from driver irq state
 *
 * @rdev: radeon_device pointer
 *
 * Builds the CP, DMA, vblank, HPD and HDMI/AFMT interrupt-enable values
 * from rdev->irq and writes them to the hardware in one pass.  If the IH
 * is disabled, all interrupt state is forced off instead.
 * Returns 0 on success, -EINVAL if no irq handler is installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* start from current register values with the enable bits cleared */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;

	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->family >= CHIP_CAYMAN) {
		/* second async DMA engine only exists on Cayman+ */
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	/* vblank enables: requested either by drm vblank or pending pflips */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	/* hotplug-detect enables */
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	/* HDMI audio (AFMT) enables */
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* now commit everything to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	return 0;
}
4003 
4004 static void evergreen_irq_ack(struct radeon_device *rdev)
4005 {
4006 	u32 tmp;
4007 
4008 	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4009 	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4010 	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4011 	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4012 	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4013 	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4014 	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4015 	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4016 	if (rdev->num_crtc >= 4) {
4017 		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4018 		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4019 	}
4020 	if (rdev->num_crtc >= 6) {
4021 		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4022 		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4023 	}
4024 
4025 	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4026 	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4027 	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4028 	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4029 	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4030 	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4031 
4032 	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4033 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4034 	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4035 		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4036 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4037 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4038 	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4039 		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4040 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4041 		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4042 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4043 		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4044 
4045 	if (rdev->num_crtc >= 4) {
4046 		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4047 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4048 		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4049 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4050 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4051 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4052 		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4053 			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4054 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4055 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4056 		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4057 			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4058 	}
4059 
4060 	if (rdev->num_crtc >= 6) {
4061 		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4062 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4063 		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4064 			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4065 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4066 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4067 		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4068 			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4069 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4070 			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4071 		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4072 			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4073 	}
4074 
4075 	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4076 		tmp = RREG32(DC_HPD1_INT_CONTROL);
4077 		tmp |= DC_HPDx_INT_ACK;
4078 		WREG32(DC_HPD1_INT_CONTROL, tmp);
4079 	}
4080 	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4081 		tmp = RREG32(DC_HPD2_INT_CONTROL);
4082 		tmp |= DC_HPDx_INT_ACK;
4083 		WREG32(DC_HPD2_INT_CONTROL, tmp);
4084 	}
4085 	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4086 		tmp = RREG32(DC_HPD3_INT_CONTROL);
4087 		tmp |= DC_HPDx_INT_ACK;
4088 		WREG32(DC_HPD3_INT_CONTROL, tmp);
4089 	}
4090 	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4091 		tmp = RREG32(DC_HPD4_INT_CONTROL);
4092 		tmp |= DC_HPDx_INT_ACK;
4093 		WREG32(DC_HPD4_INT_CONTROL, tmp);
4094 	}
4095 	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4096 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4097 		tmp |= DC_HPDx_INT_ACK;
4098 		WREG32(DC_HPD5_INT_CONTROL, tmp);
4099 	}
4100 	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4101 		tmp = RREG32(DC_HPD5_INT_CONTROL);
4102 		tmp |= DC_HPDx_INT_ACK;
4103 		WREG32(DC_HPD6_INT_CONTROL, tmp);
4104 	}
4105 	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4106 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4107 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4108 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4109 	}
4110 	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4111 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4112 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4113 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4114 	}
4115 	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4116 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4117 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4118 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4119 	}
4120 	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4121 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4122 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4123 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4124 	}
4125 	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4126 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4127 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4128 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4129 	}
4130 	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4131 		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4132 		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4133 		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4134 	}
4135 }
4136 
/* Fully silence interrupts: mask at the IH, wait for in-flight irqs to
 * land, ack anything still latched, then clear all per-source enables. */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4145 
/* Quiesce all interrupt sources and stop the RLC ahead of suspend. */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4151 
4152 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4153 {
4154 	u32 wptr, tmp;
4155 
4156 	if (rdev->wb.enabled)
4157 		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4158 	else
4159 		wptr = RREG32(IH_RB_WPTR);
4160 
4161 	if (wptr & RB_OVERFLOW) {
4162 		/* When a ring buffer overflow happen start parsing interrupt
4163 		 * from the last not overwritten vector (wptr + 16). Hopefully
4164 		 * this should allow us to catchup.
4165 		 */
4166 		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4167 			wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
4168 		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4169 		tmp = RREG32(IH_RB_CNTL);
4170 		tmp |= IH_WPTR_OVERFLOW_CLEAR;
4171 		WREG32(IH_RB_CNTL, tmp);
4172 	}
4173 	return (wptr & rdev->ih.ptr_mask);
4174 }
4175 
4176 int evergreen_irq_process(struct radeon_device *rdev)
4177 {
4178 	u32 wptr;
4179 	u32 rptr;
4180 	u32 src_id, src_data;
4181 	u32 ring_index;
4182 	bool queue_hotplug = false;
4183 	bool queue_hdmi = false;
4184 
4185 	if (!rdev->ih.enabled || rdev->shutdown)
4186 		return IRQ_NONE;
4187 
4188 	wptr = evergreen_get_ih_wptr(rdev);
4189 
4190 restart_ih:
4191 	/* is somebody else already processing irqs? */
4192 	if (atomic_xchg(&rdev->ih.lock, 1))
4193 		return IRQ_NONE;
4194 
4195 	rptr = rdev->ih.rptr;
4196 	DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4197 
4198 	/* Order reading of wptr vs. reading of IH ring data */
4199 	rmb();
4200 
4201 	/* display interrupts */
4202 	evergreen_irq_ack(rdev);
4203 
4204 	while (rptr != wptr) {
4205 		/* wptr/rptr are in bytes! */
4206 		ring_index = rptr / 4;
4207 		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4208 		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4209 
4210 		switch (src_id) {
4211 		case 1: /* D1 vblank/vline */
4212 			switch (src_data) {
4213 			case 0: /* D1 vblank */
4214 				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4215 					if (rdev->irq.crtc_vblank_int[0]) {
4216 						drm_handle_vblank(rdev->ddev, 0);
4217 						rdev->pm.vblank_sync = true;
4218 						wake_up(&rdev->irq.vblank_queue);
4219 					}
4220 					if (atomic_read(&rdev->irq.pflip[0]))
4221 						radeon_crtc_handle_flip(rdev, 0);
4222 					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4223 					DRM_DEBUG("IH: D1 vblank\n");
4224 				}
4225 				break;
4226 			case 1: /* D1 vline */
4227 				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4228 					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4229 					DRM_DEBUG("IH: D1 vline\n");
4230 				}
4231 				break;
4232 			default:
4233 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4234 				break;
4235 			}
4236 			break;
4237 		case 2: /* D2 vblank/vline */
4238 			switch (src_data) {
4239 			case 0: /* D2 vblank */
4240 				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4241 					if (rdev->irq.crtc_vblank_int[1]) {
4242 						drm_handle_vblank(rdev->ddev, 1);
4243 						rdev->pm.vblank_sync = true;
4244 						wake_up(&rdev->irq.vblank_queue);
4245 					}
4246 					if (atomic_read(&rdev->irq.pflip[1]))
4247 						radeon_crtc_handle_flip(rdev, 1);
4248 					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4249 					DRM_DEBUG("IH: D2 vblank\n");
4250 				}
4251 				break;
4252 			case 1: /* D2 vline */
4253 				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4254 					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4255 					DRM_DEBUG("IH: D2 vline\n");
4256 				}
4257 				break;
4258 			default:
4259 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4260 				break;
4261 			}
4262 			break;
4263 		case 3: /* D3 vblank/vline */
4264 			switch (src_data) {
4265 			case 0: /* D3 vblank */
4266 				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4267 					if (rdev->irq.crtc_vblank_int[2]) {
4268 						drm_handle_vblank(rdev->ddev, 2);
4269 						rdev->pm.vblank_sync = true;
4270 						wake_up(&rdev->irq.vblank_queue);
4271 					}
4272 					if (atomic_read(&rdev->irq.pflip[2]))
4273 						radeon_crtc_handle_flip(rdev, 2);
4274 					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4275 					DRM_DEBUG("IH: D3 vblank\n");
4276 				}
4277 				break;
4278 			case 1: /* D3 vline */
4279 				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4280 					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4281 					DRM_DEBUG("IH: D3 vline\n");
4282 				}
4283 				break;
4284 			default:
4285 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4286 				break;
4287 			}
4288 			break;
4289 		case 4: /* D4 vblank/vline */
4290 			switch (src_data) {
4291 			case 0: /* D4 vblank */
4292 				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4293 					if (rdev->irq.crtc_vblank_int[3]) {
4294 						drm_handle_vblank(rdev->ddev, 3);
4295 						rdev->pm.vblank_sync = true;
4296 						wake_up(&rdev->irq.vblank_queue);
4297 					}
4298 					if (atomic_read(&rdev->irq.pflip[3]))
4299 						radeon_crtc_handle_flip(rdev, 3);
4300 					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4301 					DRM_DEBUG("IH: D4 vblank\n");
4302 				}
4303 				break;
4304 			case 1: /* D4 vline */
4305 				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4306 					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4307 					DRM_DEBUG("IH: D4 vline\n");
4308 				}
4309 				break;
4310 			default:
4311 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4312 				break;
4313 			}
4314 			break;
4315 		case 5: /* D5 vblank/vline */
4316 			switch (src_data) {
4317 			case 0: /* D5 vblank */
4318 				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4319 					if (rdev->irq.crtc_vblank_int[4]) {
4320 						drm_handle_vblank(rdev->ddev, 4);
4321 						rdev->pm.vblank_sync = true;
4322 						wake_up(&rdev->irq.vblank_queue);
4323 					}
4324 					if (atomic_read(&rdev->irq.pflip[4]))
4325 						radeon_crtc_handle_flip(rdev, 4);
4326 					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4327 					DRM_DEBUG("IH: D5 vblank\n");
4328 				}
4329 				break;
4330 			case 1: /* D5 vline */
4331 				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4332 					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4333 					DRM_DEBUG("IH: D5 vline\n");
4334 				}
4335 				break;
4336 			default:
4337 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4338 				break;
4339 			}
4340 			break;
4341 		case 6: /* D6 vblank/vline */
4342 			switch (src_data) {
4343 			case 0: /* D6 vblank */
4344 				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4345 					if (rdev->irq.crtc_vblank_int[5]) {
4346 						drm_handle_vblank(rdev->ddev, 5);
4347 						rdev->pm.vblank_sync = true;
4348 						wake_up(&rdev->irq.vblank_queue);
4349 					}
4350 					if (atomic_read(&rdev->irq.pflip[5]))
4351 						radeon_crtc_handle_flip(rdev, 5);
4352 					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4353 					DRM_DEBUG("IH: D6 vblank\n");
4354 				}
4355 				break;
4356 			case 1: /* D6 vline */
4357 				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4358 					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4359 					DRM_DEBUG("IH: D6 vline\n");
4360 				}
4361 				break;
4362 			default:
4363 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4364 				break;
4365 			}
4366 			break;
4367 		case 42: /* HPD hotplug */
4368 			switch (src_data) {
4369 			case 0:
4370 				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4371 					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4372 					queue_hotplug = true;
4373 					DRM_DEBUG("IH: HPD1\n");
4374 				}
4375 				break;
4376 			case 1:
4377 				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4378 					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4379 					queue_hotplug = true;
4380 					DRM_DEBUG("IH: HPD2\n");
4381 				}
4382 				break;
4383 			case 2:
4384 				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4385 					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4386 					queue_hotplug = true;
4387 					DRM_DEBUG("IH: HPD3\n");
4388 				}
4389 				break;
4390 			case 3:
4391 				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4392 					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4393 					queue_hotplug = true;
4394 					DRM_DEBUG("IH: HPD4\n");
4395 				}
4396 				break;
4397 			case 4:
4398 				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4399 					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
4400 					queue_hotplug = true;
4401 					DRM_DEBUG("IH: HPD5\n");
4402 				}
4403 				break;
4404 			case 5:
4405 				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4406 					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
4407 					queue_hotplug = true;
4408 					DRM_DEBUG("IH: HPD6\n");
4409 				}
4410 				break;
4411 			default:
4412 				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4413 				break;
4414 			}
4415 			break;
4416 		case 44: /* hdmi */
4417 			switch (src_data) {
4418 			case 0:
4419 				if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4420 					rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
4421 					queue_hdmi = true;
4422 					DRM_DEBUG("IH: HDMI0\n");
4423 				}
4424 				break;
4425 			case 1:
4426 				if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4427 					rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
4428 					queue_hdmi = true;
4429 					DRM_DEBUG("IH: HDMI1\n");
4430 				}
4431 				break;
4432 			case 2:
4433 				if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4434 					rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
4435 					queue_hdmi = true;
4436 					DRM_DEBUG("IH: HDMI2\n");
4437 				}
4438 				break;
4439 			case 3:
4440 				if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4441 					rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
4442 					queue_hdmi = true;
4443 					DRM_DEBUG("IH: HDMI3\n");
4444 				}
4445 				break;
4446 			case 4:
4447 				if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4448 					rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
4449 					queue_hdmi = true;
4450 					DRM_DEBUG("IH: HDMI4\n");
4451 				}
4452 				break;
4453 			case 5:
4454 				if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4455 					rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
4456 					queue_hdmi = true;
4457 					DRM_DEBUG("IH: HDMI5\n");
4458 				}
4459 				break;
4460 			default:
4461 				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
4462 				break;
4463 			}
4464 		case 124: /* UVD */
4465 			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
4466 			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
4467 			break;
4468 		case 146:
4469 		case 147:
4470 			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
4471 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
4472 				RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
4473 			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
4474 				RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));
4475 			/* reset addr and status */
4476 			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
4477 			break;
4478 		case 176: /* CP_INT in ring buffer */
4479 		case 177: /* CP_INT in IB1 */
4480 		case 178: /* CP_INT in IB2 */
4481 			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
4482 			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4483 			break;
4484 		case 181: /* CP EOP event */
4485 			DRM_DEBUG("IH: CP EOP\n");
4486 			if (rdev->family >= CHIP_CAYMAN) {
4487 				switch (src_data) {
4488 				case 0:
4489 					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4490 					break;
4491 				case 1:
4492 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
4493 					break;
4494 				case 2:
4495 					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
4496 					break;
4497 				}
4498 			} else
4499 				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
4500 			break;
4501 		case 224: /* DMA trap event */
4502 			DRM_DEBUG("IH: DMA trap\n");
4503 			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
4504 			break;
4505 		case 233: /* GUI IDLE */
4506 			DRM_DEBUG("IH: GUI idle\n");
4507 			break;
4508 		case 244: /* DMA trap event */
4509 			if (rdev->family >= CHIP_CAYMAN) {
4510 				DRM_DEBUG("IH: DMA1 trap\n");
4511 				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
4512 			}
4513 			break;
4514 		default:
4515 			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4516 			break;
4517 		}
4518 
4519 		/* wptr/rptr are in bytes! */
4520 		rptr += 16;
4521 		rptr &= rdev->ih.ptr_mask;
4522 	}
4523 	if (queue_hotplug)
4524 		schedule_work(&rdev->hotplug_work);
4525 	if (queue_hdmi)
4526 		schedule_work(&rdev->audio_work);
4527 	rdev->ih.rptr = rptr;
4528 	WREG32(IH_RB_RPTR, rdev->ih.rptr);
4529 	atomic_set(&rdev->ih.lock, 0);
4530 
4531 	/* make sure wptr hasn't changed while processing */
4532 	wptr = evergreen_get_ih_wptr(rdev);
4533 	if (wptr != rptr)
4534 		goto restart_ih;
4535 
4536 	return IRQ_HANDLED;
4537 }
4538 
4539 /**
4540  * evergreen_dma_fence_ring_emit - emit a fence on the DMA ring
4541  *
4542  * @rdev: radeon_device pointer
4543  * @fence: radeon fence object
4544  *
4545  * Add a DMA fence packet to the ring to write
4546  * the fence seq number and DMA trap packet to generate
4547  * an interrupt if needed (evergreen-SI).
4548  */
4549 void evergreen_dma_fence_ring_emit(struct radeon_device *rdev,
4550 				   struct radeon_fence *fence)
4551 {
4552 	struct radeon_ring *ring = &rdev->ring[fence->ring];
4553 	u64 addr = rdev->fence_drv[fence->ring].gpu_addr;
4554 	/* write the fence */
4555 	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_FENCE, 0, 0));
4556 	radeon_ring_write(ring, addr & 0xfffffffc);
4557 	radeon_ring_write(ring, (upper_32_bits(addr) & 0xff));
4558 	radeon_ring_write(ring, fence->seq);
4559 	/* generate an interrupt */
4560 	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_TRAP, 0, 0));
4561 	/* flush HDP */
4562 	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_SRBM_WRITE, 0, 0));
4563 	radeon_ring_write(ring, (0xf << 16) | (HDP_MEM_COHERENCY_FLUSH_CNTL >> 2));
4564 	radeon_ring_write(ring, 1);
4565 }
4566 
4567 /**
4568  * evergreen_dma_ring_ib_execute - schedule an IB on the DMA engine
4569  *
4570  * @rdev: radeon_device pointer
4571  * @ib: IB object to schedule
4572  *
4573  * Schedule an IB in the DMA ring (evergreen).
4574  */
4575 void evergreen_dma_ring_ib_execute(struct radeon_device *rdev,
4576 				   struct radeon_ib *ib)
4577 {
4578 	struct radeon_ring *ring = &rdev->ring[ib->ring];
4579 
4580 	if (rdev->wb.enabled) {
4581 		u32 next_rptr = ring->wptr + 4;
4582 		while ((next_rptr & 7) != 5)
4583 			next_rptr++;
4584 		next_rptr += 3;
4585 		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_WRITE, 0, 1));
4586 		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
4587 		radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xff);
4588 		radeon_ring_write(ring, next_rptr);
4589 	}
4590 
4591 	/* The indirect buffer packet must end on an 8 DW boundary in the DMA ring.
4592 	 * Pad as necessary with NOPs.
4593 	 */
4594 	while ((ring->wptr & 7) != 5)
4595 		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
4596 	radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_INDIRECT_BUFFER, 0, 0));
4597 	radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0));
4598 	radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF));
4599 
4600 }
4601 
4602 /**
4603  * evergreen_copy_dma - copy pages using the DMA engine
4604  *
4605  * @rdev: radeon_device pointer
4606  * @src_offset: src GPU address
4607  * @dst_offset: dst GPU address
4608  * @num_gpu_pages: number of GPU pages to xfer
4609  * @fence: radeon fence object
4610  *
4611  * Copy GPU paging using the DMA engine (evergreen-cayman).
4612  * Used by the radeon ttm implementation to move pages if
4613  * registered as the asic copy callback.
4614  */
4615 int evergreen_copy_dma(struct radeon_device *rdev,
4616 		       uint64_t src_offset, uint64_t dst_offset,
4617 		       unsigned num_gpu_pages,
4618 		       struct radeon_fence **fence)
4619 {
4620 	struct radeon_semaphore *sem = NULL;
4621 	int ring_index = rdev->asic->copy.dma_ring_index;
4622 	struct radeon_ring *ring = &rdev->ring[ring_index];
4623 	u32 size_in_dw, cur_size_in_dw;
4624 	int i, num_loops;
4625 	int r = 0;
4626 
4627 	r = radeon_semaphore_create(rdev, &sem);
4628 	if (r) {
4629 		DRM_ERROR("radeon: moving bo (%d).\n", r);
4630 		return r;
4631 	}
4632 
4633 	size_in_dw = (num_gpu_pages << RADEON_GPU_PAGE_SHIFT) / 4;
4634 	num_loops = DIV_ROUND_UP(size_in_dw, 0xfffff);
4635 	r = radeon_ring_lock(rdev, ring, num_loops * 5 + 11);
4636 	if (r) {
4637 		DRM_ERROR("radeon: moving bo (%d).\n", r);
4638 		radeon_semaphore_free(rdev, &sem, NULL);
4639 		return r;
4640 	}
4641 
4642 	if (radeon_fence_need_sync(*fence, ring->idx)) {
4643 		radeon_semaphore_sync_rings(rdev, sem, (*fence)->ring,
4644 					    ring->idx);
4645 		radeon_fence_note_sync(*fence, ring->idx);
4646 	} else {
4647 		radeon_semaphore_free(rdev, &sem, NULL);
4648 	}
4649 
4650 	for (i = 0; i < num_loops; i++) {
4651 		cur_size_in_dw = size_in_dw;
4652 		if (cur_size_in_dw > 0xFFFFF)
4653 			cur_size_in_dw = 0xFFFFF;
4654 		size_in_dw -= cur_size_in_dw;
4655 		radeon_ring_write(ring, DMA_PACKET(DMA_PACKET_COPY, 0, cur_size_in_dw));
4656 		radeon_ring_write(ring, dst_offset & 0xfffffffc);
4657 		radeon_ring_write(ring, src_offset & 0xfffffffc);
4658 		radeon_ring_write(ring, upper_32_bits(dst_offset) & 0xff);
4659 		radeon_ring_write(ring, upper_32_bits(src_offset) & 0xff);
4660 		src_offset += cur_size_in_dw * 4;
4661 		dst_offset += cur_size_in_dw * 4;
4662 	}
4663 
4664 	r = radeon_fence_emit(rdev, fence, ring->idx);
4665 	if (r) {
4666 		radeon_ring_unlock_undo(rdev, ring);
4667 		return r;
4668 	}
4669 
4670 	radeon_ring_unlock_commit(rdev, ring);
4671 	radeon_semaphore_free(rdev, &sem, *fence);
4672 
4673 	return r;
4674 }
4675 
/* Bring the asic up: load microcode, program the MC/GART, start the
 * blitter, write-back, fences, IRQs, CP/DMA/UVD rings, IB pool and
 * audio.  Order matters; most failures abort the whole startup.
 * Returns 0 on success or a negative error code.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);

	/* DCE5 (NI) parts also need MC microcode; older parts only
	 * need ME/PFP/RLC */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	/* program the memory controller, then bring up the address
	 * translation path (AGP aperture or PCIE GART) */
	evergreen_mc_program(rdev);
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* blitter failure is non-fatal: fall back to CPU copies */
	r = evergreen_blit_init(rdev);
	if (r) {
		r600_blit_fini(rdev);
		rdev->asic->copy.copy = NULL;
		dev_warn(rdev->dev, "failed blitter (%d) falling back to memcpy\n", r);
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional: on any failure just disable its ring below
	 * rather than failing the whole startup */
	r = rv770_uvd_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	/* GFX ring */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     R600_CP_RB_RPTR, R600_CP_RB_WPTR,
			     0, 0xfffff, RADEON_CP_PACKET2);
	if (r)
		return r;

	/* async DMA ring */
	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_RB_RPTR, DMA_RB_WPTR,
			     2, 0x3fffc, DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* UVD ring, only if it survived rv770_uvd_resume() above */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size,
				     R600_WB_UVD_RPTR_OFFSET,
				     UVD_RBC_RB_RPTR, UVD_RBC_RB_WPTR,
				     0, 0xfffff, RADEON_CP_PACKET2);
		if (!r)
			r = r600_uvd_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = r600_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
4822 
4823 int evergreen_resume(struct radeon_device *rdev)
4824 {
4825 	int r;
4826 
4827 	/* reset the asic, the gfx blocks are often in a bad state
4828 	 * after the driver is unloaded or after a resume
4829 	 */
4830 	if (radeon_asic_reset(rdev))
4831 		dev_warn(rdev->dev, "GPU reset failed !\n");
4832 	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
4833 	 * posting will perform necessary task to bring back GPU into good
4834 	 * shape.
4835 	 */
4836 	/* post card */
4837 	atom_asic_init(rdev->mode_info.atom_context);
4838 
4839 	/* init golden registers */
4840 	evergreen_init_golden_registers(rdev);
4841 
4842 	rdev->accel_working = true;
4843 	r = evergreen_startup(rdev);
4844 	if (r) {
4845 		DRM_ERROR("evergreen startup failed on resume\n");
4846 		rdev->accel_working = false;
4847 		return r;
4848 	}
4849 
4850 	return r;
4851 
4852 }
4853 
/* Quiesce the asic for suspend: stop audio, UVD, CP and DMA engines,
 * then tear down IRQs, write-back and the GART.  The order mirrors the
 * reverse of evergreen_startup().  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	r600_uvd_rbc_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
4867 
/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 */
/* One-time driver init for evergreen-class asics: read/validate the
 * BIOS, reset and post the card, set up clocks, fences, AGP, the
 * memory controller, the rings and the GART, then attempt startup.
 * A startup failure disables acceleration but does not fail init.
 * Returns 0 on success or a negative error code.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure here just falls back to non-AGP */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* pre-size the GFX (1 MB) and DMA (64 KB) ring buffers */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD init failure is non-fatal; skip its ring setup if it fails */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failed: tear everything down and continue
		 * without acceleration */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not suffient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
4979 
/* Tear down everything evergreen_init()/evergreen_startup() set up,
 * in reverse dependency order, and release the cached BIOS image.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	r600_audio_fini(rdev);
	r600_blit_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	radeon_uvd_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5001 
5002 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5003 {
5004 	u32 link_width_cntl, speed_cntl;
5005 
5006 	if (radeon_pcie_gen2 == 0)
5007 		return;
5008 
5009 	if (rdev->flags & RADEON_IS_IGP)
5010 		return;
5011 
5012 	if (!(rdev->flags & RADEON_IS_PCIE))
5013 		return;
5014 
5015 	/* x2 cards have a special sequence */
5016 	if (ASIC_IS_X2(rdev))
5017 		return;
5018 
5019 	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5020 		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5021 		return;
5022 
5023 	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5024 	if (speed_cntl & LC_CURRENT_DATA_RATE) {
5025 		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5026 		return;
5027 	}
5028 
5029 	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5030 
5031 	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5032 	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5033 
5034 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5035 		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5036 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5037 
5038 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5039 		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5040 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5041 
5042 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5043 		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5044 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5045 
5046 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5047 		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5048 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5049 
5050 		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5051 		speed_cntl |= LC_GEN2_EN_STRAP;
5052 		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5053 
5054 	} else {
5055 		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5056 		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5057 		if (1)
5058 			link_width_cntl |= LC_UPCONFIGURE_DIS;
5059 		else
5060 			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5061 		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5062 	}
5063 }
5064