/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
uint32_t radeon_legacy_get_engine_clock(struct radeon_device *rdev);
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_legacy_get_memory_clock(struct radeon_device *rdev);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

uint32_t radeon_atom_get_engine_clock(struct radeon_device *rdev);
void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
uint32_t radeon_atom_get_memory_clock(struct radeon_device *rdev);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
extern int r100_init(struct radeon_device *rdev);
extern void r100_fini(struct radeon_device *rdev);
extern int r100_suspend(struct radeon_device *rdev);
extern int r100_resume(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ring_test(struct radeon_device *rdev);
void r100_hpd_init(struct radeon_device *rdev);
void r100_hpd_fini(struct radeon_device *rdev);
bool r100_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r100_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
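
/*
 * Each radeon_asic table below is a set of hardware-specific callbacks that
 * the driver dispatches through rdev->asic. Later families reuse routines
 * from earlier ones wherever the hardware blocks are compatible, e.g. r300
 * keeps the r100 CP, IRQ and blit paths but provides its own CS parser and
 * fence emission.
 */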
static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.fini = &r100_fini,
	.suspend = &r100_suspend,
	.resume = &r100_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = NULL,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};


/*
 * r300,r350,rv350,rv380
 */
extern int r300_init(struct radeon_device *rdev);
extern void r300_fini(struct radeon_device *rdev);
extern int r300_suspend(struct radeon_device *rdev);
extern int r300_resume(struct radeon_device *rdev);
extern int r300_gpu_reset(struct radeon_device *rdev);
extern void r300_ring_start(struct radeon_device *rdev);
extern void r300_fence_ring_emit(struct radeon_device *rdev,
				 struct radeon_fence *fence);
extern int r300_cs_parse(struct radeon_cs_parser *p);
extern void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
extern int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
extern uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
extern void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
extern void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
extern int r300_copy_dma(struct radeon_device *rdev,
			 uint64_t src_offset,
			 uint64_t dst_offset,
			 unsigned num_pages,
			 struct radeon_fence *fence);
static struct radeon_asic r300_asic = {
	.init = &r300_init,
	.fini = &r300_fini,
	.suspend = &r300_suspend,
	.resume = &r300_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
static struct radeon_asic r420_asic = {
	.init = &r420_init,
	.fini = &r420_fini,
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};


/*
 * rs400,rs480
 */
extern int rs400_init(struct radeon_device *rdev);
extern void rs400_fini(struct radeon_device *rdev);
extern int rs400_suspend(struct radeon_device *rdev);
extern int rs400_resume(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
	.init = &rs400_init,
	.fini = &rs400_fini,
	.suspend = &rs400_suspend,
	.resume = &rs400_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_legacy_get_engine_clock,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.get_memory_clock = &radeon_legacy_get_memory_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
	.hpd_init = &r100_hpd_init,
	.hpd_fini = &r100_hpd_fini,
	.hpd_sense = &r100_hpd_sense,
	.hpd_set_polarity = &r100_hpd_set_polarity,
};

/*
 * rs600.
 */
extern int rs600_init(struct radeon_device *rdev);
extern void rs600_fini(struct radeon_device *rdev);
extern int rs600_suspend(struct radeon_device *rdev);
extern int rs600_resume(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
void rs600_hpd_init(struct radeon_device *rdev);
void rs600_hpd_fini(struct radeon_device *rdev);
bool rs600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void rs600_hpd_set_polarity(struct radeon_device *rdev,
			    enum radeon_hpd_id hpd);

static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
	.fini = &rs600_fini,
	.suspend = &rs600_suspend,
	.resume = &rs600_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};


/*
 * rs690,rs740
 */
int rs690_init(struct radeon_device *rdev);
void rs690_fini(struct radeon_device *rdev);
int rs690_resume(struct radeon_device *rdev);
int rs690_suspend(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
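/*
 * rs690/rs740 are IGPs: the table below reuses the rs400 GART and the rs600
 * IRQ/HPD hooks, and defaults .copy to the DMA engine rather than the blitter.
 */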
static struct radeon_asic rs690_asic = {
	.init = &rs690_init,
	.fini = &rs690_fini,
	.suspend = &rs690_suspend,
	.resume = &rs690_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r300_copy_dma,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &rv515_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
	.init = &r520_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &r520_resume,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &rs600_hpd_init,
	.hpd_fini = &rs600_hpd_fini,
	.hpd_sense = &rs600_hpd_sense,
	.hpd_set_polarity = &rs600_hpd_set_polarity,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);
void r600_hpd_init(struct radeon_device *rdev);
void r600_hpd_fini(struct radeon_device *rdev);
bool r600_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd);
void r600_hpd_set_polarity(struct radeon_device *rdev,
			   enum radeon_hpd_id hpd);
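
/*
 * Note: the r600 and rv770 tables below set no .ring_start hook, and although
 * r600_copy_dma is declared above it is not wired up here; .copy_dma falls
 * back to the blit path.
 */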
static struct radeon_asic r600_asic = {
	.init = &r600_init,
	.fini = &r600_fini,
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

static struct radeon_asic rv770_asic = {
	.init = &rv770_init,
	.fini = &rv770_fini,
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.get_engine_clock = &radeon_atom_get_engine_clock,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.get_memory_clock = &radeon_atom_get_memory_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
	.hpd_init = &r600_hpd_init,
	.hpd_fini = &r600_hpd_fini,
	.hpd_sense = &r600_hpd_sense,
	.hpd_set_polarity = &r600_hpd_set_polarity,
};

#endif