/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
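/*
 * Note: the radeon_legacy_* helpers program the clocks through the old
 * (pre-AtomBIOS) code paths and are picked up by the r100/r300/rs400
 * tables below, while the radeon_atom_* helpers go through the AtomBIOS
 * tables and are used by the r420/rs600 and newer tables.
 */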
/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
int r100_init(struct radeon_device *rdev);
int r200_init(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset,
                   uint64_t dst_offset,
                   unsigned num_pages,
                   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ib_test(struct radeon_device *rdev);
int r100_ring_test(struct radeon_device *rdev);

static struct radeon_asic r100_asic = {
        .init = &r100_init,
        .errata = &r100_errata,
        .vram_info = &r100_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r100_gpu_reset,
        .mc_init = &r100_mc_init,
        .mc_fini = &r100_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &r100_pci_gart_init,
        .gart_fini = &r100_pci_gart_fini,
        .gart_enable = &r100_pci_gart_enable,
        .gart_disable = &r100_pci_gart_disable,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r100_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r100_fence_ring_emit,
        .cs_parse = &r100_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = NULL,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};
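/*
 * Each radeon_asic table in this file is a per-family dispatch table:
 * newer families reuse older callbacks wherever the hardware block is
 * unchanged (r300 keeps the r100 CP and PCI GART hooks, for example) and
 * only override what differs.  A NULL entry means the operation has no
 * implementation for that family or is handled elsewhere; the r100 table
 * has no DMA copy path, so .copy_dma is NULL and .copy points at the
 * blitter.
 *
 * Illustrative sketch only (the asic field and the calling convention
 * shown here are assumptions, not definitions from this header): the
 * driver core is expected to hook the table matching the detected chip
 * into the device and dispatch through it, roughly:
 *
 *        rdev->asic = &r100_asic;
 *        rdev->asic->init(rdev);
 *        if (rdev->asic->copy_dma)
 *                rdev->asic->copy_dma(rdev, src, dst, num_pages, fence);
 */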
/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
int rv370_pcie_gart_init(struct radeon_device *rdev);
void rv370_pcie_gart_fini(struct radeon_device *rdev);
int rv370_pcie_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
int r300_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset,
                  uint64_t dst_offset,
                  unsigned num_pages,
                  struct radeon_fence *fence);

static struct radeon_asic r300_asic = {
        .init = &r300_init,
        .errata = &r300_errata,
        .vram_info = &r300_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &r300_mc_init,
        .mc_fini = &r300_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &r100_pci_gart_init,
        .gart_fini = &r100_pci_gart_fini,
        .gart_enable = &r100_pci_gart_enable,
        .gart_disable = &r100_pci_gart_disable,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
static struct radeon_asic r420_asic = {
        .init = &r420_init,
        .fini = &r420_fini,
        .suspend = &r420_suspend,
        .resume = &r420_resume,
        .errata = NULL,
        .vram_info = NULL,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};

/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
int rs400_gart_init(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
        .init = &r300_init,
        .errata = &rs400_errata,
        .vram_info = &rs400_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs400_mc_init,
        .mc_fini = &rs400_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs400_gart_init,
        .gart_fini = &rs400_gart_fini,
        .gart_enable = &rs400_gart_enable,
        .gart_disable = &rs400_gart_disable,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};

/*
 * rs600.
 */
int rs600_init(struct radeon_device *rdev);
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
int rs600_gart_init(struct radeon_device *rdev);
void rs600_gart_fini(struct radeon_device *rdev);
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs600_asic = {
        .init = &rs600_init,
        .errata = &rs600_errata,
        .vram_info = &rs600_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs600_mc_init,
        .mc_fini = &rs600_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs600_gart_init,
        .gart_fini = &rs600_gart_fini,
        .gart_enable = &rs600_gart_enable,
        .gart_disable = &rs600_gart_disable,
        .gart_tlb_flush = &rs600_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .bandwidth_update = &rs600_bandwidth_update,
};

/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs690_asic = {
        .init = &rs600_init,
        .errata = &rs690_errata,
        .vram_info = &rs690_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs690_mc_init,
        .mc_fini = &rs690_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs400_gart_init,
        .gart_fini = &rs400_gart_fini,
        .gart_enable = &rs400_gart_enable,
        .gart_disable = &rs400_gart_disable,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r300_copy_dma,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rs690_bandwidth_update,
};

/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
        .init = &rv515_init,
        .fini = &rv515_fini,
        .suspend = &rv515_suspend,
        .resume = &rv515_resume,
        .errata = NULL,
        .vram_info = NULL,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &rv515_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_init = &rv370_pcie_gart_init,
        .gart_fini = &rv370_pcie_gart_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &rv515_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
        .init = &r520_init,
        .fini = &rv515_fini,
        .suspend = &rv515_suspend,
        .resume = &r520_resume,
        .errata = NULL,
        .vram_info = NULL,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &rv515_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_init = NULL,
        .gart_fini = NULL,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &rv515_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};
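/*
 * As with the r420/rv515/r520 tables above, the r600 and rv770 tables
 * below leave most of the discrete setup/teardown hooks (.errata,
 * .vram_info, .mc_init, .cp_init, .ring_start, ...) at NULL; on these
 * families that work is presumably driven from the consolidated
 * init/fini/suspend/resume entry points rather than through individual
 * callbacks.
 */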
/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset,
                  uint64_t dst_offset,
                  unsigned num_pages,
                  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ib_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset, uint64_t dst_offset,
                   unsigned num_pages, struct radeon_fence *fence);

static struct radeon_asic r600_asic = {
        .errata = NULL,
        .init = &r600_init,
        .fini = &r600_fini,
        .suspend = &r600_suspend,
        .resume = &r600_resume,
        .cp_commit = &r600_cp_commit,
        .vram_info = NULL,
        .vga_set_state = &r600_vga_set_state,
        .gpu_reset = &r600_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = &r600_wb_init,
        .wb_fini = &r600_wb_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .ring_start = NULL,
        .ring_test = &r600_ring_test,
        .ring_ib_execute = &r600_ring_ib_execute,
        .ib_test = &r600_ib_test,
        .irq_set = &r600_irq_set,
        .irq_process = &r600_irq_process,
        .fence_ring_emit = &r600_fence_ring_emit,
        .cs_parse = &r600_cs_parse,
        .copy_blit = &r600_copy_blit,
        .copy_dma = &r600_copy_blit,
        .copy = &r600_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r600_set_surface_reg,
        .clear_surface_reg = r600_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

static struct radeon_asic rv770_asic = {
        .errata = NULL,
        .init = &rv770_init,
        .fini = &rv770_fini,
        .suspend = &rv770_suspend,
        .resume = &rv770_resume,
        .cp_commit = &r600_cp_commit,
        .vram_info = NULL,
        .gpu_reset = &rv770_gpu_reset,
        .vga_set_state = &r600_vga_set_state,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = &r600_wb_init,
        .wb_fini = &r600_wb_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .ring_start = NULL,
        .ring_test = &r600_ring_test,
        .ring_ib_execute = &r600_ring_ib_execute,
        .ib_test = &r600_ib_test,
        .irq_set = &r600_irq_set,
        .irq_process = &r600_irq_process,
        .fence_ring_emit = &r600_fence_ring_emit,
        .cs_parse = &r600_cs_parse,
        .copy_blit = &r600_copy_blit,
        .copy_dma = &r600_copy_blit,
        .copy = &r600_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r600_set_surface_reg,
        .clear_surface_reg = r600_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};

#endif