/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include "drmP.h"
#include "radeon_drm.h"
#include "radeon.h"

#include "atom.h"
#include <asm/div64.h>

#include "drm_crtc_helper.h"
#include "drm_edid.h"

static int radeon_ddc_dump(struct drm_connector *connector);

static void avivo_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG("%d\n", radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUT_RW_MODE, 0);
	WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);

	WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(AVIVO_DC_LUT_30_COLOR,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}

	WREG32(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id);
}

static void legacy_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;
	uint32_t dac2_cntl;

	dac2_cntl = RREG32(RADEON_DAC_CNTL2);
	if (radeon_crtc->crtc_id == 0)
		dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
	else
		dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
	WREG32(RADEON_DAC_CNTL2, dac2_cntl);

	WREG8(RADEON_PALETTE_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(RADEON_PALETTE_30_DATA,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}
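
/*
 * Load the per-CRTC gamma LUT into the hardware, using the AVIVO register
 * path on AVIVO-class ASICs and the legacy DAC palette registers otherwise.
 */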
void radeon_crtc_load_lut(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	if (!crtc->enabled)
		return;

	if (ASIC_IS_AVIVO(rdev))
		avivo_crtc_load_lut(crtc);
	else
		legacy_crtc_load_lut(crtc);
}

/** Sets the color ramps on behalf of fbcon */
void radeon_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green,
			      u16 blue, int regno)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	radeon_crtc->lut_r[regno] = red >> 6;
	radeon_crtc->lut_g[regno] = green >> 6;
	radeon_crtc->lut_b[regno] = blue >> 6;
}

/** Gets the color ramps on behalf of fbcon */
void radeon_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green,
			      u16 *blue, int regno)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	*red = radeon_crtc->lut_r[regno] << 6;
	*green = radeon_crtc->lut_g[regno] << 6;
	*blue = radeon_crtc->lut_b[regno] << 6;
}

static void radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
				  u16 *blue, uint32_t size)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	int i;

	if (size != 256) {
		return;
	}

	/* userspace palettes are always correct as is */
	for (i = 0; i < 256; i++) {
		radeon_crtc->lut_r[i] = red[i] >> 6;
		radeon_crtc->lut_g[i] = green[i] >> 6;
		radeon_crtc->lut_b[i] = blue[i] >> 6;
	}
	radeon_crtc_load_lut(crtc);
}

static void radeon_crtc_destroy(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(radeon_crtc);
}

static const struct drm_crtc_funcs radeon_crtc_funcs = {
	.cursor_set = radeon_crtc_cursor_set,
	.cursor_move = radeon_crtc_cursor_move,
	.gamma_set = radeon_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = radeon_crtc_destroy,
};

static void radeon_crtc_init(struct drm_device *dev, int index)
{
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc;
	int i;

	radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (radeon_crtc == NULL)
		return;

	drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
	radeon_crtc->crtc_id = index;
	rdev->mode_info.crtcs[index] = radeon_crtc;

#if 0
	radeon_crtc->mode_set.crtc = &radeon_crtc->base;
	radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
	radeon_crtc->mode_set.num_connectors = 0;
#endif

	for (i = 0; i < 256; i++) {
		radeon_crtc->lut_r[i] = i << 2;
		radeon_crtc->lut_g[i] = i << 2;
		radeon_crtc->lut_b[i] = i << 2;
	}

	if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
		radeon_atombios_init_crtc(dev, radeon_crtc);
	else
		radeon_legacy_init_crtc(dev, radeon_crtc);
}
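
/*
 * Name tables used by radeon_print_display_setup() below; each array is
 * indexed by the corresponding id (ATOM encoder object id, DRM connector
 * type, hotplug pin).
 */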
"INTERNAL_DDI", 227 "VT1625", 228 "HDMI_SI1932", 229 "DP_AN9801", 230 "DP_DP501", 231 "INTERNAL_UNIPHY", 232 "INTERNAL_KLDSCP_LVTMA", 233 "INTERNAL_UNIPHY1", 234 "INTERNAL_UNIPHY2", 235 }; 236 237 static const char *connector_names[15] = { 238 "Unknown", 239 "VGA", 240 "DVI-I", 241 "DVI-D", 242 "DVI-A", 243 "Composite", 244 "S-video", 245 "LVDS", 246 "Component", 247 "DIN", 248 "DisplayPort", 249 "HDMI-A", 250 "HDMI-B", 251 "TV", 252 "eDP", 253 }; 254 255 static const char *hpd_names[7] = { 256 "NONE", 257 "HPD1", 258 "HPD2", 259 "HPD3", 260 "HPD4", 261 "HPD5", 262 "HPD6", 263 }; 264 265 static void radeon_print_display_setup(struct drm_device *dev) 266 { 267 struct drm_connector *connector; 268 struct radeon_connector *radeon_connector; 269 struct drm_encoder *encoder; 270 struct radeon_encoder *radeon_encoder; 271 uint32_t devices; 272 int i = 0; 273 274 DRM_INFO("Radeon Display Connectors\n"); 275 list_for_each_entry(connector, &dev->mode_config.connector_list, head) { 276 radeon_connector = to_radeon_connector(connector); 277 DRM_INFO("Connector %d:\n", i); 278 DRM_INFO(" %s\n", connector_names[connector->connector_type]); 279 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE) 280 DRM_INFO(" %s\n", hpd_names[radeon_connector->hpd.hpd]); 281 if (radeon_connector->ddc_bus) 282 DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n", 283 radeon_connector->ddc_bus->rec.mask_clk_reg, 284 radeon_connector->ddc_bus->rec.mask_data_reg, 285 radeon_connector->ddc_bus->rec.a_clk_reg, 286 radeon_connector->ddc_bus->rec.a_data_reg, 287 radeon_connector->ddc_bus->rec.en_clk_reg, 288 radeon_connector->ddc_bus->rec.en_data_reg, 289 radeon_connector->ddc_bus->rec.y_clk_reg, 290 radeon_connector->ddc_bus->rec.y_data_reg); 291 DRM_INFO(" Encoders:\n"); 292 list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) { 293 radeon_encoder = to_radeon_encoder(encoder); 294 devices = radeon_encoder->devices & radeon_connector->devices; 295 if (devices) { 296 if (devices & ATOM_DEVICE_CRT1_SUPPORT) 297 DRM_INFO(" CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]); 298 if (devices & ATOM_DEVICE_CRT2_SUPPORT) 299 DRM_INFO(" CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]); 300 if (devices & ATOM_DEVICE_LCD1_SUPPORT) 301 DRM_INFO(" LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]); 302 if (devices & ATOM_DEVICE_DFP1_SUPPORT) 303 DRM_INFO(" DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]); 304 if (devices & ATOM_DEVICE_DFP2_SUPPORT) 305 DRM_INFO(" DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]); 306 if (devices & ATOM_DEVICE_DFP3_SUPPORT) 307 DRM_INFO(" DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]); 308 if (devices & ATOM_DEVICE_DFP4_SUPPORT) 309 DRM_INFO(" DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]); 310 if (devices & ATOM_DEVICE_DFP5_SUPPORT) 311 DRM_INFO(" DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]); 312 if (devices & ATOM_DEVICE_TV1_SUPPORT) 313 DRM_INFO(" TV1: %s\n", encoder_names[radeon_encoder->encoder_id]); 314 if (devices & ATOM_DEVICE_CV_SUPPORT) 315 DRM_INFO(" CV: %s\n", encoder_names[radeon_encoder->encoder_id]); 316 } 317 } 318 i++; 319 } 320 } 321 322 static bool radeon_setup_enc_conn(struct drm_device *dev) 323 { 324 struct radeon_device *rdev = dev->dev_private; 325 struct drm_connector *drm_connector; 326 bool ret = false; 327 328 if (rdev->bios) { 329 if (rdev->is_atom_bios) { 330 if (rdev->family >= CHIP_R600) 331 ret = radeon_get_atom_connector_info_from_object_table(dev); 332 else 333 ret = 
static bool radeon_setup_enc_conn(struct drm_device *dev)
{
	struct radeon_device *rdev = dev->dev_private;
	struct drm_connector *drm_connector;
	bool ret = false;

	if (rdev->bios) {
		if (rdev->is_atom_bios) {
			if (rdev->family >= CHIP_R600)
				ret = radeon_get_atom_connector_info_from_object_table(dev);
			else
				ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
		} else {
			ret = radeon_get_legacy_connector_info_from_bios(dev);
			if (ret == false)
				ret = radeon_get_legacy_connector_info_from_table(dev);
		}
	} else {
		if (!ASIC_IS_AVIVO(rdev))
			ret = radeon_get_legacy_connector_info_from_table(dev);
	}
	if (ret) {
		radeon_setup_encoder_clones(dev);
		radeon_print_display_setup(dev);
		list_for_each_entry(drm_connector, &dev->mode_config.connector_list, head)
			radeon_ddc_dump(drm_connector);
	}

	return ret;
}

int radeon_ddc_get_modes(struct radeon_connector *radeon_connector)
{
	int ret = 0;

	if ((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_DisplayPort) ||
	    (radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP)) {
		struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;
		if (dig->dp_i2c_bus)
			radeon_connector->edid = drm_get_edid(&radeon_connector->base, &dig->dp_i2c_bus->adapter);
	}
	if (!radeon_connector->ddc_bus)
		return -1;
	if (!radeon_connector->edid) {
		radeon_i2c_do_lock(radeon_connector->ddc_bus, 1);
		radeon_connector->edid = drm_get_edid(&radeon_connector->base, &radeon_connector->ddc_bus->adapter);
		radeon_i2c_do_lock(radeon_connector->ddc_bus, 0);
	}

	if (radeon_connector->edid) {
		drm_mode_connector_update_edid_property(&radeon_connector->base, radeon_connector->edid);
		ret = drm_add_edid_modes(&radeon_connector->base, radeon_connector->edid);
		return ret;
	}
	drm_mode_connector_update_edid_property(&radeon_connector->base, NULL);
	return 0;
}

static int radeon_ddc_dump(struct drm_connector *connector)
{
	struct edid *edid;
	struct radeon_connector *radeon_connector = to_radeon_connector(connector);
	int ret = 0;

	if (!radeon_connector->ddc_bus)
		return -1;
	radeon_i2c_do_lock(radeon_connector->ddc_bus, 1);
	edid = drm_get_edid(connector, &radeon_connector->ddc_bus->adapter);
	radeon_i2c_do_lock(radeon_connector->ddc_bus, 0);
	if (edid) {
		kfree(edid);
	}
	return ret;
}

static inline uint32_t radeon_div(uint64_t n, uint32_t d)
{
	uint64_t mod;

	n += d / 2;

	mod = do_div(n, d);
	return n;
}
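
/*
 * Compute PLL dividers for the requested pixel clock on pre-AVIVO PLLs:
 * walk every legal post and reference divider, binary-search the integer
 * (and optional fractional) feedback divider for each, and keep the
 * combination with the smallest frequency error, using the
 * RADEON_PLL_PREFER_* flags to break ties between exact matches.
 */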
void radeon_compute_pll(struct radeon_pll *pll,
			uint64_t freq,
			uint32_t *dot_clock_p,
			uint32_t *fb_div_p,
			uint32_t *frac_fb_div_p,
			uint32_t *ref_div_p,
			uint32_t *post_div_p,
			int flags)
{
	uint32_t min_ref_div = pll->min_ref_div;
	uint32_t max_ref_div = pll->max_ref_div;
	uint32_t min_fractional_feed_div = 0;
	uint32_t max_fractional_feed_div = 0;
	uint32_t best_vco = pll->best_vco;
	uint32_t best_post_div = 1;
	uint32_t best_ref_div = 1;
	uint32_t best_feedback_div = 1;
	uint32_t best_frac_feedback_div = 0;
	uint32_t best_freq = -1;
	uint32_t best_error = 0xffffffff;
	uint32_t best_vco_diff = 1;
	uint32_t post_div;

	DRM_DEBUG("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
	freq = freq * 1000;

	if (flags & RADEON_PLL_USE_REF_DIV)
		min_ref_div = max_ref_div = pll->reference_div;
	else {
		while (min_ref_div < max_ref_div - 1) {
			uint32_t mid = (min_ref_div + max_ref_div) / 2;
			uint32_t pll_in = pll->reference_freq / mid;
			if (pll_in < pll->pll_in_min)
				max_ref_div = mid;
			else if (pll_in > pll->pll_in_max)
				min_ref_div = mid;
			else
				break;
		}
	}

	if (flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		min_fractional_feed_div = pll->min_frac_feedback_div;
		max_fractional_feed_div = pll->max_frac_feedback_div;
	}

	for (post_div = pll->min_post_div; post_div <= pll->max_post_div; ++post_div) {
		uint32_t ref_div;

		if ((flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
			continue;

		/* legacy radeons only have a few post_divs */
		if (flags & RADEON_PLL_LEGACY) {
			if ((post_div == 5) ||
			    (post_div == 7) ||
			    (post_div == 9) ||
			    (post_div == 10) ||
			    (post_div == 11) ||
			    (post_div == 13) ||
			    (post_div == 14) ||
			    (post_div == 15))
				continue;
		}

		for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) {
			uint32_t feedback_div, current_freq = 0, error, vco_diff;
			uint32_t pll_in = pll->reference_freq / ref_div;
			uint32_t min_feed_div = pll->min_feedback_div;
			uint32_t max_feed_div = pll->max_feedback_div + 1;

			if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max)
				continue;

			while (min_feed_div < max_feed_div) {
				uint32_t vco;
				uint32_t min_frac_feed_div = min_fractional_feed_div;
				uint32_t max_frac_feed_div = max_fractional_feed_div + 1;
				uint32_t frac_feedback_div;
				uint64_t tmp;

				feedback_div = (min_feed_div + max_feed_div) / 2;

				tmp = (uint64_t)pll->reference_freq * feedback_div;
				vco = radeon_div(tmp, ref_div);

				if (vco < pll->pll_out_min) {
					min_feed_div = feedback_div + 1;
					continue;
				} else if (vco > pll->pll_out_max) {
					max_feed_div = feedback_div;
					continue;
				}
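
				/* binary-search the fractional feedback divider
				 * for the value closest to the target clock */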
				while (min_frac_feed_div < max_frac_feed_div) {
					frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2;
					tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div;
					tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
					current_freq = radeon_div(tmp, ref_div * post_div);

					if (flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
						/* error is unsigned; flag overshoot
						 * explicitly instead of relying on a
						 * negative result */
						if (current_freq > freq)
							error = 0xffffffff;
						else
							error = freq - current_freq;
					} else
						error = abs(current_freq - freq);
					vco_diff = abs(vco - best_vco);

					if ((best_vco == 0 && error < best_error) ||
					    (best_vco != 0 &&
					     (error < best_error - 100 ||
					      (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) {
						best_post_div = post_div;
						best_ref_div = ref_div;
						best_feedback_div = feedback_div;
						best_frac_feedback_div = frac_feedback_div;
						best_freq = current_freq;
						best_error = error;
						best_vco_diff = vco_diff;
					} else if (current_freq == freq) {
						if (best_freq == -1) {
							best_post_div = post_div;
							best_ref_div = ref_div;
							best_feedback_div = feedback_div;
							best_frac_feedback_div = frac_feedback_div;
							best_freq = current_freq;
							best_error = error;
							best_vco_diff = vco_diff;
						} else if (((flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
							   ((flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
							   ((flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
							   ((flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
							   ((flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
							   ((flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
							best_post_div = post_div;
							best_ref_div = ref_div;
							best_feedback_div = feedback_div;
							best_frac_feedback_div = frac_feedback_div;
							best_freq = current_freq;
							best_error = error;
							best_vco_diff = vco_diff;
						}
					}
					if (current_freq < freq)
						min_frac_feed_div = frac_feedback_div + 1;
					else
						max_frac_feed_div = frac_feedback_div;
				}
				if (current_freq < freq)
					min_feed_div = feedback_div + 1;
				else
					max_feed_div = feedback_div;
			}
		}
	}

	*dot_clock_p = best_freq / 10000;
	*fb_div_p = best_feedback_div;
	*frac_fb_div_p = best_frac_feedback_div;
	*ref_div_p = best_ref_div;
	*post_div_p = best_post_div;
}
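
/*
 * AVIVO PLL divider computation: work in 20.12 fixed point, start from the
 * largest post divider and smallest reference divider that keep the VCO and
 * PLL input in range, then keep increasing the reference divider (dropping
 * the post divider when the input limit is hit) until the produced clock is
 * within 0.25% of the target.
 */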
void radeon_compute_pll_avivo(struct radeon_pll *pll,
			      uint64_t freq,
			      uint32_t *dot_clock_p,
			      uint32_t *fb_div_p,
			      uint32_t *frac_fb_div_p,
			      uint32_t *ref_div_p,
			      uint32_t *post_div_p,
			      int flags)
{
	fixed20_12 m, n, frac_n, p, f_vco, f_pclk, best_freq;
	fixed20_12 pll_out_max, pll_out_min;
	fixed20_12 pll_in_max, pll_in_min;
	fixed20_12 reference_freq;
	fixed20_12 error, ffreq, a, b;

	pll_out_max.full = rfixed_const(pll->pll_out_max);
	pll_out_min.full = rfixed_const(pll->pll_out_min);
	pll_in_max.full = rfixed_const(pll->pll_in_max);
	pll_in_min.full = rfixed_const(pll->pll_in_min);
	reference_freq.full = rfixed_const(pll->reference_freq);
	do_div(freq, 10);
	ffreq.full = rfixed_const(freq);
	error.full = rfixed_const(100 * 100);

	/* max p */
	p.full = rfixed_div(pll_out_max, ffreq);
	p.full = rfixed_floor(p);

	/* min m */
	m.full = rfixed_div(reference_freq, pll_in_max);
	m.full = rfixed_ceil(m);

	while (1) {
		n.full = rfixed_div(ffreq, reference_freq);
		n.full = rfixed_mul(n, m);
		n.full = rfixed_mul(n, p);

		f_vco.full = rfixed_div(n, m);
		f_vco.full = rfixed_mul(f_vco, reference_freq);

		f_pclk.full = rfixed_div(f_vco, p);

		if (f_pclk.full > ffreq.full)
			error.full = f_pclk.full - ffreq.full;
		else
			error.full = ffreq.full - f_pclk.full;
		error.full = rfixed_div(error, f_pclk);
		a.full = rfixed_const(100 * 100);
		error.full = rfixed_mul(error, a);

		a.full = rfixed_mul(m, p);
		a.full = rfixed_div(n, a);
		best_freq.full = rfixed_mul(reference_freq, a);

		if (rfixed_trunc(error) < 25)
			break;

		a.full = rfixed_const(1);
		m.full = m.full + a.full;
		a.full = rfixed_div(reference_freq, m);
		if (a.full >= pll_in_min.full)
			continue;

		m.full = rfixed_div(reference_freq, pll_in_max);
		m.full = rfixed_ceil(m);
		a.full = rfixed_const(1);
		p.full = p.full - a.full;
		a.full = rfixed_mul(p, ffreq);
		if (a.full >= pll_out_min.full)
			continue;
		else {
			DRM_ERROR("Unable to find pll dividers\n");
			break;
		}
	}

	a.full = rfixed_const(10);
	b.full = rfixed_mul(n, a);

	frac_n.full = rfixed_floor(n);
	frac_n.full = rfixed_mul(frac_n, a);
	frac_n.full = b.full - frac_n.full;

	*dot_clock_p = rfixed_trunc(best_freq);
	*fb_div_p = rfixed_trunc(n);
	*frac_fb_div_p = rfixed_trunc(frac_n);
	*ref_div_p = rfixed_trunc(m);
	*post_div_p = rfixed_trunc(p);

	DRM_DEBUG("%u %d.%d, %d, %d\n", *dot_clock_p * 10, *fb_div_p, *frac_fb_div_p, *ref_div_p, *post_div_p);
}

static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb)
{
	struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);
	struct drm_device *dev = fb->dev;

	if (fb->fbdev)
		radeonfb_remove(dev, fb);

	if (radeon_fb->obj) {
		radeon_gem_object_unpin(radeon_fb->obj);
		mutex_lock(&dev->struct_mutex);
		drm_gem_object_unreference(radeon_fb->obj);
		mutex_unlock(&dev->struct_mutex);
	}
	drm_framebuffer_cleanup(fb);
	kfree(radeon_fb);
}

static int radeon_user_framebuffer_create_handle(struct drm_framebuffer *fb,
						 struct drm_file *file_priv,
						 unsigned int *handle)
{
	struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);

	return drm_gem_handle_create(file_priv, radeon_fb->obj, handle);
}

static const struct drm_framebuffer_funcs radeon_fb_funcs = {
	.destroy = radeon_user_framebuffer_destroy,
	.create_handle = radeon_user_framebuffer_create_handle,
};

struct drm_framebuffer *
radeon_framebuffer_create(struct drm_device *dev,
			  struct drm_mode_fb_cmd *mode_cmd,
			  struct drm_gem_object *obj)
{
	struct radeon_framebuffer *radeon_fb;

	radeon_fb = kzalloc(sizeof(*radeon_fb), GFP_KERNEL);
	if (radeon_fb == NULL) {
		return NULL;
	}
	drm_framebuffer_init(dev, &radeon_fb->base, &radeon_fb_funcs);
	drm_helper_mode_fill_fb_struct(&radeon_fb->base, mode_cmd);
	radeon_fb->obj = obj;
	return &radeon_fb->base;
}

static struct drm_framebuffer *
radeon_user_framebuffer_create(struct drm_device *dev,
			       struct drm_file *file_priv,
			       struct drm_mode_fb_cmd *mode_cmd)
{
	struct drm_gem_object *obj;

	obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handle);

	return radeon_framebuffer_create(dev, mode_cmd, obj);
}

static const struct drm_mode_config_funcs radeon_mode_funcs = {
	.fb_create = radeon_user_framebuffer_create,
	.fb_changed = radeonfb_probe,
};

struct drm_prop_enum_list {
	int type;
	char *name;
};

static struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
{	{ 0, "driver" },
	{ 1, "bios" },
};

static struct drm_prop_enum_list radeon_tv_std_enum_list[] =
{	{ TV_STD_NTSC, "ntsc" },
	{ TV_STD_PAL, "pal" },
	{ TV_STD_PAL_M, "pal-m" },
	{ TV_STD_PAL_60, "pal-60" },
	{ TV_STD_NTSC_J, "ntsc-j" },
	{ TV_STD_SCART_PAL, "scart-pal" },
	{ TV_STD_PAL_CN, "pal-cn" },
	{ TV_STD_SECAM, "secam" },
};
static int radeon_modeset_create_props(struct radeon_device *rdev)
{
	int i, sz;

	if (rdev->is_atom_bios) {
		rdev->mode_info.coherent_mode_property =
			drm_property_create(rdev->ddev,
					    DRM_MODE_PROP_RANGE,
					    "coherent", 2);
		if (!rdev->mode_info.coherent_mode_property)
			return -ENOMEM;

		rdev->mode_info.coherent_mode_property->values[0] = 0;
		rdev->mode_info.coherent_mode_property->values[1] = 1;
	}

	if (!ASIC_IS_AVIVO(rdev)) {
		sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
		rdev->mode_info.tmds_pll_property =
			drm_property_create(rdev->ddev,
					    DRM_MODE_PROP_ENUM,
					    "tmds_pll", sz);
		for (i = 0; i < sz; i++) {
			drm_property_add_enum(rdev->mode_info.tmds_pll_property,
					      i,
					      radeon_tmds_pll_enum_list[i].type,
					      radeon_tmds_pll_enum_list[i].name);
		}
	}

	rdev->mode_info.load_detect_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_RANGE,
				    "load detection", 2);
	if (!rdev->mode_info.load_detect_property)
		return -ENOMEM;
	rdev->mode_info.load_detect_property->values[0] = 0;
	rdev->mode_info.load_detect_property->values[1] = 1;

	drm_mode_create_scaling_mode_property(rdev->ddev);

	sz = ARRAY_SIZE(radeon_tv_std_enum_list);
	rdev->mode_info.tv_std_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_ENUM,
				    "tv standard", sz);
	for (i = 0; i < sz; i++) {
		drm_property_add_enum(rdev->mode_info.tv_std_property,
				      i,
				      radeon_tv_std_enum_list[i].type,
				      radeon_tv_std_enum_list[i].name);
	}

	return 0;
}

int radeon_modeset_init(struct radeon_device *rdev)
{
	int num_crtc = 2, i;
	int ret;

	drm_mode_config_init(rdev->ddev);
	rdev->mode_info.mode_config_initialized = true;

	rdev->ddev->mode_config.funcs = (void *)&radeon_mode_funcs;

	if (ASIC_IS_AVIVO(rdev)) {
		rdev->ddev->mode_config.max_width = 8192;
		rdev->ddev->mode_config.max_height = 8192;
	} else {
		rdev->ddev->mode_config.max_width = 4096;
		rdev->ddev->mode_config.max_height = 4096;
	}

	rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;

	ret = radeon_modeset_create_props(rdev);
	if (ret) {
		return ret;
	}

	if (rdev->flags & RADEON_SINGLE_CRTC)
		num_crtc = 1;

	/* allocate crtcs */
	for (i = 0; i < num_crtc; i++) {
		radeon_crtc_init(rdev->ddev, i);
	}

	/* okay we should have all the bios connectors */
	ret = radeon_setup_enc_conn(rdev->ddev);
	if (!ret) {
		return ret;
	}
	/* initialize hpd */
	radeon_hpd_init(rdev);
	drm_helper_initial_config(rdev->ddev);
	return 0;
}

void radeon_modeset_fini(struct radeon_device *rdev)
{
	if (rdev->mode_info.mode_config_initialized) {
		radeon_hpd_fini(rdev);
		drm_mode_config_cleanup(rdev->ddev);
		rdev->mode_info.mode_config_initialized = false;
	}
}
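
/*
 * Pick the CRTC scaling (RMX) setup from the first encoder bound to this
 * CRTC and verify every other bound encoder agrees, then derive the
 * horizontal/vertical scaling ratios from the requested mode and the
 * panel's native mode.
 */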
bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct drm_encoder *encoder;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_encoder *radeon_encoder;
	bool first = true;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		radeon_encoder = to_radeon_encoder(encoder);
		if (encoder->crtc != crtc)
			continue;
		if (first) {
			/* set scaling */
			if (radeon_encoder->rmx_type == RMX_OFF)
				radeon_crtc->rmx_type = RMX_OFF;
			else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
				 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
				radeon_crtc->rmx_type = radeon_encoder->rmx_type;
			else
				radeon_crtc->rmx_type = RMX_OFF;
			/* copy native mode */
			memcpy(&radeon_crtc->native_mode,
			       &radeon_encoder->native_mode,
			       sizeof(struct drm_display_mode));
			first = false;
		} else {
			if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
				/* WARNING: Right now this can't happen but
				 * in the future we need to check that scaling
				 * is consistent across different encoders
				 * (i.e. all encoders can work with the same
				 * scaling).
				 */
				DRM_ERROR("Scaling not consistent across encoders.\n");
				return false;
			}
		}
	}
	if (radeon_crtc->rmx_type != RMX_OFF) {
		fixed20_12 a, b;
		/* scale each axis against the matching axis of the native mode */
		a.full = rfixed_const(crtc->mode.vdisplay);
		b.full = rfixed_const(radeon_crtc->native_mode.vdisplay);
		radeon_crtc->vsc.full = rfixed_div(a, b);
		a.full = rfixed_const(crtc->mode.hdisplay);
		b.full = rfixed_const(radeon_crtc->native_mode.hdisplay);
		radeon_crtc->hsc.full = rfixed_div(a, b);
	} else {
		radeon_crtc->vsc.full = rfixed_const(1);
		radeon_crtc->hsc.full = rfixed_const(1);
	}
	return true;
}