/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 */
#include <drm/drmP.h>
#include <drm/radeon_drm.h>
#include "radeon.h"

/* Lock or unlock hardware cursor updates so that position, hot spot and
 * surface address changes latch atomically instead of being picked up
 * mid-update by the display hardware.
 */
static void radeon_lock_cursor(struct drm_crtc *crtc, bool lock)
{
	struct radeon_device *rdev = crtc->dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	uint32_t cur_lock;

	if (ASIC_IS_DCE4(rdev)) {
		cur_lock = RREG32(EVERGREEN_CUR_UPDATE + radeon_crtc->crtc_offset);
		if (lock)
			cur_lock |= EVERGREEN_CURSOR_UPDATE_LOCK;
		else
			cur_lock &= ~EVERGREEN_CURSOR_UPDATE_LOCK;
		WREG32(EVERGREEN_CUR_UPDATE + radeon_crtc->crtc_offset, cur_lock);
	} else if (ASIC_IS_AVIVO(rdev)) {
		cur_lock = RREG32(AVIVO_D1CUR_UPDATE + radeon_crtc->crtc_offset);
		if (lock)
			cur_lock |= AVIVO_D1CURSOR_UPDATE_LOCK;
		else
			cur_lock &= ~AVIVO_D1CURSOR_UPDATE_LOCK;
		WREG32(AVIVO_D1CUR_UPDATE + radeon_crtc->crtc_offset, cur_lock);
	} else {
		cur_lock = RREG32(RADEON_CUR_OFFSET + radeon_crtc->crtc_offset);
		if (lock)
			cur_lock |= RADEON_CUR_LOCK;
		else
			cur_lock &= ~RADEON_CUR_LOCK;
		WREG32(RADEON_CUR_OFFSET + radeon_crtc->crtc_offset, cur_lock);
	}
}

/* Disable the hardware cursor on this CRTC. */
static void radeon_hide_cursor(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_device *rdev = crtc->dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		WREG32_IDX(EVERGREEN_CUR_CONTROL + radeon_crtc->crtc_offset,
			   EVERGREEN_CURSOR_MODE(EVERGREEN_CURSOR_24_8_PRE_MULT) |
			   EVERGREEN_CURSOR_URGENT_CONTROL(EVERGREEN_CURSOR_URGENT_1_2));
	} else if (ASIC_IS_AVIVO(rdev)) {
		WREG32_IDX(AVIVO_D1CUR_CONTROL + radeon_crtc->crtc_offset,
			   (AVIVO_D1CURSOR_MODE_24BPP << AVIVO_D1CURSOR_MODE_SHIFT));
	} else {
		u32 reg;

		switch (radeon_crtc->crtc_id) {
		case 0:
			reg = RADEON_CRTC_GEN_CNTL;
			break;
		case 1:
			reg = RADEON_CRTC2_GEN_CNTL;
			break;
		default:
			return;
		}
		WREG32_IDX(reg, RREG32_IDX(reg) & ~RADEON_CRTC_CUR_EN);
	}
}

/* Enable the hardware cursor on this CRTC (pre-multiplied ARGB on DCE4+,
 * 24bpp cursor mode otherwise).
 */
static void radeon_show_cursor(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_device *rdev = crtc->dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		WREG32(RADEON_MM_INDEX, EVERGREEN_CUR_CONTROL + radeon_crtc->crtc_offset);
		WREG32(RADEON_MM_DATA, EVERGREEN_CURSOR_EN |
		       EVERGREEN_CURSOR_MODE(EVERGREEN_CURSOR_24_8_PRE_MULT) |
		       EVERGREEN_CURSOR_URGENT_CONTROL(EVERGREEN_CURSOR_URGENT_1_2));
	} else if (ASIC_IS_AVIVO(rdev)) {
		WREG32(RADEON_MM_INDEX, AVIVO_D1CUR_CONTROL + radeon_crtc->crtc_offset);
		WREG32(RADEON_MM_DATA, AVIVO_D1CURSOR_EN |
		       (AVIVO_D1CURSOR_MODE_24BPP << AVIVO_D1CURSOR_MODE_SHIFT));
	} else {
		switch (radeon_crtc->crtc_id) {
		case 0:
			WREG32(RADEON_MM_INDEX, RADEON_CRTC_GEN_CNTL);
			break;
		case 1:
			WREG32(RADEON_MM_INDEX, RADEON_CRTC2_GEN_CNTL);
			break;
		default:
			return;
		}

		WREG32_P(RADEON_MM_DATA, (RADEON_CRTC_CUR_EN |
					  (RADEON_CRTC_CUR_MODE_24BPP << RADEON_CRTC_CUR_MODE_SHIFT)),
			 ~(RADEON_CRTC_CUR_EN | RADEON_CRTC_CUR_MODE_MASK));
	}
}

/* Program the cursor position and hot spot.  Caller must hold the cursor
 * update lock via radeon_lock_cursor().  Negative screen coordinates are
 * handled by clamping the position to zero and moving the hot spot instead.
 */
static int radeon_cursor_move_locked(struct drm_crtc *crtc, int x, int y)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_device *rdev = crtc->dev->dev_private;
	int xorigin = 0, yorigin = 0;
	int w = radeon_crtc->cursor_width;

	if (ASIC_IS_AVIVO(rdev)) {
		/* avivo cursors are offset into the total surface */
		x += crtc->x;
		y += crtc->y;
	}
	DRM_DEBUG("x %d y %d c->x %d c->y %d\n", x, y, crtc->x, crtc->y);

	if (x < 0) {
		xorigin = min(-x, radeon_crtc->max_cursor_width - 1);
		x = 0;
	}
	if (y < 0) {
		yorigin = min(-y, radeon_crtc->max_cursor_height - 1);
		y = 0;
	}

	/* fixed on DCE6 and newer */
	if (ASIC_IS_AVIVO(rdev) && !ASIC_IS_DCE6(rdev)) {
		int i = 0;
		struct drm_crtc *crtc_p;

		/*
		 * avivo cursor image can't end on a 128 pixel boundary or
		 * go past the end of the frame if both crtcs are enabled
		 *
		 * NOTE: It is safe to access crtc->enabled of other crtcs
		 * without holding either the mode_config lock or the other
		 * crtc's lock as long as write access to this flag _always_
		 * grabs all locks.
		 */
		list_for_each_entry(crtc_p, &crtc->dev->mode_config.crtc_list, head) {
			if (crtc_p->enabled)
				i++;
		}
		if (i > 1) {
			int cursor_end, frame_end;

			cursor_end = x - xorigin + w;
			frame_end = crtc->x + crtc->mode.crtc_hdisplay;
			if (cursor_end >= frame_end) {
				w = w - (cursor_end - frame_end);
				if (!(frame_end & 0x7f))
					w--;
			} else {
				if (!(cursor_end & 0x7f))
					w--;
			}
			if (w <= 0) {
				w = 1;
				cursor_end = x - xorigin + w;
				if (!(cursor_end & 0x7f)) {
					x--;
					WARN_ON_ONCE(x < 0);
				}
			}
		}
	}

	if (ASIC_IS_DCE4(rdev)) {
		WREG32(EVERGREEN_CUR_POSITION + radeon_crtc->crtc_offset, (x << 16) | y);
		WREG32(EVERGREEN_CUR_HOT_SPOT + radeon_crtc->crtc_offset, (xorigin << 16) | yorigin);
		WREG32(EVERGREEN_CUR_SIZE + radeon_crtc->crtc_offset,
		       ((w - 1) << 16) | (radeon_crtc->cursor_height - 1));
	} else if (ASIC_IS_AVIVO(rdev)) {
		WREG32(AVIVO_D1CUR_POSITION + radeon_crtc->crtc_offset, (x << 16) | y);
		WREG32(AVIVO_D1CUR_HOT_SPOT + radeon_crtc->crtc_offset, (xorigin << 16) | yorigin);
		WREG32(AVIVO_D1CUR_SIZE + radeon_crtc->crtc_offset,
		       ((w - 1) << 16) | (radeon_crtc->cursor_height - 1));
	} else {
		if (crtc->mode.flags & DRM_MODE_FLAG_DBLSCAN)
			y *= 2;

		WREG32(RADEON_CUR_HORZ_VERT_OFF + radeon_crtc->crtc_offset,
		       (RADEON_CUR_LOCK
			| (xorigin << 16)
			| yorigin));
		WREG32(RADEON_CUR_HORZ_VERT_POSN + radeon_crtc->crtc_offset,
		       (RADEON_CUR_LOCK
			| (x << 16)
			| y));
		/* offset is from DISP(2)_BASE_ADDRESS */
		WREG32(RADEON_CUR_OFFSET + radeon_crtc->crtc_offset, (radeon_crtc->legacy_cursor_offset +
								      (yorigin * 256)));
	}

	radeon_crtc->cursor_x = x;
	radeon_crtc->cursor_y = y;

	return 0;
}

/* Move the cursor, taking the cursor update lock around the programming. */
int radeon_crtc_cursor_move(struct drm_crtc *crtc,
			    int x, int y)
{
	int ret;

	radeon_lock_cursor(crtc, true);
	ret = radeon_cursor_move_locked(crtc, x, y);
	radeon_lock_cursor(crtc, false);

	return ret;
}

/* Pin the cursor BO in VRAM and program its GPU address as the cursor
 * surface for this CRTC.  On failure the GEM object reference is dropped.
 */
static int radeon_set_cursor(struct drm_crtc *crtc, struct drm_gem_object *obj)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_device *rdev = crtc->dev->dev_private;
	struct radeon_bo *robj = gem_to_radeon_bo(obj);
	uint64_t gpu_addr;
	int ret;

	ret = radeon_bo_reserve(robj, false);
	if (unlikely(ret != 0))
		goto fail;
	/* Only 27 bit offset for legacy cursor */
	ret = radeon_bo_pin_restricted(robj, RADEON_GEM_DOMAIN_VRAM,
				       ASIC_IS_AVIVO(rdev) ?
				       0 : 1 << 27,
				       &gpu_addr);
	radeon_bo_unreserve(robj);
	if (ret)
		goto fail;

	if (ASIC_IS_DCE4(rdev)) {
		WREG32(EVERGREEN_CUR_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
		       upper_32_bits(gpu_addr));
		WREG32(EVERGREEN_CUR_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
		       gpu_addr & 0xffffffff);
	} else if (ASIC_IS_AVIVO(rdev)) {
		if (rdev->family >= CHIP_RV770) {
			if (radeon_crtc->crtc_id)
				WREG32(R700_D2CUR_SURFACE_ADDRESS_HIGH, upper_32_bits(gpu_addr));
			else
				WREG32(R700_D1CUR_SURFACE_ADDRESS_HIGH, upper_32_bits(gpu_addr));
		}
		WREG32(AVIVO_D1CUR_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
		       gpu_addr & 0xffffffff);
	} else {
		radeon_crtc->legacy_cursor_offset = gpu_addr - radeon_crtc->legacy_display_base_addr;
		/* offset is from DISP(2)_BASE_ADDRESS */
		WREG32(RADEON_CUR_OFFSET + radeon_crtc->crtc_offset, radeon_crtc->legacy_cursor_offset);
	}

	return 0;

fail:
	drm_gem_object_unreference_unlocked(obj);

	return ret;
}

/* Set a new cursor buffer, size and hot spot for this CRTC, then unpin and
 * release the previously installed cursor BO (if any).  A zero handle turns
 * the cursor off.
 */
int radeon_crtc_cursor_set2(struct drm_crtc *crtc,
			    struct drm_file *file_priv,
			    uint32_t handle,
			    uint32_t width,
			    uint32_t height,
			    int32_t hot_x,
			    int32_t hot_y)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_gem_object *obj;
	int ret;

	if (!handle) {
		/* turn off cursor */
		radeon_hide_cursor(crtc);
		obj = NULL;
		goto unpin;
	}

	if ((width > radeon_crtc->max_cursor_width) ||
	    (height > radeon_crtc->max_cursor_height)) {
		DRM_ERROR("bad cursor width or height %d x %d\n", width, height);
		return -EINVAL;
	}

	obj = drm_gem_object_lookup(crtc->dev, file_priv, handle);
	if (!obj) {
		DRM_ERROR("Cannot find cursor object %x for crtc %d\n", handle, radeon_crtc->crtc_id);
		return -ENOENT;
	}

	radeon_crtc->cursor_width = width;
	radeon_crtc->cursor_height = height;

	radeon_lock_cursor(crtc, true);

	if (hot_x != radeon_crtc->cursor_hot_x ||
	    hot_y != radeon_crtc->cursor_hot_y) {
		int x, y;

		x = radeon_crtc->cursor_x + radeon_crtc->cursor_hot_x - hot_x;
		y = radeon_crtc->cursor_y + radeon_crtc->cursor_hot_y - hot_y;

		radeon_cursor_move_locked(crtc, x, y);

		radeon_crtc->cursor_hot_x = hot_x;
		radeon_crtc->cursor_hot_y = hot_y;
	}

	ret = radeon_set_cursor(crtc, obj);

	if (ret)
		DRM_ERROR("radeon_set_cursor returned %d, not changing cursor\n",
			  ret);
	else
		radeon_show_cursor(crtc);

	radeon_lock_cursor(crtc, false);

unpin:
	if (radeon_crtc->cursor_bo) {
		struct radeon_bo *robj = gem_to_radeon_bo(radeon_crtc->cursor_bo);
		ret = radeon_bo_reserve(robj, false);
		if (likely(ret == 0)) {
			radeon_bo_unpin(robj);
			radeon_bo_unreserve(robj);
		}
		if (radeon_crtc->cursor_bo != obj)
			drm_gem_object_unreference_unlocked(radeon_crtc->cursor_bo);
	}

	radeon_crtc->cursor_bo = obj;
	return 0;
}

/**
 * radeon_cursor_reset - Re-set the current cursor, if any.
 *
 * @crtc: drm crtc
 *
 * If the CRTC passed in currently has a cursor assigned, this function
 * makes sure it's visible.
 */
void radeon_cursor_reset(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	int ret;

	if (radeon_crtc->cursor_bo) {
		radeon_lock_cursor(crtc, true);

		radeon_cursor_move_locked(crtc, radeon_crtc->cursor_x,
					  radeon_crtc->cursor_y);

		ret = radeon_set_cursor(crtc, radeon_crtc->cursor_bo);
		if (ret)
			DRM_ERROR("radeon_set_cursor returned %d, not showing "
				  "cursor\n", ret);
		else
			radeon_show_cursor(crtc);

		radeon_lock_cursor(crtc, false);
	}
}