// SPDX-License-Identifier: GPL-2.0-or-later

#include <linux/dma-resv.h>
#include <linux/dma-fence-chain.h>

#include <drm/drm_atomic_state_helper.h>
#include <drm/drm_atomic_uapi.h>
#include <drm/drm_framebuffer.h>
#include <drm/drm_gem.h>
#include <drm/drm_gem_atomic_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_simple_kms_helper.h>

#include "drm_internal.h"

/**
 * DOC: overview
 *
 * The GEM atomic helpers library implements generic atomic-commit
 * functions for drivers that use GEM objects. Currently, it provides
 * synchronization helpers, and plane state and framebuffer BO mappings
 * for planes with shadow buffers.
 *
 * Before scanout, a plane's framebuffer needs to be synchronized with
 * possible writers that draw into the framebuffer. All drivers should
 * call drm_gem_plane_helper_prepare_fb() from their implementation of
 * struct &drm_plane_helper_funcs.prepare_fb. It sets the plane's fence from
 * the framebuffer so that the DRM core can synchronize access automatically.
 * drm_gem_plane_helper_prepare_fb() can also be used directly as the
 * plane's prepare_fb implementation.
 *
 * .. code-block:: c
 *
 *	#include <drm/drm_gem_atomic_helper.h>
 *
 *	struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *		...,
 *		.prepare_fb = drm_gem_plane_helper_prepare_fb,
 *	};
 *
 * A driver using a shadow buffer copies the content of the shadow buffers
 * into the HW's framebuffer memory during an atomic update. This requires
 * a mapping of the shadow buffer into kernel address space. The mappings
 * cannot be established by commit-tail functions, such as atomic_update,
 * as this would violate locking rules around dma_buf_vmap().
 *
 * The helpers for shadow-buffered planes establish and release mappings,
 * and provide struct drm_shadow_plane_state, which stores the plane's mapping
 * for commit-tail functions.
 *
 * Shadow-buffered planes can easily be enabled by using the provided macros
 * %DRM_GEM_SHADOW_PLANE_FUNCS and %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS.
 * These macros set up the plane and plane-helper callbacks to point to the
 * shadow-buffer helpers.
 *
 * .. code-block:: c
 *
 *	#include <drm/drm_gem_atomic_helper.h>
 *
 *	struct drm_plane_funcs driver_plane_funcs = {
 *		...,
 *		DRM_GEM_SHADOW_PLANE_FUNCS,
 *	};
 *
 *	struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *		...,
 *		DRM_GEM_SHADOW_PLANE_HELPER_FUNCS,
 *	};
 *
 * In the driver's atomic-update function, shadow-buffer mappings are available
 * from the plane state. Use to_drm_shadow_plane_state() to upcast from
 * struct drm_plane_state.
 *
 * .. code-block:: c
 *
 *	void driver_plane_atomic_update(struct drm_plane *plane,
 *					struct drm_atomic_state *state)
 *	{
 *		struct drm_plane_state *plane_state =
 *			drm_atomic_get_new_plane_state(state, plane);
 *		struct drm_shadow_plane_state *shadow_plane_state =
 *			to_drm_shadow_plane_state(plane_state);
 *
 *		// access shadow buffer via shadow_plane_state->map
 *	}
 *
 * A mapping address for each of the framebuffer's buffer objects is stored in
 * struct &drm_shadow_plane_state.map. The mappings are valid while the state
 * is being used.
 *
 * Drivers that use struct drm_simple_display_pipe can use
 * %DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS to initialize the respective
 * callbacks. Access to shadow-buffer mappings is similar to regular
 * atomic_update.
 *
 * .. code-block:: c
 *
 *	struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
 *		...,
 *		DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS,
 *	};
 *
 *	void driver_pipe_enable(struct drm_simple_display_pipe *pipe,
 *				struct drm_crtc_state *crtc_state,
 *				struct drm_plane_state *plane_state)
 *	{
 *		struct drm_shadow_plane_state *shadow_plane_state =
 *			to_drm_shadow_plane_state(plane_state);
 *
 *		// access shadow buffer via shadow_plane_state->map
 *	}
 */
/*
 * Plane Helpers
 */

/**
 * drm_gem_plane_helper_prepare_fb() - Prepare a GEM backed framebuffer
 * @plane: Plane
 * @state: Plane state the fence will be attached to
 *
 * This function extracts the exclusive fence from &drm_gem_object.resv and
 * attaches it to plane state for the atomic helper to wait on. This is
 * necessary to correctly implement implicit synchronization for any buffers
 * shared as a struct &dma_buf. This function can be used as the
 * &drm_plane_helper_funcs.prepare_fb callback.
 *
 * There is no need for a &drm_plane_helper_funcs.cleanup_fb hook for simple
 * GEM based framebuffer drivers which have their buffers always pinned in
 * memory.
 *
 * This function is the default implementation for GEM drivers of
 * &drm_plane_helper_funcs.prepare_fb if no callback is provided.
 */
int drm_gem_plane_helper_prepare_fb(struct drm_plane *plane,
				    struct drm_plane_state *state)
{
	struct dma_fence *fence = dma_fence_get(state->fence);
	enum dma_resv_usage usage;
	size_t i;
	int ret;

	if (!state->fb)
		return 0;

	/*
	 * Only add the kernel fences here if there is already a fence set via
	 * explicit fencing interfaces on the atomic ioctl.
	 *
	 * This way explicit fencing can be used to overrule implicit fencing,
	 * which is important to make explicit fencing use-cases work: One
	 * example is using one buffer for 2 screens with different refresh
	 * rates. Implicit fencing will clamp rendering to the refresh rate of
	 * the slower screen, whereas explicit fencing allows 2 independent
	 * render and display loops on a single buffer. If a driver obeys both
	 * implicit and explicit fences for plane updates, then it will break
	 * all the benefits of explicit fencing.
	 */
	usage = fence ? DMA_RESV_USAGE_KERNEL : DMA_RESV_USAGE_WRITE;

	for (i = 0; i < state->fb->format->num_planes; ++i) {
		struct drm_gem_object *obj = drm_gem_fb_get_obj(state->fb, i);
		struct dma_fence *new;

		if (!obj) {
			ret = -EINVAL;
			goto error;
		}

		ret = dma_resv_get_singleton(obj->resv, usage, &new);
		if (ret)
			goto error;

		if (new && fence) {
			struct dma_fence_chain *chain = dma_fence_chain_alloc();

			if (!chain) {
				ret = -ENOMEM;
				goto error;
			}

			dma_fence_chain_init(chain, fence, new, 1);
			fence = &chain->base;

		} else if (new) {
			fence = new;
		}
	}

	dma_fence_put(state->fence);
	state->fence = fence;
	return 0;

error:
	dma_fence_put(fence);
	return ret;
}
EXPORT_SYMBOL_GPL(drm_gem_plane_helper_prepare_fb);
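
/*
 * Example: drivers that need additional per-commit setup can call
 * drm_gem_plane_helper_prepare_fb() from their own prepare_fb
 * implementation. A minimal sketch, where driver_pin_fb() stands in for
 * hypothetical driver-specific pinning and is not part of this library:
 *
 *	static int driver_plane_helper_prepare_fb(struct drm_plane *plane,
 *						  struct drm_plane_state *state)
 *	{
 *		int ret;
 *
 *		// attach fences for implicit/explicit synchronization
 *		ret = drm_gem_plane_helper_prepare_fb(plane, state);
 *		if (ret)
 *			return ret;
 *
 *		// pin the framebuffer's buffer objects (hypothetical,
 *		// driver-specific)
 *		return driver_pin_fb(state->fb);
 *	}
 */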
/*
 * Shadow-buffered Planes
 */

/**
 * __drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @plane: the plane
 * @new_shadow_plane_state: the new shadow-buffered plane state
 *
 * This function duplicates shadow-buffered plane state. This is helpful for
 * drivers that subclass struct drm_shadow_plane_state.
 *
 * The function does not duplicate existing mappings of the shadow buffers.
 * Mappings are maintained during the atomic commit by the plane's begin_fb_access
 * and end_fb_access helpers. See drm_gem_begin_shadow_fb_access() and
 * drm_gem_end_shadow_fb_access() for corresponding helpers.
 */
void
__drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane,
				       struct drm_shadow_plane_state *new_shadow_plane_state)
{
	__drm_atomic_helper_plane_duplicate_state(plane, &new_shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_duplicate_shadow_plane_state);

/**
 * drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @plane: the plane
 *
 * This function implements struct &drm_plane_funcs.atomic_duplicate_state for
 * shadow-buffered planes. It assumes the existing state to be of type
 * struct drm_shadow_plane_state and it allocates the new state to be of this
 * type.
 *
 * The function does not duplicate existing mappings of the shadow buffers.
 * Mappings are maintained during the atomic commit by the plane's begin_fb_access
 * and end_fb_access helpers. See drm_gem_begin_shadow_fb_access() and
 * drm_gem_end_shadow_fb_access() for corresponding helpers.
 *
 * Returns:
 * A pointer to a new plane state on success, or NULL otherwise.
 */
struct drm_plane_state *
drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane)
{
	struct drm_plane_state *plane_state = plane->state;
	struct drm_shadow_plane_state *new_shadow_plane_state;

	if (!plane_state)
		return NULL;

	new_shadow_plane_state = kzalloc(sizeof(*new_shadow_plane_state), GFP_KERNEL);
	if (!new_shadow_plane_state)
		return NULL;
	__drm_gem_duplicate_shadow_plane_state(plane, new_shadow_plane_state);

	return &new_shadow_plane_state->base;
}
EXPORT_SYMBOL(drm_gem_duplicate_shadow_plane_state);

/**
 * __drm_gem_destroy_shadow_plane_state - cleans up shadow-buffered plane state
 * @shadow_plane_state: the shadow-buffered plane state
 *
 * This function cleans up shadow-buffered plane state. This is helpful for
 * drivers that subclass struct drm_shadow_plane_state.
 */
void __drm_gem_destroy_shadow_plane_state(struct drm_shadow_plane_state *shadow_plane_state)
{
	__drm_atomic_helper_plane_destroy_state(&shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_destroy_shadow_plane_state);

/**
 * drm_gem_destroy_shadow_plane_state - deletes shadow-buffered plane state
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_funcs.atomic_destroy_state
 * for shadow-buffered planes. It expects that mappings of shadow buffers
 * have been released already.
 */
void drm_gem_destroy_shadow_plane_state(struct drm_plane *plane,
					struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *shadow_plane_state =
		to_drm_shadow_plane_state(plane_state);

	__drm_gem_destroy_shadow_plane_state(shadow_plane_state);
	kfree(shadow_plane_state);
}
EXPORT_SYMBOL(drm_gem_destroy_shadow_plane_state);
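
/*
 * Example: a driver can subclass struct drm_shadow_plane_state and use the
 * __drm_gem_*_shadow_plane_state() helpers to handle the base class. A
 * minimal sketch, where struct driver_plane_state and its driver_field
 * member are hypothetical:
 *
 *	struct driver_plane_state {
 *		struct drm_shadow_plane_state base;
 *		int driver_field; // hypothetical driver-private state
 *	};
 *
 *	static struct drm_plane_state *
 *	driver_plane_atomic_duplicate_state(struct drm_plane *plane)
 *	{
 *		struct driver_plane_state *new_state, *old_state;
 *
 *		if (!plane->state)
 *			return NULL;
 *		old_state = container_of(plane->state, struct driver_plane_state,
 *					 base.base);
 *
 *		new_state = kzalloc(sizeof(*new_state), GFP_KERNEL);
 *		if (!new_state)
 *			return NULL;
 *		__drm_gem_duplicate_shadow_plane_state(plane, &new_state->base);
 *		new_state->driver_field = old_state->driver_field;
 *
 *		return &new_state->base.base;
 *	}
 */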
/**
 * __drm_gem_reset_shadow_plane - resets a shadow-buffered plane
 * @plane: the plane
 * @shadow_plane_state: the shadow-buffered plane state
 *
 * This function resets state for shadow-buffered planes. This is helpful
 * for drivers that subclass struct drm_shadow_plane_state.
 */
void __drm_gem_reset_shadow_plane(struct drm_plane *plane,
				  struct drm_shadow_plane_state *shadow_plane_state)
{
	__drm_atomic_helper_plane_reset(plane, &shadow_plane_state->base);
}
EXPORT_SYMBOL(__drm_gem_reset_shadow_plane);

/**
 * drm_gem_reset_shadow_plane - resets a shadow-buffered plane
 * @plane: the plane
 *
 * This function implements struct &drm_plane_funcs.reset for
 * shadow-buffered planes. It assumes the current plane state to be
 * of type struct drm_shadow_plane_state and it allocates the new state
 * of this type.
 */
void drm_gem_reset_shadow_plane(struct drm_plane *plane)
{
	struct drm_shadow_plane_state *shadow_plane_state;

	if (plane->state) {
		drm_gem_destroy_shadow_plane_state(plane, plane->state);
		plane->state = NULL; /* must be set to NULL here */
	}

	shadow_plane_state = kzalloc(sizeof(*shadow_plane_state), GFP_KERNEL);
	if (!shadow_plane_state)
		return;
	__drm_gem_reset_shadow_plane(plane, shadow_plane_state);
}
EXPORT_SYMBOL(drm_gem_reset_shadow_plane);

/**
 * drm_gem_begin_shadow_fb_access - prepares shadow framebuffers for CPU access
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_helper_funcs.begin_fb_access. It
 * maps all buffer objects of the plane's framebuffer into kernel address
 * space and stores them in struct &drm_shadow_plane_state.map. The first data
 * bytes are available in struct &drm_shadow_plane_state.data.
 *
 * See drm_gem_end_shadow_fb_access() for cleanup.
 *
 * Returns:
 * 0 on success, or a negative errno code otherwise.
 */
int drm_gem_begin_shadow_fb_access(struct drm_plane *plane, struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
	struct drm_framebuffer *fb = plane_state->fb;

	if (!fb)
		return 0;

	return drm_gem_fb_vmap(fb, shadow_plane_state->map, shadow_plane_state->data);
}
EXPORT_SYMBOL(drm_gem_begin_shadow_fb_access);

/**
 * drm_gem_end_shadow_fb_access - releases shadow framebuffers from CPU access
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_helper_funcs.end_fb_access. It
 * undoes all effects of drm_gem_begin_shadow_fb_access() in reverse order.
 *
 * See drm_gem_begin_shadow_fb_access() for more information.
 */
void drm_gem_end_shadow_fb_access(struct drm_plane *plane, struct drm_plane_state *plane_state)
{
	struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
	struct drm_framebuffer *fb = plane_state->fb;

	if (!fb)
		return;

	drm_gem_fb_vunmap(fb, shadow_plane_state->map);
}
EXPORT_SYMBOL(drm_gem_end_shadow_fb_access);
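
/*
 * Example: with the mappings established by drm_gem_begin_shadow_fb_access(),
 * a commit-tail function can copy the shadow buffer into hardware memory.
 * A minimal sketch, assuming a hypothetical driver_dev->vram destination
 * mapping of type struct iosys_map, a full-plane update, and drm_fb_memcpy()
 * from <drm/drm_format_helper.h>:
 *
 *	static void driver_plane_atomic_update(struct drm_plane *plane,
 *					       struct drm_atomic_state *state)
 *	{
 *		struct drm_plane_state *plane_state =
 *			drm_atomic_get_new_plane_state(state, plane);
 *		struct drm_shadow_plane_state *shadow_plane_state =
 *			to_drm_shadow_plane_state(plane_state);
 *		struct drm_framebuffer *fb = plane_state->fb;
 *		struct drm_rect clip = DRM_RECT_INIT(0, 0, fb->width, fb->height);
 *
 *		// copy the shadow buffer into the hardware framebuffer
 *		drm_fb_memcpy(&driver_dev->vram, NULL,
 *			      shadow_plane_state->data, fb, &clip);
 *	}
 */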
/**
 * drm_gem_simple_kms_begin_shadow_fb_access - prepares shadow framebuffers for CPU access
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.begin_fb_access.
 *
 * See drm_gem_begin_shadow_fb_access() for details and
 * drm_gem_simple_kms_end_shadow_fb_access() for cleanup.
 *
 * Returns:
 * 0 on success, or a negative errno code otherwise.
 */
int drm_gem_simple_kms_begin_shadow_fb_access(struct drm_simple_display_pipe *pipe,
					      struct drm_plane_state *plane_state)
{
	return drm_gem_begin_shadow_fb_access(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_begin_shadow_fb_access);

/**
 * drm_gem_simple_kms_end_shadow_fb_access - releases shadow framebuffers from CPU access
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.end_fb_access.
 * It undoes all effects of drm_gem_simple_kms_begin_shadow_fb_access() in
 * reverse order.
 *
 * See drm_gem_simple_kms_begin_shadow_fb_access().
 */
void drm_gem_simple_kms_end_shadow_fb_access(struct drm_simple_display_pipe *pipe,
					     struct drm_plane_state *plane_state)
{
	drm_gem_end_shadow_fb_access(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_end_shadow_fb_access);

/**
 * drm_gem_simple_kms_reset_shadow_plane - resets a shadow-buffered plane
 * @pipe: the simple display pipe
 *
 * This function implements struct drm_simple_display_pipe_funcs.reset_plane
 * for shadow-buffered planes.
 */
void drm_gem_simple_kms_reset_shadow_plane(struct drm_simple_display_pipe *pipe)
{
	drm_gem_reset_shadow_plane(&pipe->plane);
}
EXPORT_SYMBOL(drm_gem_simple_kms_reset_shadow_plane);

/**
 * drm_gem_simple_kms_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @pipe: the simple display pipe
 *
 * This function implements struct drm_simple_display_pipe_funcs.duplicate_plane_state
 * for shadow-buffered planes. It does not duplicate existing mappings of the shadow
 * buffers. Mappings are maintained during the atomic commit by the pipe's
 * begin_fb_access and end_fb_access helpers.
 *
 * Returns:
 * A pointer to a new plane state on success, or NULL otherwise.
 */
struct drm_plane_state *
drm_gem_simple_kms_duplicate_shadow_plane_state(struct drm_simple_display_pipe *pipe)
{
	return drm_gem_duplicate_shadow_plane_state(&pipe->plane);
}
EXPORT_SYMBOL(drm_gem_simple_kms_duplicate_shadow_plane_state);

/**
 * drm_gem_simple_kms_destroy_shadow_plane_state - destroys shadow-buffered plane state
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.destroy_plane_state
 * for shadow-buffered planes. It expects that mappings of shadow buffers
 * have been released already.
 */
void drm_gem_simple_kms_destroy_shadow_plane_state(struct drm_simple_display_pipe *pipe,
						   struct drm_plane_state *plane_state)
{
	drm_gem_destroy_shadow_plane_state(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_destroy_shadow_plane_state);
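
/*
 * Example: the %DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS macro wires up
 * the helpers above; open-coding it looks roughly like the sketch below.
 * Prefer the macro so that newly added callbacks are picked up automatically.
 *
 *	static const struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
 *		...,
 *		.begin_fb_access = drm_gem_simple_kms_begin_shadow_fb_access,
 *		.end_fb_access = drm_gem_simple_kms_end_shadow_fb_access,
 *		.reset_plane = drm_gem_simple_kms_reset_shadow_plane,
 *		.duplicate_plane_state = drm_gem_simple_kms_duplicate_shadow_plane_state,
 *		.destroy_plane_state = drm_gem_simple_kms_destroy_shadow_plane_state,
 *	};
 */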