/* SPDX-License-Identifier: GPL-2.0 OR MIT */
/**************************************************************************
 *
 * Copyright © 2018 VMware, Inc., Palo Alto, CA., USA
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/
#ifndef _VMWGFX_VALIDATION_H_
#define _VMWGFX_VALIDATION_H_

#include <linux/list.h>
#include <linux/ww_mutex.h>

#include <drm/drm_hashtab.h>
#include <drm/ttm/ttm_execbuf_util.h>

/*
 * Resource dirty-tracking flags, passed as the @dirty argument of
 * vmw_validation_add_resource() and vmw_validation_res_set_dirty().
 */
#define VMW_RES_DIRTY_NONE 0
#define VMW_RES_DIRTY_SET BIT(0)
#define VMW_RES_DIRTY_CLEAR BIT(1)

/**
 * struct vmw_validation_mem - Custom interface to provide memory reservations
 * for the validation code.
 * @reserve_mem: Callback to reserve memory
 * @unreserve_mem: Callback to unreserve memory
 * @gran: Reservation granularity. Contains a hint how much memory should
 * be reserved in each call to @reserve_mem(). A slow implementation may want
 * reservation to be done in large batches.
 */
struct vmw_validation_mem {
	int (*reserve_mem)(struct vmw_validation_mem *m, size_t size);
	void (*unreserve_mem)(struct vmw_validation_mem *m, size_t size);
	size_t gran;
};

/**
 * struct vmw_validation_context - Per command submission validation context
 * @ht: Hash table used to find resource- or buffer object duplicates
 * @resource_list: List head for resource validation metadata
 * @resource_ctx_list: List head for resource validation metadata for
 * resources that need to be validated before those in @resource_list
 * @bo_list: List head for buffer objects
 * @page_list: List of pages used by the memory allocator
 * @ticket: Ticket used for ww mutex locking
 * @res_mutex: Pointer to mutex used for resource reserving
 * @merge_dups: Whether to merge metadata for duplicate resources or
 * buffer objects
 * @mem_size_left: Free memory left in the last page in @page_list
 * @page_address: Kernel virtual address of the last page in @page_list
 * @vm: A pointer to the memory reservation interface or NULL if no
 * memory reservation is needed.
 * @vm_size_left: Amount of reserved memory that so far has not been allocated.
 * @total_mem: Amount of reserved memory.
 */
struct vmw_validation_context {
	struct drm_open_hash *ht;
	struct list_head resource_list;
	struct list_head resource_ctx_list;
	struct list_head bo_list;
	struct list_head page_list;
	struct ww_acquire_ctx ticket;
	struct mutex *res_mutex;
	unsigned int merge_dups;
	unsigned int mem_size_left;
	u8 *page_address;
	struct vmw_validation_mem *vm;
	size_t vm_size_left;
	size_t total_mem;
};

struct vmw_buffer_object;
struct vmw_resource;
struct vmw_fence_obj;

#if 0
/**
 * DECLARE_VAL_CONTEXT - Declare a validation context with initialization
 * @_name: The name of the variable
 * @_ht: The hash table used to find dups or NULL if none
 * @_merge_dups: Whether to merge duplicate buffer object- or resource
 * entries. If set to true, ideally a hash table pointer should be supplied
 * as well unless the number of resources and buffer objects per validation
 * is known to be very small
 */
#endif
#define DECLARE_VAL_CONTEXT(_name, _ht, _merge_dups)			\
	struct vmw_validation_context _name =				\
	{ .ht = _ht,							\
	  .resource_list = LIST_HEAD_INIT((_name).resource_list),	\
	  .resource_ctx_list = LIST_HEAD_INIT((_name).resource_ctx_list), \
	  .bo_list = LIST_HEAD_INIT((_name).bo_list),			\
	  .page_list = LIST_HEAD_INIT((_name).page_list),		\
	  .res_mutex = NULL,						\
	  .merge_dups = _merge_dups,					\
	  .mem_size_left = 0,						\
	}

/**
 * vmw_validation_has_bos - return whether the validation context has
 * any buffer objects registered.
 *
 * @ctx: The validation context
 * Returns: Whether any buffer objects are registered
 */
static inline bool
vmw_validation_has_bos(struct vmw_validation_context *ctx)
{
	return !list_empty(&ctx->bo_list);
}

/**
 * vmw_validation_set_val_mem - Register a validation mem object for
 * validation memory reservation
 * @ctx: The validation context
 * @vm: Pointer to a struct vmw_validation_mem
 *
 * Must be set before the first attempt to allocate validation memory.
 */
static inline void
vmw_validation_set_val_mem(struct vmw_validation_context *ctx,
			   struct vmw_validation_mem *vm)
{
	ctx->vm = vm;
}

/**
 * vmw_validation_set_ht - Register a hash table for duplicate finding
 * @ctx: The validation context
 * @ht: Pointer to a hash table to use for duplicate finding
 *
 * This function is intended to be used if the hash table wasn't
 * available at validation context declaration time
 */
static inline void vmw_validation_set_ht(struct vmw_validation_context *ctx,
					 struct drm_open_hash *ht)
{
	ctx->ht = ht;
}

/**
 * vmw_validation_bo_reserve - Reserve buffer objects registered with a
 * validation context
 * @ctx: The validation context
 * @intr: Perform waits interruptible
 *
 * Return: Zero on success, -ERESTARTSYS when interrupted, negative error
 * code on failure
 */
static inline int
vmw_validation_bo_reserve(struct vmw_validation_context *ctx,
			  bool intr)
{
	return ttm_eu_reserve_buffers(&ctx->ticket, &ctx->bo_list, intr,
				      NULL, true);
}

/**
 * vmw_validation_bo_backoff - Unreserve buffer objects registered with a
 * validation context
 * @ctx: The validation context
 *
 * This function unreserves the buffer objects previously reserved using
 * vmw_validation_bo_reserve.
It's typically used as part of an error path 183 */ 184 static inline void 185 vmw_validation_bo_backoff(struct vmw_validation_context *ctx) 186 { 187 ttm_eu_backoff_reservation(&ctx->ticket, &ctx->bo_list); 188 } 189 190 /** 191 * vmw_validation_bo_fence - Unreserve and fence buffer objects registered 192 * with a validation context 193 * @ctx: The validation context 194 * 195 * This function unreserves the buffer objects previously reserved using 196 * vmw_validation_bo_reserve, and fences them with a fence object. 197 */ 198 static inline void 199 vmw_validation_bo_fence(struct vmw_validation_context *ctx, 200 struct vmw_fence_obj *fence) 201 { 202 ttm_eu_fence_buffer_objects(&ctx->ticket, &ctx->bo_list, 203 (void *) fence); 204 } 205 206 /** 207 * vmw_validation_context_init - Initialize a validation context 208 * @ctx: Pointer to the validation context to initialize 209 * 210 * This function initializes a validation context with @merge_dups set 211 * to false 212 */ 213 static inline void 214 vmw_validation_context_init(struct vmw_validation_context *ctx) 215 { 216 memset(ctx, 0, sizeof(*ctx)); 217 INIT_LIST_HEAD(&ctx->resource_list); 218 INIT_LIST_HEAD(&ctx->resource_ctx_list); 219 INIT_LIST_HEAD(&ctx->bo_list); 220 } 221 222 /** 223 * vmw_validation_align - Align a validation memory allocation 224 * @val: The size to be aligned 225 * 226 * Returns: @val aligned to the granularity used by the validation memory 227 * allocator. 
 */
static inline unsigned int vmw_validation_align(unsigned int val)
{
	/* Round up to the allocator granularity: sizeof(long) bytes. */
	return ALIGN(val, sizeof(long));
}

/* Buffer-object registration and validation entry points. */
int vmw_validation_add_bo(struct vmw_validation_context *ctx,
			  struct vmw_buffer_object *vbo,
			  bool as_mob, bool cpu_blit);
int vmw_validation_bo_validate_single(struct ttm_buffer_object *bo,
				      bool interruptible,
				      bool validate_as_mob);
int vmw_validation_bo_validate(struct vmw_validation_context *ctx, bool intr);
void vmw_validation_unref_lists(struct vmw_validation_context *ctx);

/* Resource registration, reservation and validation entry points. */
int vmw_validation_add_resource(struct vmw_validation_context *ctx,
				struct vmw_resource *res,
				size_t priv_size,
				u32 dirty,
				void **p_node,
				bool *first_usage);
void vmw_validation_drop_ht(struct vmw_validation_context *ctx);
int vmw_validation_res_reserve(struct vmw_validation_context *ctx,
			       bool intr);
void vmw_validation_res_unreserve(struct vmw_validation_context *ctx,
				  bool backoff);
void vmw_validation_res_switch_backup(struct vmw_validation_context *ctx,
				      void *val_private,
				      struct vmw_buffer_object *vbo,
				      unsigned long backup_offset);
int vmw_validation_res_validate(struct vmw_validation_context *ctx, bool intr);

/* Whole-context prepare/revert/done helpers. */
int vmw_validation_prepare(struct vmw_validation_context *ctx,
			   struct mutex *mutex, bool intr);
void vmw_validation_revert(struct vmw_validation_context *ctx);
void vmw_validation_done(struct vmw_validation_context *ctx,
			 struct vmw_fence_obj *fence);

/* Validation-memory allocator and preload helpers. */
void *vmw_validation_mem_alloc(struct vmw_validation_context *ctx,
			       unsigned int size);
int vmw_validation_preload_bo(struct vmw_validation_context *ctx);
int vmw_validation_preload_res(struct vmw_validation_context *ctx,
			       unsigned int size);
void vmw_validation_res_set_dirty(struct vmw_validation_context *ctx,
				  void *val_private, u32 dirty);
#endif