1dff96888SDirk Hohndel (VMware) // SPDX-License-Identifier: GPL-2.0 OR MIT 2d80efd5cSThomas Hellstrom /************************************************************************** 3d80efd5cSThomas Hellstrom * 4dff96888SDirk Hohndel (VMware) * Copyright 2015 VMware, Inc., Palo Alto, CA., USA 5d80efd5cSThomas Hellstrom * 6d80efd5cSThomas Hellstrom * Permission is hereby granted, free of charge, to any person obtaining a 7d80efd5cSThomas Hellstrom * copy of this software and associated documentation files (the 8d80efd5cSThomas Hellstrom * "Software"), to deal in the Software without restriction, including 9d80efd5cSThomas Hellstrom * without limitation the rights to use, copy, modify, merge, publish, 10d80efd5cSThomas Hellstrom * distribute, sub license, and/or sell copies of the Software, and to 11d80efd5cSThomas Hellstrom * permit persons to whom the Software is furnished to do so, subject to 12d80efd5cSThomas Hellstrom * the following conditions: 13d80efd5cSThomas Hellstrom * 14d80efd5cSThomas Hellstrom * The above copyright notice and this permission notice (including the 15d80efd5cSThomas Hellstrom * next paragraph) shall be included in all copies or substantial portions 16d80efd5cSThomas Hellstrom * of the Software. 17d80efd5cSThomas Hellstrom * 18d80efd5cSThomas Hellstrom * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19d80efd5cSThomas Hellstrom * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20d80efd5cSThomas Hellstrom * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL 21d80efd5cSThomas Hellstrom * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, 22d80efd5cSThomas Hellstrom * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 23d80efd5cSThomas Hellstrom * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE 24d80efd5cSThomas Hellstrom * USE OR OTHER DEALINGS IN THE SOFTWARE. 
25d80efd5cSThomas Hellstrom * 26d80efd5cSThomas Hellstrom **************************************************************************/ 27d80efd5cSThomas Hellstrom /* 28d80efd5cSThomas Hellstrom * This file implements the vmwgfx context binding manager, 29d80efd5cSThomas Hellstrom * The sole reason for having to use this code is that vmware guest 30d80efd5cSThomas Hellstrom * backed contexts can be swapped out to their backing mobs by the device 31d80efd5cSThomas Hellstrom * at any time, also swapped in at any time. At swapin time, the device 32d80efd5cSThomas Hellstrom * validates the context bindings to make sure they point to valid resources. 33d80efd5cSThomas Hellstrom * It's this outside-of-drawcall validation (that can happen at any time), 34d80efd5cSThomas Hellstrom * that makes this code necessary. 35d80efd5cSThomas Hellstrom * 36d80efd5cSThomas Hellstrom * We therefore need to kill any context bindings pointing to a resource 37d80efd5cSThomas Hellstrom * when the resource is swapped out. Furthermore, if the vmwgfx driver has 38d80efd5cSThomas Hellstrom * swapped out the context we can't swap it in again to kill bindings because 39d80efd5cSThomas Hellstrom * of backing mob reservation lockdep violations, so as part of 40d80efd5cSThomas Hellstrom * context swapout, also kill all bindings of a context, so that they are 41d80efd5cSThomas Hellstrom * already killed if a resource to which a binding points 42d80efd5cSThomas Hellstrom * needs to be swapped out. 43d80efd5cSThomas Hellstrom * 44d80efd5cSThomas Hellstrom * Note that a resource can be pointed to by bindings from multiple contexts, 45d80efd5cSThomas Hellstrom * Therefore we can't easily protect this data by a per context mutex 46d80efd5cSThomas Hellstrom * (unless we use deadlock-safe WW mutexes). So we use a global binding_mutex 47d80efd5cSThomas Hellstrom * to protect all binding manager data. 
 *
 * Finally, any association between a context and a global resource
 * (surface, shader or even DX query) is conceptually a context binding that
 * needs to be tracked by this code.
 */

#include "vmwgfx_drv.h"
#include "vmwgfx_binding.h"
#include "device_include/svga3d_reg.h"

/* Bits in vmw_ctx_binding_state::dirty, one per deferred-emit binding type. */
#define VMW_BINDING_RT_BIT     0
#define VMW_BINDING_PS_BIT     1
#define VMW_BINDING_SO_T_BIT   2
#define VMW_BINDING_VB_BIT     3
#define VMW_BINDING_UAV_BIT    4
#define VMW_BINDING_CS_UAV_BIT 5
#define VMW_BINDING_NUM_BITS   6

/* Bit in vmw_dx_shader_bindings::dirty, tracking dirty shader resources. */
#define VMW_BINDING_PS_SR_BIT  0

/**
 * struct vmw_ctx_binding_state - per context binding state
 *
 * @dev_priv: Pointer to device private structure.
 * @list: linked list of individual active bindings.
 * @render_targets: Render target bindings.
 * @texture_units: Texture units bindings.
 * @ds_view: Depth-stencil view binding.
 * @so_targets: StreamOutput target bindings.
 * @vertex_buffers: Vertex buffer bindings.
 * @index_buffer: Index buffer binding.
 * @per_shader: Per shader-type bindings.
 * @ua_views: UAV bindings.
 * @dirty: Bitmap tracking per binding-type changes that have not yet
 * been emitted to the device.
 * @dirty_vb: Bitmap tracking individual vertex buffer binding changes that
 * have not yet been emitted to the device.
 * @bind_cmd_buffer: Scratch space used to construct binding commands.
 * @bind_cmd_count: Number of binding command data entries in @bind_cmd_buffer
 * @bind_first_slot: Used together with @bind_cmd_buffer to indicate the
 * device binding slot of the first command data entry in @bind_cmd_buffer.
 *
 * Note that this structure also provides storage space for the individual
 * struct vmw_ctx_binding objects, so that no dynamic allocation is needed
 * for individual bindings.
 *
 */
struct vmw_ctx_binding_state {
	struct vmw_private *dev_priv;
	struct list_head list;
	struct vmw_ctx_bindinfo_view render_targets[SVGA3D_RT_MAX];
	struct vmw_ctx_bindinfo_tex texture_units[SVGA3D_NUM_TEXTURE_UNITS];
	struct vmw_ctx_bindinfo_view ds_view;
	struct vmw_ctx_bindinfo_so_target so_targets[SVGA3D_DX_MAX_SOTARGETS];
	struct vmw_ctx_bindinfo_vb vertex_buffers[SVGA3D_DX_MAX_VERTEXBUFFERS];
	struct vmw_ctx_bindinfo_ib index_buffer;
	struct vmw_dx_shader_bindings per_shader[SVGA3D_NUM_SHADERTYPE];
	struct vmw_ctx_bindinfo_uav ua_views[VMW_MAX_UAV_BIND_TYPE];

	unsigned long dirty;
	DECLARE_BITMAP(dirty_vb, SVGA3D_DX_MAX_VERTEXBUFFERS);

	u32 bind_cmd_buffer[VMW_MAX_VIEW_BINDINGS];
	u32 bind_cmd_count;
	u32 bind_first_slot;
};

/* Forward declarations of the per binding-type scrub functions. */
static int vmw_binding_scrub_shader(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_render_target(struct vmw_ctx_bindinfo *bi,
					   bool rebind);
static int vmw_binding_scrub_texture(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_cb(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_dx_rt(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_sr(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_so_target(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_emit_dirty(struct vmw_ctx_binding_state *cbs);
static int vmw_binding_scrub_dx_shader(struct vmw_ctx_bindinfo *bi,
				       bool rebind);
static int vmw_binding_scrub_ib(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_vb(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_uav(struct vmw_ctx_bindinfo *bi, bool rebind);
static int vmw_binding_scrub_cs_uav(struct vmw_ctx_bindinfo *bi, bool rebind);

static void vmw_binding_build_asserts(void) __attribute__ ((unused));

typedef int (*vmw_scrub_func)(struct vmw_ctx_bindinfo *, bool);

/**
 * struct vmw_binding_info - Per binding type information for the binding
 * manager
 *
 * @size: The size of the struct binding derived from a struct vmw_ctx_bindinfo.
 * @offsets: array[shader_slot] of offsets to the array[slot]
 * of struct bindings for the binding type.
 * @scrub_func: Pointer to the scrub function for this binding type.
 *
 * Holds static information to help optimize the binding manager and avoid
 * an excessive amount of switch statements.
 */
struct vmw_binding_info {
	size_t size;
	const size_t *offsets;
	vmw_scrub_func scrub_func;
};

/*
 * A number of static variables that help determine the scrub func and the
 * location of the struct vmw_ctx_bindinfo slots for each binding type.
 */
static const size_t vmw_binding_shader_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, per_shader[0].shader),
	offsetof(struct vmw_ctx_binding_state, per_shader[1].shader),
	offsetof(struct vmw_ctx_binding_state, per_shader[2].shader),
	offsetof(struct vmw_ctx_binding_state, per_shader[3].shader),
	offsetof(struct vmw_ctx_binding_state, per_shader[4].shader),
	offsetof(struct vmw_ctx_binding_state, per_shader[5].shader),
};
static const size_t vmw_binding_rt_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, render_targets),
};
static const size_t vmw_binding_tex_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, texture_units),
};
static const size_t vmw_binding_cb_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, per_shader[0].const_buffers),
	offsetof(struct vmw_ctx_binding_state, per_shader[1].const_buffers),
	offsetof(struct vmw_ctx_binding_state, per_shader[2].const_buffers),
	offsetof(struct vmw_ctx_binding_state, per_shader[3].const_buffers),
	offsetof(struct vmw_ctx_binding_state, per_shader[4].const_buffers),
	offsetof(struct vmw_ctx_binding_state, per_shader[5].const_buffers),
};
static const size_t vmw_binding_dx_ds_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, ds_view),
};
static const size_t vmw_binding_sr_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, per_shader[0].shader_res),
	offsetof(struct vmw_ctx_binding_state, per_shader[1].shader_res),
	offsetof(struct vmw_ctx_binding_state, per_shader[2].shader_res),
	offsetof(struct vmw_ctx_binding_state, per_shader[3].shader_res),
	offsetof(struct vmw_ctx_binding_state, per_shader[4].shader_res),
	offsetof(struct vmw_ctx_binding_state, per_shader[5].shader_res),
};
static const size_t vmw_binding_so_target_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, so_targets),
};
static const size_t vmw_binding_vb_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, vertex_buffers),
};
static const size_t vmw_binding_ib_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, index_buffer),
};
static const size_t vmw_binding_uav_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, ua_views[0].views),
};
static const size_t vmw_binding_cs_uav_offsets[] = {
	offsetof(struct vmw_ctx_binding_state, ua_views[1].views),
};

/*
 * Dispatch table indexed by enum vmw_ctx_binding_type. Gives, per binding
 * type, the tracked binding struct size, the slot-offset table and the
 * scrub function.
 */
static const struct vmw_binding_info vmw_binding_infos[] = {
	[vmw_ctx_binding_shader] = {
		.size = sizeof(struct vmw_ctx_bindinfo_shader),
		.offsets = vmw_binding_shader_offsets,
		.scrub_func = vmw_binding_scrub_shader},
	[vmw_ctx_binding_rt] = {
		.size = sizeof(struct vmw_ctx_bindinfo_view),
		.offsets = vmw_binding_rt_offsets,
		.scrub_func = vmw_binding_scrub_render_target},
	[vmw_ctx_binding_tex] = {
		.size = sizeof(struct vmw_ctx_bindinfo_tex),
		.offsets = vmw_binding_tex_offsets,
		.scrub_func = vmw_binding_scrub_texture},
	[vmw_ctx_binding_cb] = {
		.size = sizeof(struct vmw_ctx_bindinfo_cb),
		.offsets = vmw_binding_cb_offsets,
		.scrub_func = vmw_binding_scrub_cb},
	[vmw_ctx_binding_dx_shader] = {
		.size = sizeof(struct vmw_ctx_bindinfo_shader),
		.offsets = vmw_binding_shader_offsets,
		.scrub_func = vmw_binding_scrub_dx_shader},
	[vmw_ctx_binding_dx_rt] = {
		.size = sizeof(struct vmw_ctx_bindinfo_view),
		.offsets = vmw_binding_rt_offsets,
		.scrub_func = vmw_binding_scrub_dx_rt},
	[vmw_ctx_binding_sr] = {
		.size = sizeof(struct vmw_ctx_bindinfo_view),
		.offsets = vmw_binding_sr_offsets,
		.scrub_func = vmw_binding_scrub_sr},
	[vmw_ctx_binding_ds] = {
		.size = sizeof(struct vmw_ctx_bindinfo_view),
		.offsets = vmw_binding_dx_ds_offsets,
		/* Depth-stencil views share the DX render-target scrub path. */
		.scrub_func = vmw_binding_scrub_dx_rt},
	[vmw_ctx_binding_so_target] = {
		.size = sizeof(struct vmw_ctx_bindinfo_so_target),
		.offsets = vmw_binding_so_target_offsets,
		.scrub_func = vmw_binding_scrub_so_target},
	[vmw_ctx_binding_vb] = {
		.size = sizeof(struct vmw_ctx_bindinfo_vb),
		.offsets = vmw_binding_vb_offsets,
		.scrub_func = vmw_binding_scrub_vb},
	[vmw_ctx_binding_ib] = {
		.size = sizeof(struct vmw_ctx_bindinfo_ib),
		.offsets = vmw_binding_ib_offsets,
		.scrub_func = vmw_binding_scrub_ib},
	[vmw_ctx_binding_uav] = {
		.size = sizeof(struct vmw_ctx_bindinfo_view),
		.offsets = vmw_binding_uav_offsets,
		.scrub_func = vmw_binding_scrub_uav},
	[vmw_ctx_binding_cs_uav] = {
		.size = sizeof(struct vmw_ctx_bindinfo_view),
		.offsets = vmw_binding_cs_uav_offsets,
		.scrub_func = vmw_binding_scrub_cs_uav},
};

/**
 * vmw_cbs_context - Return a pointer to the context resource of a
 * context binding state tracker.
 *
 * @cbs: The context binding state tracker.
 *
 * Provided there are any active bindings, this function will return an
 * unreferenced pointer to the context resource that owns the context
 * binding state tracker. If there are no active bindings, this function
 * will return NULL. Note that the caller must somehow ensure that a reference
 * is held on the context resource prior to calling this function.
 */
static const struct vmw_resource *
vmw_cbs_context(const struct vmw_ctx_binding_state *cbs)
{
	if (list_empty(&cbs->list))
		return NULL;

	/* All bindings on the list belong to the same context. */
	return list_first_entry(&cbs->list, struct vmw_ctx_bindinfo,
				ctx_list)->ctx;
}

/**
 * vmw_binding_loc - determine the struct vmw_ctx_bindinfo slot location.
 *
 * @cbs: Pointer to a struct vmw_ctx_binding state which holds the slot.
 * @bt: The binding type.
 * @shader_slot: The shader slot of the binding. If none, then set to 0.
 * @slot: The slot of the binding.
 *
 * Return: Pointer to the storage slot inside @cbs for the given binding
 * type / shader slot / slot combination, computed from the static offset
 * tables in vmw_binding_infos.
 */
static struct vmw_ctx_bindinfo *
vmw_binding_loc(struct vmw_ctx_binding_state *cbs,
		enum vmw_ctx_binding_type bt, u32 shader_slot, u32 slot)
{
	const struct vmw_binding_info *b = &vmw_binding_infos[bt];
	size_t offset = b->offsets[shader_slot] + b->size*slot;

	return (struct vmw_ctx_bindinfo *)((u8 *) cbs + offset);
}

/**
 * vmw_binding_drop: Stop tracking a context binding
 *
 * @bi: Pointer to binding tracker storage.
 *
 * Stops tracking a context binding, and re-initializes its storage.
 * Typically used when the context binding is replaced with a binding to
 * another (or the same, for that matter) resource.
 */
static void vmw_binding_drop(struct vmw_ctx_bindinfo *bi)
{
	list_del(&bi->ctx_list);
	/* The binding may not be on any resource's binding list. */
	if (!list_empty(&bi->res_list))
		list_del(&bi->res_list);
	/* NULL ctx marks the slot as unused. */
	bi->ctx = NULL;
}

/**
 * vmw_binding_add: Start tracking a context binding
 *
 * @cbs: Pointer to the context binding state tracker.
 * @bi: Information about the binding to track.
 * @shader_slot: The shader slot of the binding. If none, then set to 0.
 * @slot: The slot of the binding.
 *
 * Starts tracking the binding in the context binding
 * state structure @cbs.
 */
void vmw_binding_add(struct vmw_ctx_binding_state *cbs,
		     const struct vmw_ctx_bindinfo *bi,
		     u32 shader_slot, u32 slot)
{
	struct vmw_ctx_bindinfo *loc =
		vmw_binding_loc(cbs, bi->bt, shader_slot, slot);
	const struct vmw_binding_info *b = &vmw_binding_infos[bi->bt];

	/* Replace any binding already occupying this slot. */
	if (loc->ctx != NULL)
		vmw_binding_drop(loc);

	memcpy(loc, bi, b->size);
	loc->scrubbed = false;
	list_add(&loc->ctx_list, &cbs->list);
	/* Not yet on any resource binding list. */
	INIT_LIST_HEAD(&loc->res_list);
}

/**
 * vmw_binding_add_uav_index - Add UAV index for tracking.
 * @cbs: Pointer to the context binding state tracker.
 * @slot: UAV type to which bind this index.
 * @index: The splice index to track.
 */
void vmw_binding_add_uav_index(struct vmw_ctx_binding_state *cbs, uint32 slot,
			       uint32 index)
{
	cbs->ua_views[slot].index = index;
}

/**
 * vmw_binding_transfer: Transfer a context binding tracking entry.
 *
 * @cbs: Pointer to the persistent context binding state tracker.
 * @from: Pointer to the staged context binding state tracker @bi lives in.
 * @bi: Information about the binding to track.
 *
 */
static void vmw_binding_transfer(struct vmw_ctx_binding_state *cbs,
				 const struct vmw_ctx_binding_state *from,
				 const struct vmw_ctx_bindinfo *bi)
{
	/*
	 * @bi is stored inside @from; reuse its byte offset to locate the
	 * corresponding slot in the persistent tracker @cbs.
	 */
	size_t offset = (unsigned long)bi - (unsigned long)from;
	struct vmw_ctx_bindinfo *loc = (struct vmw_ctx_bindinfo *)
		((unsigned long) cbs + offset);

	if (loc->ctx != NULL) {
		WARN_ON(bi->scrubbed);

		vmw_binding_drop(loc);
	}

	/* Only transfer bindings that still point to a resource. */
	if (bi->res != NULL) {
		memcpy(loc, bi, vmw_binding_infos[bi->bt].size);
		list_add_tail(&loc->ctx_list, &cbs->list);
		list_add_tail(&loc->res_list, &loc->res->binding_head);
	}
}

/**
 * vmw_binding_state_kill - Kill all bindings associated with a
 * struct vmw_ctx_binding state structure, and re-initialize the structure.
 *
 * @cbs: Pointer to the context binding state tracker.
 *
 * Emits commands to scrub all bindings associated with the
 * context binding state tracker. Then re-initializes the whole structure.
 */
void vmw_binding_state_kill(struct vmw_ctx_binding_state *cbs)
{
	struct vmw_ctx_bindinfo *entry, *next;

	/* Scrub first (emits device commands), then drop the tracking. */
	vmw_binding_state_scrub(cbs);
	list_for_each_entry_safe(entry, next, &cbs->list, ctx_list)
		vmw_binding_drop(entry);
}

/**
 * vmw_binding_state_scrub - Scrub all bindings associated with a
 * struct vmw_ctx_binding state structure.
 *
 * @cbs: Pointer to the context binding state tracker.
 *
 * Emits commands to scrub all bindings associated with the
 * context binding state tracker.
 */
void vmw_binding_state_scrub(struct vmw_ctx_binding_state *cbs)
{
	struct vmw_ctx_bindinfo *entry;

	list_for_each_entry(entry, &cbs->list, ctx_list) {
		if (!entry->scrubbed) {
			/* Scrub errors are intentionally ignored here. */
			(void) vmw_binding_infos[entry->bt].scrub_func
				(entry, false);
			entry->scrubbed = true;
		}
	}

	/* Flush any bindings whose scrub was deferred via the dirty bitmap. */
	(void) vmw_binding_emit_dirty(cbs);
}

/**
 * vmw_binding_res_list_kill - Kill all bindings on a
 * resource binding list
 *
 * @head: list head of resource binding list
 *
 * Kills all bindings associated with a specific resource. Typically
 * called before the resource is destroyed.
 */
void vmw_binding_res_list_kill(struct list_head *head)
{
	struct vmw_ctx_bindinfo *entry, *next;

	/* Scrub first (emits device commands), then drop the tracking. */
	vmw_binding_res_list_scrub(head);
	list_for_each_entry_safe(entry, next, head, res_list)
		vmw_binding_drop(entry);
}

/**
 * vmw_binding_res_list_scrub - Scrub all bindings on a
 * resource binding list
 *
 * @head: list head of resource binding list
 *
 * Scrub all bindings associated with a specific resource. Typically
 * called before the resource is evicted.
 */
void vmw_binding_res_list_scrub(struct list_head *head)
{
	struct vmw_ctx_bindinfo *entry;

	/* First pass: scrub every unscrubbed binding on the list. */
	list_for_each_entry(entry, head, res_list) {
		if (!entry->scrubbed) {
			(void) vmw_binding_infos[entry->bt].scrub_func
				(entry, false);
			entry->scrubbed = true;
		}
	}

	/*
	 * Second pass: the list may span multiple contexts, so emit the
	 * deferred dirty-binding commands for each owning context.
	 */
	list_for_each_entry(entry, head, res_list) {
		struct vmw_ctx_binding_state *cbs =
			vmw_context_binding_state(entry->ctx);

		(void) vmw_binding_emit_dirty(cbs);
	}
}

473d80efd5cSThomas Hellstrom 474d80efd5cSThomas Hellstrom /** 475d80efd5cSThomas Hellstrom * vmw_binding_state_commit - Commit staged binding info 476d80efd5cSThomas Hellstrom * 477d80efd5cSThomas Hellstrom * @ctx: Pointer to context to commit the staged binding info to. 478d80efd5cSThomas Hellstrom * @from: Staged binding info built during execbuf. 479d80efd5cSThomas Hellstrom * @scrubbed: Transfer only scrubbed bindings. 480d80efd5cSThomas Hellstrom * 481d80efd5cSThomas Hellstrom * Transfers binding info from a temporary structure 482d80efd5cSThomas Hellstrom * (typically used by execbuf) to the persistent 483d80efd5cSThomas Hellstrom * structure in the context. This can be done once commands have been 484d80efd5cSThomas Hellstrom * submitted to hardware 485d80efd5cSThomas Hellstrom */ 486d80efd5cSThomas Hellstrom void vmw_binding_state_commit(struct vmw_ctx_binding_state *to, 487d80efd5cSThomas Hellstrom struct vmw_ctx_binding_state *from) 488d80efd5cSThomas Hellstrom { 489d80efd5cSThomas Hellstrom struct vmw_ctx_bindinfo *entry, *next; 490d80efd5cSThomas Hellstrom 491d80efd5cSThomas Hellstrom list_for_each_entry_safe(entry, next, &from->list, ctx_list) { 492d80efd5cSThomas Hellstrom vmw_binding_transfer(to, from, entry); 493d80efd5cSThomas Hellstrom vmw_binding_drop(entry); 494d80efd5cSThomas Hellstrom } 4955e8ec0d9SDeepak Rawat 4965e8ec0d9SDeepak Rawat /* Also transfer uav splice indices */ 4975e8ec0d9SDeepak Rawat to->ua_views[0].index = from->ua_views[0].index; 4985e8ec0d9SDeepak Rawat to->ua_views[1].index = from->ua_views[1].index; 499d80efd5cSThomas Hellstrom } 500d80efd5cSThomas Hellstrom 501d80efd5cSThomas Hellstrom /** 502d80efd5cSThomas Hellstrom * vmw_binding_rebind_all - Rebind all scrubbed bindings of a context 503d80efd5cSThomas Hellstrom * 504d80efd5cSThomas Hellstrom * @ctx: The context resource 505d80efd5cSThomas Hellstrom * 506d80efd5cSThomas Hellstrom * Walks through the context binding list and rebinds all scrubbed 507d80efd5cSThomas 
 * resources.
 */
int vmw_binding_rebind_all(struct vmw_ctx_binding_state *cbs)
{
	struct vmw_ctx_bindinfo *entry;
	int ret;

	list_for_each_entry(entry, &cbs->list, ctx_list) {
		/* Only scrubbed bindings need a rebind command. */
		if (likely(!entry->scrubbed))
			continue;

		/* Skip bindings whose resource is gone or has no valid
		 * device id; there is nothing to bind for those.
		 */
		if ((entry->res == NULL || entry->res->id ==
		    SVGA3D_INVALID_ID))
			continue;

		ret = vmw_binding_infos[entry->bt].scrub_func(entry, true);
		if (unlikely(ret != 0))
			return ret;

		entry->scrubbed = false;
	}

	/* Flush any batched binding commands made dirty above. */
	return vmw_binding_emit_dirty(cbs);
}

/**
 * vmw_binding_scrub_shader - scrub a shader binding from a context.
 *
 * @bi: single binding information.
 * @rebind: Whether to issue a bind instead of scrub command.
537d80efd5cSThomas Hellstrom */ 538d80efd5cSThomas Hellstrom static int vmw_binding_scrub_shader(struct vmw_ctx_bindinfo *bi, bool rebind) 539d80efd5cSThomas Hellstrom { 540d80efd5cSThomas Hellstrom struct vmw_ctx_bindinfo_shader *binding = 541d80efd5cSThomas Hellstrom container_of(bi, typeof(*binding), bi); 542d80efd5cSThomas Hellstrom struct vmw_private *dev_priv = bi->ctx->dev_priv; 543d80efd5cSThomas Hellstrom struct { 544d80efd5cSThomas Hellstrom SVGA3dCmdHeader header; 545d80efd5cSThomas Hellstrom SVGA3dCmdSetShader body; 546d80efd5cSThomas Hellstrom } *cmd; 547d80efd5cSThomas Hellstrom 54811c45419SDeepak Rawat cmd = VMW_FIFO_RESERVE(dev_priv, sizeof(*cmd)); 54911c45419SDeepak Rawat if (unlikely(cmd == NULL)) 550d80efd5cSThomas Hellstrom return -ENOMEM; 551d80efd5cSThomas Hellstrom 552d80efd5cSThomas Hellstrom cmd->header.id = SVGA_3D_CMD_SET_SHADER; 553d80efd5cSThomas Hellstrom cmd->header.size = sizeof(cmd->body); 554d80efd5cSThomas Hellstrom cmd->body.cid = bi->ctx->id; 555d80efd5cSThomas Hellstrom cmd->body.type = binding->shader_slot + SVGA3D_SHADERTYPE_MIN; 556d80efd5cSThomas Hellstrom cmd->body.shid = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID); 557d80efd5cSThomas Hellstrom vmw_fifo_commit(dev_priv, sizeof(*cmd)); 558d80efd5cSThomas Hellstrom 559d80efd5cSThomas Hellstrom return 0; 560d80efd5cSThomas Hellstrom } 561d80efd5cSThomas Hellstrom 562d80efd5cSThomas Hellstrom /** 563d80efd5cSThomas Hellstrom * vmw_binding_scrub_render_target - scrub a render target binding 564d80efd5cSThomas Hellstrom * from a context. 565d80efd5cSThomas Hellstrom * 566d80efd5cSThomas Hellstrom * @bi: single binding information. 567d80efd5cSThomas Hellstrom * @rebind: Whether to issue a bind instead of scrub command. 
568d80efd5cSThomas Hellstrom */ 569d80efd5cSThomas Hellstrom static int vmw_binding_scrub_render_target(struct vmw_ctx_bindinfo *bi, 570d80efd5cSThomas Hellstrom bool rebind) 571d80efd5cSThomas Hellstrom { 572d80efd5cSThomas Hellstrom struct vmw_ctx_bindinfo_view *binding = 573d80efd5cSThomas Hellstrom container_of(bi, typeof(*binding), bi); 574d80efd5cSThomas Hellstrom struct vmw_private *dev_priv = bi->ctx->dev_priv; 575d80efd5cSThomas Hellstrom struct { 576d80efd5cSThomas Hellstrom SVGA3dCmdHeader header; 577d80efd5cSThomas Hellstrom SVGA3dCmdSetRenderTarget body; 578d80efd5cSThomas Hellstrom } *cmd; 579d80efd5cSThomas Hellstrom 58011c45419SDeepak Rawat cmd = VMW_FIFO_RESERVE(dev_priv, sizeof(*cmd)); 58111c45419SDeepak Rawat if (unlikely(cmd == NULL)) 582d80efd5cSThomas Hellstrom return -ENOMEM; 583d80efd5cSThomas Hellstrom 584d80efd5cSThomas Hellstrom cmd->header.id = SVGA_3D_CMD_SETRENDERTARGET; 585d80efd5cSThomas Hellstrom cmd->header.size = sizeof(cmd->body); 586d80efd5cSThomas Hellstrom cmd->body.cid = bi->ctx->id; 587d80efd5cSThomas Hellstrom cmd->body.type = binding->slot; 588d80efd5cSThomas Hellstrom cmd->body.target.sid = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID); 589d80efd5cSThomas Hellstrom cmd->body.target.face = 0; 590d80efd5cSThomas Hellstrom cmd->body.target.mipmap = 0; 591d80efd5cSThomas Hellstrom vmw_fifo_commit(dev_priv, sizeof(*cmd)); 592d80efd5cSThomas Hellstrom 593d80efd5cSThomas Hellstrom return 0; 594d80efd5cSThomas Hellstrom } 595d80efd5cSThomas Hellstrom 596d80efd5cSThomas Hellstrom /** 597d80efd5cSThomas Hellstrom * vmw_binding_scrub_texture - scrub a texture binding from a context. 598d80efd5cSThomas Hellstrom * 599d80efd5cSThomas Hellstrom * @bi: single binding information. 600d80efd5cSThomas Hellstrom * @rebind: Whether to issue a bind instead of scrub command. 
 *
 * TODO: Possibly complement this function with a function that takes
 * a list of texture bindings and combines them to a single command.
 */
static int vmw_binding_scrub_texture(struct vmw_ctx_bindinfo *bi,
				     bool rebind)
{
	struct vmw_ctx_bindinfo_tex *binding =
		container_of(bi, typeof(*binding), bi);
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		struct {
			SVGA3dCmdSetTextureState c;
			SVGA3dTextureState s1;
		} body;
	} *cmd;

	cmd = VMW_FIFO_RESERVE(dev_priv, sizeof(*cmd));
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_SETTEXTURESTATE;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.c.cid = bi->ctx->id;
	/* A single texture-state entry (re)binding or unbinding the
	 * texture at this stage.
	 */
	cmd->body.s1.stage = binding->texture_stage;
	cmd->body.s1.name = SVGA3D_TS_BIND_TEXTURE;
	cmd->body.s1.value = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID);
	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	return 0;
}

/**
 * vmw_binding_scrub_dx_shader - scrub a dx shader binding from a context.
636d80efd5cSThomas Hellstrom * 637d80efd5cSThomas Hellstrom * @bi: single binding information. 638d80efd5cSThomas Hellstrom * @rebind: Whether to issue a bind instead of scrub command. 639d80efd5cSThomas Hellstrom */ 640d80efd5cSThomas Hellstrom static int vmw_binding_scrub_dx_shader(struct vmw_ctx_bindinfo *bi, bool rebind) 641d80efd5cSThomas Hellstrom { 642d80efd5cSThomas Hellstrom struct vmw_ctx_bindinfo_shader *binding = 643d80efd5cSThomas Hellstrom container_of(bi, typeof(*binding), bi); 644d80efd5cSThomas Hellstrom struct vmw_private *dev_priv = bi->ctx->dev_priv; 645d80efd5cSThomas Hellstrom struct { 646d80efd5cSThomas Hellstrom SVGA3dCmdHeader header; 647d80efd5cSThomas Hellstrom SVGA3dCmdDXSetShader body; 648d80efd5cSThomas Hellstrom } *cmd; 649d80efd5cSThomas Hellstrom 65011c45419SDeepak Rawat cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), bi->ctx->id); 65111c45419SDeepak Rawat if (unlikely(cmd == NULL)) 652d80efd5cSThomas Hellstrom return -ENOMEM; 65311c45419SDeepak Rawat 654d80efd5cSThomas Hellstrom cmd->header.id = SVGA_3D_CMD_DX_SET_SHADER; 655d80efd5cSThomas Hellstrom cmd->header.size = sizeof(cmd->body); 656d80efd5cSThomas Hellstrom cmd->body.type = binding->shader_slot + SVGA3D_SHADERTYPE_MIN; 657d80efd5cSThomas Hellstrom cmd->body.shaderId = ((rebind) ? bi->res->id : SVGA3D_INVALID_ID); 658d80efd5cSThomas Hellstrom vmw_fifo_commit(dev_priv, sizeof(*cmd)); 659d80efd5cSThomas Hellstrom 660d80efd5cSThomas Hellstrom return 0; 661d80efd5cSThomas Hellstrom } 662d80efd5cSThomas Hellstrom 663d80efd5cSThomas Hellstrom /** 664d80efd5cSThomas Hellstrom * vmw_binding_scrub_cb - scrub a constant buffer binding from a context. 665d80efd5cSThomas Hellstrom * 666d80efd5cSThomas Hellstrom * @bi: single binding information. 667d80efd5cSThomas Hellstrom * @rebind: Whether to issue a bind instead of scrub command. 
 */
static int vmw_binding_scrub_cb(struct vmw_ctx_bindinfo *bi, bool rebind)
{
	struct vmw_ctx_bindinfo_cb *binding =
		container_of(bi, typeof(*binding), bi);
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetSingleConstantBuffer body;
	} *cmd;

	cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), bi->ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_SINGLE_CONSTANT_BUFFER;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.slot = binding->slot;
	cmd->body.type = binding->shader_slot + SVGA3D_SHADERTYPE_MIN;
	if (rebind) {
		/* Rebind using the buffer range recorded at bind time. */
		cmd->body.offsetInBytes = binding->offset;
		cmd->body.sizeInBytes = binding->size;
		cmd->body.sid = bi->res->id;
	} else {
		/* Scrub: point the slot at nothing. */
		cmd->body.offsetInBytes = 0;
		cmd->body.sizeInBytes = 0;
		cmd->body.sid = SVGA3D_INVALID_ID;
	}
	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	return 0;
}

/**
 * vmw_collect_view_ids - Build view id data for a view binding command
 * without checking
 * which bindings actually need to be emitted
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 * @bi: Pointer to where the binding info array is stored in @cbs
 * @max_num: Maximum number of entries in the @bi array.
 *
 * Scans the @bi array for bindings and builds a buffer of view id data.
 * Stops at the first non-existing binding in the @bi array.
 * On output, @cbs->bind_cmd_count contains the number of bindings to be
 * emitted, @cbs->bind_first_slot is set to zero, and @cbs->bind_cmd_buffer
 * contains the command data.
 */
static void vmw_collect_view_ids(struct vmw_ctx_binding_state *cbs,
				 const struct vmw_ctx_bindinfo *bi,
				 u32 max_num)
{
	const struct vmw_ctx_bindinfo_view *biv =
		container_of(bi, struct vmw_ctx_bindinfo_view, bi);
	unsigned long i;

	cbs->bind_cmd_count = 0;
	cbs->bind_first_slot = 0;

	for (i = 0; i < max_num; ++i, ++biv) {
		/* A slot without a context marks the end of the array. */
		if (!biv->bi.ctx)
			break;

		/* Scrubbed bindings are emitted as the invalid id. */
		cbs->bind_cmd_buffer[cbs->bind_cmd_count++] =
			((biv->bi.scrubbed) ?
			 SVGA3D_INVALID_ID : biv->bi.res->id);
	}
}

/**
 * vmw_collect_dirty_view_ids - Build view id data for a view binding command
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 * @bi: Pointer to where the binding info array is stored in @cbs
 * @dirty: Bitmap indicating which bindings need to be emitted.
 * @max_num: Maximum number of entries in the @bi array.
 *
 * Scans the @bi array for bindings that need to be emitted and
 * builds a buffer of view id data.
 * On output, @cbs->bind_cmd_count contains the number of bindings to be
 * emitted, @cbs->bind_first_slot indicates the index of the first emitted
 * binding, and @cbs->bind_cmd_buffer contains the command data.
 */
static void vmw_collect_dirty_view_ids(struct vmw_ctx_binding_state *cbs,
				       const struct vmw_ctx_bindinfo *bi,
				       unsigned long *dirty,
				       u32 max_num)
{
	const struct vmw_ctx_bindinfo_view *biv =
		container_of(bi, struct vmw_ctx_bindinfo_view, bi);
	unsigned long i, next_bit;

	cbs->bind_cmd_count = 0;
	/* The emitted range starts at the first dirty slot. If the bitmap
	 * is empty, find_first_bit() returns max_num and the loop below
	 * never runs, leaving bind_cmd_count at zero.
	 */
	i = find_first_bit(dirty, max_num);
	next_bit = i;
	cbs->bind_first_slot = i;

	biv += i;
	for (; i < max_num; ++i, ++biv) {
		/* Emit every slot up to and including the last dirty one,
		 * using the invalid id for unbound or scrubbed slots in
		 * between so the emitted range is contiguous.
		 */
		cbs->bind_cmd_buffer[cbs->bind_cmd_count++] =
			((!biv->bi.ctx || biv->bi.scrubbed) ?
			 SVGA3D_INVALID_ID : biv->bi.res->id);

		if (next_bit == i) {
			next_bit = find_next_bit(dirty, max_num, i + 1);
			if (next_bit >= max_num)
				/* Last dirty slot emitted; stop. */
				break;
		}
	}
}

/**
 * vmw_emit_set_sr - Issue delayed DX shader resource binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 * @shader_slot: Shader slot whose dirty shader-resource views are emitted.
 */
static int vmw_emit_set_sr(struct vmw_ctx_binding_state *cbs,
			   int shader_slot)
{
	const struct vmw_ctx_bindinfo *loc =
		&cbs->per_shader[shader_slot].shader_res[0].bi;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetShaderResources body;
	} *cmd;
	size_t cmd_size, view_id_size;
	const struct vmw_resource *ctx = vmw_cbs_context(cbs);

	vmw_collect_dirty_view_ids(cbs, loc,
				   cbs->per_shader[shader_slot].dirty_sr,
				   SVGA3D_DX_MAX_SRVIEWS);
	/* Nothing dirty: nothing to emit. */
	if (cbs->bind_cmd_count == 0)
		return 0;

	/* View ids follow the fixed command body as a variable payload. */
	view_id_size = cbs->bind_cmd_count*sizeof(uint32);
	cmd_size = sizeof(*cmd) + view_id_size;
	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_SHADER_RESOURCES;
	cmd->header.size = sizeof(cmd->body) + view_id_size;
	cmd->body.type = shader_slot + SVGA3D_SHADERTYPE_MIN;
	cmd->body.startView = cbs->bind_first_slot;

	memcpy(&cmd[1], cbs->bind_cmd_buffer, view_id_size);

	vmw_fifo_commit(ctx->dev_priv, cmd_size);
	/* The just-emitted slots are now clean. */
	bitmap_clear(cbs->per_shader[shader_slot].dirty_sr,
		     cbs->bind_first_slot, cbs->bind_cmd_count);

	return 0;
}

/**
 * vmw_emit_set_rt - Issue delayed DX rendertarget binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 */
static int vmw_emit_set_rt(struct vmw_ctx_binding_state *cbs)
{
	const struct vmw_ctx_bindinfo *loc = &cbs->render_targets[0].bi;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetRenderTargets body;
	} *cmd;
	size_t cmd_size, view_id_size;
	const struct vmw_resource *ctx = vmw_cbs_context(cbs);

	/* Always emitted, even with zero render-target views: the
	 * depth/stencil view id lives in the fixed part of the command.
	 */
	vmw_collect_view_ids(cbs, loc, SVGA3D_MAX_SIMULTANEOUS_RENDER_TARGETS);
	view_id_size = cbs->bind_cmd_count*sizeof(uint32);
	cmd_size = sizeof(*cmd) + view_id_size;
	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_RENDERTARGETS;
	cmd->header.size = sizeof(cmd->body) + view_id_size;

	if (cbs->ds_view.bi.ctx && !cbs->ds_view.bi.scrubbed)
		cmd->body.depthStencilViewId = cbs->ds_view.bi.res->id;
	else
		cmd->body.depthStencilViewId = SVGA3D_INVALID_ID;

	memcpy(&cmd[1], cbs->bind_cmd_buffer, view_id_size);

	vmw_fifo_commit(ctx->dev_priv, cmd_size);

	return 0;

}

/**
 * vmw_collect_so_targets - Build SVGA3dSoTarget data for a binding command
 * without checking which bindings actually need to be emitted
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 * @bi: Pointer to where the binding info array is stored in @cbs
 * @max_num: Maximum number of entries in the @bi array.
 *
 * Scans the @bi array for bindings and builds a buffer of SVGA3dSoTarget data.
 * Stops at the first non-existing binding in the @bi array.
 * On output, @cbs->bind_cmd_count contains the number of bindings to be
 * emitted, @cbs->bind_first_slot is set to zero, and @cbs->bind_cmd_buffer
 * contains the command data.
 */
static void vmw_collect_so_targets(struct vmw_ctx_binding_state *cbs,
				   const struct vmw_ctx_bindinfo *bi,
				   u32 max_num)
{
	const struct vmw_ctx_bindinfo_so_target *biso =
		container_of(bi, struct vmw_ctx_bindinfo_so_target, bi);
	unsigned long i;
	SVGA3dSoTarget *so_buffer = (SVGA3dSoTarget *) cbs->bind_cmd_buffer;

	cbs->bind_cmd_count = 0;
	cbs->bind_first_slot = 0;

	for (i = 0; i < max_num; ++i, ++biso, ++so_buffer,
		    ++cbs->bind_cmd_count) {
		/* A slot without a context marks the end of the array. */
		if (!biso->bi.ctx)
			break;

		if (!biso->bi.scrubbed) {
			so_buffer->sid = biso->bi.res->id;
			so_buffer->offset = biso->offset;
			so_buffer->sizeInBytes = biso->size;
		} else {
			/* Scrubbed targets are emitted as unbound. */
			so_buffer->sid = SVGA3D_INVALID_ID;
			so_buffer->offset = 0;
			so_buffer->sizeInBytes = 0;
		}
	}
}

/**
 * vmw_emit_set_so_target - Issue delayed streamout binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 */
static int vmw_emit_set_so_target(struct vmw_ctx_binding_state *cbs)
{
	const struct vmw_ctx_bindinfo *loc = &cbs->so_targets[0].bi;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetSOTargets body;
	} *cmd;
	size_t cmd_size, so_target_size;
	const struct vmw_resource *ctx = vmw_cbs_context(cbs);

	vmw_collect_so_targets(cbs, loc, SVGA3D_DX_MAX_SOTARGETS);
	/* No bound targets: nothing to emit. */
	if (cbs->bind_cmd_count == 0)
		return 0;

	/* SVGA3dSoTarget entries follow the fixed command body. */
	so_target_size = cbs->bind_cmd_count*sizeof(SVGA3dSoTarget);
	cmd_size = sizeof(*cmd) + so_target_size;
	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_SOTARGETS;
	cmd->header.size = sizeof(cmd->body) + so_target_size;
	memcpy(&cmd[1], cbs->bind_cmd_buffer, so_target_size);

	vmw_fifo_commit(ctx->dev_priv, cmd_size);

	return 0;

}

/**
 * vmw_binding_emit_dirty_ps - Issue delayed per shader binding commands
 *
to the context's struct vmw_ctx_binding_state 941d80efd5cSThomas Hellstrom * 942d80efd5cSThomas Hellstrom */ 943d80efd5cSThomas Hellstrom static int vmw_binding_emit_dirty_ps(struct vmw_ctx_binding_state *cbs) 944d80efd5cSThomas Hellstrom { 945d80efd5cSThomas Hellstrom struct vmw_dx_shader_bindings *sb = &cbs->per_shader[0]; 946d80efd5cSThomas Hellstrom u32 i; 947d80efd5cSThomas Hellstrom int ret; 948d80efd5cSThomas Hellstrom 949d80efd5cSThomas Hellstrom for (i = 0; i < SVGA3D_NUM_SHADERTYPE_DX10; ++i, ++sb) { 950d80efd5cSThomas Hellstrom if (!test_bit(VMW_BINDING_PS_SR_BIT, &sb->dirty)) 951d80efd5cSThomas Hellstrom continue; 952d80efd5cSThomas Hellstrom 953d80efd5cSThomas Hellstrom ret = vmw_emit_set_sr(cbs, i); 954d80efd5cSThomas Hellstrom if (ret) 955d80efd5cSThomas Hellstrom break; 956d80efd5cSThomas Hellstrom 957d80efd5cSThomas Hellstrom __clear_bit(VMW_BINDING_PS_SR_BIT, &sb->dirty); 958d80efd5cSThomas Hellstrom } 959d80efd5cSThomas Hellstrom 960d80efd5cSThomas Hellstrom return 0; 961d80efd5cSThomas Hellstrom } 962d80efd5cSThomas Hellstrom 963d80efd5cSThomas Hellstrom /** 964d80efd5cSThomas Hellstrom * vmw_collect_dirty_vbs - Build SVGA3dVertexBuffer data for a 965d80efd5cSThomas Hellstrom * SVGA3dCmdDXSetVertexBuffers command 966d80efd5cSThomas Hellstrom * 967d80efd5cSThomas Hellstrom * @cbs: Pointer to the context's struct vmw_ctx_binding_state 968d80efd5cSThomas Hellstrom * @bi: Pointer to where the binding info array is stored in @cbs 969d80efd5cSThomas Hellstrom * @dirty: Bitmap indicating which bindings need to be emitted. 970d80efd5cSThomas Hellstrom * @max_num: Maximum number of entries in the @bi array. 971d80efd5cSThomas Hellstrom * 972d80efd5cSThomas Hellstrom * Scans the @bi array for bindings that need to be emitted and 973d80efd5cSThomas Hellstrom * builds a buffer of SVGA3dVertexBuffer data. 
 * On output, @cbs->bind_cmd_count contains the number of bindings to be
 * emitted, @cbs->bind_first_slot indicates the index of the first emitted
 * binding, and @cbs->bind_cmd_buffer contains the command data.
 */
static void vmw_collect_dirty_vbs(struct vmw_ctx_binding_state *cbs,
				  const struct vmw_ctx_bindinfo *bi,
				  unsigned long *dirty,
				  u32 max_num)
{
	const struct vmw_ctx_bindinfo_vb *biv =
		container_of(bi, struct vmw_ctx_bindinfo_vb, bi);
	unsigned long i, next_bit;
	SVGA3dVertexBuffer *vbs = (SVGA3dVertexBuffer *) &cbs->bind_cmd_buffer;

	cbs->bind_cmd_count = 0;
	/* Emit a contiguous range starting at the first dirty slot; if the
	 * bitmap is empty, find_first_bit() returns max_num and the loop
	 * never runs.
	 */
	i = find_first_bit(dirty, max_num);
	next_bit = i;
	cbs->bind_first_slot = i;

	biv += i;
	for (; i < max_num; ++i, ++biv, ++vbs) {
		/* Unbound or scrubbed slots inside the range are emitted
		 * as unbound buffers.
		 */
		if (!biv->bi.ctx || biv->bi.scrubbed) {
			vbs->sid = SVGA3D_INVALID_ID;
			vbs->stride = 0;
			vbs->offset = 0;
		} else {
			vbs->sid = biv->bi.res->id;
			vbs->stride = biv->stride;
			vbs->offset = biv->offset;
		}
		cbs->bind_cmd_count++;
		if (next_bit == i) {
			next_bit = find_next_bit(dirty, max_num, i + 1);
			if (next_bit >=
			    max_num)
				/* Last dirty slot emitted; stop. */
				break;
		}
	}
}

/**
 * vmw_emit_set_vb - Issue delayed vertex buffer binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 *
 */
static int vmw_emit_set_vb(struct vmw_ctx_binding_state *cbs)
{
	const struct vmw_ctx_bindinfo *loc =
		&cbs->vertex_buffers[0].bi;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetVertexBuffers body;
	} *cmd;
	size_t cmd_size, set_vb_size;
	const struct vmw_resource *ctx = vmw_cbs_context(cbs);

	vmw_collect_dirty_vbs(cbs, loc, cbs->dirty_vb,
			      SVGA3D_DX_MAX_VERTEXBUFFERS);
	/* Nothing dirty: nothing to emit. */
	if (cbs->bind_cmd_count == 0)
		return 0;

	/* SVGA3dVertexBuffer entries follow the fixed command body. */
	set_vb_size = cbs->bind_cmd_count*sizeof(SVGA3dVertexBuffer);
	cmd_size = sizeof(*cmd) + set_vb_size;
	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_VERTEX_BUFFERS;
	cmd->header.size = sizeof(cmd->body) + set_vb_size;
	cmd->body.startBuffer = cbs->bind_first_slot;

	memcpy(&cmd[1], cbs->bind_cmd_buffer, set_vb_size);

	vmw_fifo_commit(ctx->dev_priv, cmd_size);
	/* The just-emitted slots are now clean. */
	bitmap_clear(cbs->dirty_vb,
		     cbs->bind_first_slot, cbs->bind_cmd_count);

	return 0;
}

/**
 * vmw_emit_set_uav - Issue delayed UA view binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 */
static int vmw_emit_set_uav(struct vmw_ctx_binding_state *cbs)
{
	const struct vmw_ctx_bindinfo *loc = &cbs->ua_views[0].views[0].bi;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetUAViews body;
	} *cmd;
	size_t cmd_size, view_id_size;
	const struct vmw_resource *ctx = vmw_cbs_context(cbs);

	vmw_collect_view_ids(cbs, loc, SVGA3D_MAX_UAVIEWS);
	view_id_size = cbs->bind_cmd_count*sizeof(uint32);
	cmd_size = sizeof(*cmd) + view_id_size;
	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
	if (!cmd)
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_UA_VIEWS;
	cmd->header.size = sizeof(cmd->body) + view_id_size;

	/* Splice index is specified by user-space */
	cmd->body.uavSpliceIndex = cbs->ua_views[0].index;

	memcpy(&cmd[1], cbs->bind_cmd_buffer, view_id_size);

	vmw_fifo_commit(ctx->dev_priv, cmd_size);

	return 0;
}

/**
 * vmw_emit_set_cs_uav - Issue delayed compute-shader UA view binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 */
static int vmw_emit_set_cs_uav(struct vmw_ctx_binding_state *cbs)
{
	const struct vmw_ctx_bindinfo *loc = &cbs->ua_views[1].views[0].bi;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetCSUAViews body;
	} *cmd;
	size_t cmd_size, view_id_size;
	const struct vmw_resource *ctx = vmw_cbs_context(cbs);

	vmw_collect_view_ids(cbs, loc, SVGA3D_MAX_UAVIEWS);
	view_id_size = cbs->bind_cmd_count*sizeof(uint32);
	cmd_size = sizeof(*cmd) + view_id_size;
	cmd = VMW_FIFO_RESERVE_DX(ctx->dev_priv, cmd_size, ctx->id);
	if (!cmd)
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_CS_UA_VIEWS;
	cmd->header.size = sizeof(cmd->body) + view_id_size;

	/* Start index is specified by user-space */
	cmd->body.startIndex = cbs->ua_views[1].index;

	memcpy(&cmd[1], cbs->bind_cmd_buffer, view_id_size);

	vmw_fifo_commit(ctx->dev_priv, cmd_size);

	return 0;
}

/**
 * vmw_binding_emit_dirty - Issue delayed binding commands
 *
 * @cbs: Pointer to the context's struct vmw_ctx_binding_state
 *
1119d80efd5cSThomas Hellstrom * This function issues the delayed binding commands that arise from 1120d80efd5cSThomas Hellstrom * previous scrub / unscrub calls. These binding commands are typically 1121d80efd5cSThomas Hellstrom * commands that batch a number of bindings and therefore it makes sense 1122d80efd5cSThomas Hellstrom * to delay them. 1123d80efd5cSThomas Hellstrom */ 1124d80efd5cSThomas Hellstrom static int vmw_binding_emit_dirty(struct vmw_ctx_binding_state *cbs) 1125d80efd5cSThomas Hellstrom { 1126d80efd5cSThomas Hellstrom int ret = 0; 1127d80efd5cSThomas Hellstrom unsigned long hit = 0; 1128d80efd5cSThomas Hellstrom 1129d80efd5cSThomas Hellstrom while ((hit = find_next_bit(&cbs->dirty, VMW_BINDING_NUM_BITS, hit)) 1130d80efd5cSThomas Hellstrom < VMW_BINDING_NUM_BITS) { 1131d80efd5cSThomas Hellstrom 1132d80efd5cSThomas Hellstrom switch (hit) { 1133d80efd5cSThomas Hellstrom case VMW_BINDING_RT_BIT: 1134d80efd5cSThomas Hellstrom ret = vmw_emit_set_rt(cbs); 1135d80efd5cSThomas Hellstrom break; 1136d80efd5cSThomas Hellstrom case VMW_BINDING_PS_BIT: 1137d80efd5cSThomas Hellstrom ret = vmw_binding_emit_dirty_ps(cbs); 1138d80efd5cSThomas Hellstrom break; 1139403fef50SDeepak Rawat case VMW_BINDING_SO_T_BIT: 1140403fef50SDeepak Rawat ret = vmw_emit_set_so_target(cbs); 1141d80efd5cSThomas Hellstrom break; 1142d80efd5cSThomas Hellstrom case VMW_BINDING_VB_BIT: 1143d80efd5cSThomas Hellstrom ret = vmw_emit_set_vb(cbs); 1144d80efd5cSThomas Hellstrom break; 11455e8ec0d9SDeepak Rawat case VMW_BINDING_UAV_BIT: 11465e8ec0d9SDeepak Rawat ret = vmw_emit_set_uav(cbs); 11475e8ec0d9SDeepak Rawat break; 11485e8ec0d9SDeepak Rawat case VMW_BINDING_CS_UAV_BIT: 11495e8ec0d9SDeepak Rawat ret = vmw_emit_set_cs_uav(cbs); 11505e8ec0d9SDeepak Rawat break; 1151d80efd5cSThomas Hellstrom default: 1152d80efd5cSThomas Hellstrom BUG(); 1153d80efd5cSThomas Hellstrom } 1154d80efd5cSThomas Hellstrom if (ret) 1155d80efd5cSThomas Hellstrom return ret; 1156d80efd5cSThomas Hellstrom 
1157d80efd5cSThomas Hellstrom __clear_bit(hit, &cbs->dirty); 1158d80efd5cSThomas Hellstrom hit++; 1159d80efd5cSThomas Hellstrom } 1160d80efd5cSThomas Hellstrom 1161d80efd5cSThomas Hellstrom return 0; 1162d80efd5cSThomas Hellstrom } 1163d80efd5cSThomas Hellstrom 1164d80efd5cSThomas Hellstrom /** 1165d80efd5cSThomas Hellstrom * vmw_binding_scrub_sr - Schedule a dx shaderresource binding 1166d80efd5cSThomas Hellstrom * scrub from a context 1167d80efd5cSThomas Hellstrom * 1168d80efd5cSThomas Hellstrom * @bi: single binding information. 1169d80efd5cSThomas Hellstrom * @rebind: Whether to issue a bind instead of scrub command. 1170d80efd5cSThomas Hellstrom */ 1171d80efd5cSThomas Hellstrom static int vmw_binding_scrub_sr(struct vmw_ctx_bindinfo *bi, bool rebind) 1172d80efd5cSThomas Hellstrom { 1173d80efd5cSThomas Hellstrom struct vmw_ctx_bindinfo_view *biv = 1174d80efd5cSThomas Hellstrom container_of(bi, struct vmw_ctx_bindinfo_view, bi); 1175d80efd5cSThomas Hellstrom struct vmw_ctx_binding_state *cbs = 1176d80efd5cSThomas Hellstrom vmw_context_binding_state(bi->ctx); 1177d80efd5cSThomas Hellstrom 1178d80efd5cSThomas Hellstrom __set_bit(biv->slot, cbs->per_shader[biv->shader_slot].dirty_sr); 1179d80efd5cSThomas Hellstrom __set_bit(VMW_BINDING_PS_SR_BIT, 1180d80efd5cSThomas Hellstrom &cbs->per_shader[biv->shader_slot].dirty); 1181d80efd5cSThomas Hellstrom __set_bit(VMW_BINDING_PS_BIT, &cbs->dirty); 1182d80efd5cSThomas Hellstrom 1183d80efd5cSThomas Hellstrom return 0; 1184d80efd5cSThomas Hellstrom } 1185d80efd5cSThomas Hellstrom 1186d80efd5cSThomas Hellstrom /** 1187d80efd5cSThomas Hellstrom * vmw_binding_scrub_dx_rt - Schedule a dx rendertarget binding 1188d80efd5cSThomas Hellstrom * scrub from a context 1189d80efd5cSThomas Hellstrom * 1190d80efd5cSThomas Hellstrom * @bi: single binding information. 1191d80efd5cSThomas Hellstrom * @rebind: Whether to issue a bind instead of scrub command. 
1192d80efd5cSThomas Hellstrom */ 1193d80efd5cSThomas Hellstrom static int vmw_binding_scrub_dx_rt(struct vmw_ctx_bindinfo *bi, bool rebind) 1194d80efd5cSThomas Hellstrom { 1195d80efd5cSThomas Hellstrom struct vmw_ctx_binding_state *cbs = 1196d80efd5cSThomas Hellstrom vmw_context_binding_state(bi->ctx); 1197d80efd5cSThomas Hellstrom 1198d80efd5cSThomas Hellstrom __set_bit(VMW_BINDING_RT_BIT, &cbs->dirty); 1199d80efd5cSThomas Hellstrom 1200d80efd5cSThomas Hellstrom return 0; 1201d80efd5cSThomas Hellstrom } 1202d80efd5cSThomas Hellstrom 1203d80efd5cSThomas Hellstrom /** 1204403fef50SDeepak Rawat * vmw_binding_scrub_so_target - Schedule a dx streamoutput buffer binding 1205d80efd5cSThomas Hellstrom * scrub from a context 1206d80efd5cSThomas Hellstrom * 1207d80efd5cSThomas Hellstrom * @bi: single binding information. 1208d80efd5cSThomas Hellstrom * @rebind: Whether to issue a bind instead of scrub command. 1209d80efd5cSThomas Hellstrom */ 1210403fef50SDeepak Rawat static int vmw_binding_scrub_so_target(struct vmw_ctx_bindinfo *bi, bool rebind) 1211d80efd5cSThomas Hellstrom { 1212d80efd5cSThomas Hellstrom struct vmw_ctx_binding_state *cbs = 1213d80efd5cSThomas Hellstrom vmw_context_binding_state(bi->ctx); 1214d80efd5cSThomas Hellstrom 1215403fef50SDeepak Rawat __set_bit(VMW_BINDING_SO_T_BIT, &cbs->dirty); 1216d80efd5cSThomas Hellstrom 1217d80efd5cSThomas Hellstrom return 0; 1218d80efd5cSThomas Hellstrom } 1219d80efd5cSThomas Hellstrom 1220d80efd5cSThomas Hellstrom /** 1221d80efd5cSThomas Hellstrom * vmw_binding_scrub_vb - Schedule a dx vertex buffer binding 1222d80efd5cSThomas Hellstrom * scrub from a context 1223d80efd5cSThomas Hellstrom * 1224d80efd5cSThomas Hellstrom * @bi: single binding information. 1225d80efd5cSThomas Hellstrom * @rebind: Whether to issue a bind instead of scrub command. 
1226d80efd5cSThomas Hellstrom */ 1227d80efd5cSThomas Hellstrom static int vmw_binding_scrub_vb(struct vmw_ctx_bindinfo *bi, bool rebind) 1228d80efd5cSThomas Hellstrom { 1229d80efd5cSThomas Hellstrom struct vmw_ctx_bindinfo_vb *bivb = 1230d80efd5cSThomas Hellstrom container_of(bi, struct vmw_ctx_bindinfo_vb, bi); 1231d80efd5cSThomas Hellstrom struct vmw_ctx_binding_state *cbs = 1232d80efd5cSThomas Hellstrom vmw_context_binding_state(bi->ctx); 1233d80efd5cSThomas Hellstrom 1234d80efd5cSThomas Hellstrom __set_bit(bivb->slot, cbs->dirty_vb); 1235d80efd5cSThomas Hellstrom __set_bit(VMW_BINDING_VB_BIT, &cbs->dirty); 1236d80efd5cSThomas Hellstrom 1237d80efd5cSThomas Hellstrom return 0; 1238d80efd5cSThomas Hellstrom } 1239d80efd5cSThomas Hellstrom 1240d80efd5cSThomas Hellstrom /** 1241d80efd5cSThomas Hellstrom * vmw_binding_scrub_ib - scrub a dx index buffer binding from a context 1242d80efd5cSThomas Hellstrom * 1243d80efd5cSThomas Hellstrom * @bi: single binding information. 1244d80efd5cSThomas Hellstrom * @rebind: Whether to issue a bind instead of scrub command. 
 */
static int vmw_binding_scrub_ib(struct vmw_ctx_bindinfo *bi, bool rebind)
{
	struct vmw_ctx_bindinfo_ib *binding =
		container_of(bi, typeof(*binding), bi);
	struct vmw_private *dev_priv = bi->ctx->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXSetIndexBuffer body;
	} *cmd;

	cmd = VMW_FIFO_RESERVE_DX(dev_priv, sizeof(*cmd), bi->ctx->id);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	cmd->header.id = SVGA_3D_CMD_DX_SET_INDEX_BUFFER;
	cmd->header.size = sizeof(cmd->body);
	if (rebind) {
		cmd->body.sid = bi->res->id;
		cmd->body.format = binding->format;
		cmd->body.offset = binding->offset;
	} else {
		/* Scrub: point the device at an invalid surface id. */
		cmd->body.sid = SVGA3D_INVALID_ID;
		cmd->body.format = 0;
		cmd->body.offset = 0;
	}

	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	return 0;
}

/**
 * vmw_binding_scrub_uav - Schedule a dx unordered access view binding
 * scrub from a context
 *
 * @bi: single binding information.
 * @rebind: Whether to issue a bind instead of scrub command. Unused here;
 *          the batched command is emitted later via vmw_emit_set_uav().
 */
static int vmw_binding_scrub_uav(struct vmw_ctx_bindinfo *bi, bool rebind)
{
	struct vmw_ctx_binding_state *cbs = vmw_context_binding_state(bi->ctx);

	__set_bit(VMW_BINDING_UAV_BIT, &cbs->dirty);
	return 0;
}

/**
 * vmw_binding_scrub_cs_uav - Schedule a dx compute shader UAV binding
 * scrub from a context
 *
 * @bi: single binding information.
 * @rebind: Whether to issue a bind instead of scrub command. Unused here;
 *          the batched command is emitted later via vmw_emit_set_cs_uav().
 */
static int vmw_binding_scrub_cs_uav(struct vmw_ctx_bindinfo *bi, bool rebind)
{
	struct vmw_ctx_binding_state *cbs = vmw_context_binding_state(bi->ctx);

	__set_bit(VMW_BINDING_CS_UAV_BIT, &cbs->dirty);
	return 0;
}

/**
 * vmw_binding_state_alloc - Allocate a struct vmw_ctx_binding_state with
 * memory accounting.
 *
 * @dev_priv: Pointer to a device private structure.
 *
 * Returns a pointer to a newly allocated struct or an error pointer on error.
 */
struct vmw_ctx_binding_state *
vmw_binding_state_alloc(struct vmw_private *dev_priv)
{
	struct vmw_ctx_binding_state *cbs;
	struct ttm_operation_ctx ctx = {
		.interruptible = false,
		.no_wait_gpu = false
	};
	int ret;

	/* Account the allocation with TTM before actually allocating. */
	ret = ttm_mem_global_alloc(vmw_mem_glob(dev_priv), sizeof(*cbs),
				   &ctx);
	if (ret)
		return ERR_PTR(ret);

	cbs = vzalloc(sizeof(*cbs));
	if (!cbs) {
		/* Undo the accounting on allocation failure. */
		ttm_mem_global_free(vmw_mem_glob(dev_priv), sizeof(*cbs));
		return ERR_PTR(-ENOMEM);
	}

	cbs->dev_priv = dev_priv;
	INIT_LIST_HEAD(&cbs->list);

	return cbs;
}

/**
 * vmw_binding_state_free - Free a struct vmw_ctx_binding_state and its
 * memory accounting info.
 *
 * @cbs: Pointer to the struct vmw_ctx_binding_state to be freed.
 */
void vmw_binding_state_free(struct vmw_ctx_binding_state *cbs)
{
	struct vmw_private *dev_priv = cbs->dev_priv;

	vfree(cbs);
	/* sizeof() is not evaluated at run-time, so using *cbs here is fine. */
	ttm_mem_global_free(vmw_mem_glob(dev_priv), sizeof(*cbs));
}

/**
 * vmw_binding_state_list - Get the binding list of a
 * struct vmw_ctx_binding_state
 *
 * @cbs: Pointer to the struct vmw_ctx_binding_state
 *
 * Returns the binding list which can be used to traverse through the bindings
 * and access the resource information of all bindings.
1350d80efd5cSThomas Hellstrom */ 1351d80efd5cSThomas Hellstrom struct list_head *vmw_binding_state_list(struct vmw_ctx_binding_state *cbs) 1352d80efd5cSThomas Hellstrom { 1353d80efd5cSThomas Hellstrom return &cbs->list; 1354d80efd5cSThomas Hellstrom } 1355d80efd5cSThomas Hellstrom 1356d80efd5cSThomas Hellstrom /** 1357d80efd5cSThomas Hellstrom * vmwgfx_binding_state_reset - clear a struct vmw_ctx_binding_state 1358d80efd5cSThomas Hellstrom * 1359d80efd5cSThomas Hellstrom * @cbs: Pointer to the struct vmw_ctx_binding_state to be cleared 1360d80efd5cSThomas Hellstrom * 1361d80efd5cSThomas Hellstrom * Drops all bindings registered in @cbs. No device binding actions are 1362d80efd5cSThomas Hellstrom * performed. 1363d80efd5cSThomas Hellstrom */ 1364d80efd5cSThomas Hellstrom void vmw_binding_state_reset(struct vmw_ctx_binding_state *cbs) 1365d80efd5cSThomas Hellstrom { 1366d80efd5cSThomas Hellstrom struct vmw_ctx_bindinfo *entry, *next; 1367d80efd5cSThomas Hellstrom 1368d80efd5cSThomas Hellstrom list_for_each_entry_safe(entry, next, &cbs->list, ctx_list) 1369d80efd5cSThomas Hellstrom vmw_binding_drop(entry); 1370d80efd5cSThomas Hellstrom } 1371d80efd5cSThomas Hellstrom 1372a9f58c45SThomas Hellstrom /** 1373a9f58c45SThomas Hellstrom * vmw_binding_dirtying - Return whether a binding type is dirtying its resource 1374a9f58c45SThomas Hellstrom * @binding_type: The binding type 1375a9f58c45SThomas Hellstrom * 1376a9f58c45SThomas Hellstrom * Each time a resource is put on the validation list as the result of a 1377a9f58c45SThomas Hellstrom * context binding referencing it, we need to determine whether that resource 1378a9f58c45SThomas Hellstrom * will be dirtied (written to by the GPU) as a result of the corresponding 13795e8ec0d9SDeepak Rawat * GPU operation. Currently rendertarget-, depth-stencil-, stream-output-target 13805e8ec0d9SDeepak Rawat * and unordered access view bindings are capable of dirtying its resource. 
 *
 * Return: Whether the binding type dirties the resource its binding points to.
 */
u32 vmw_binding_dirtying(enum vmw_ctx_binding_type binding_type)
{
	/* Types not listed here default to 0 (not dirtying). */
	static u32 is_binding_dirtying[vmw_ctx_binding_max] = {
		[vmw_ctx_binding_rt] = VMW_RES_DIRTY_SET,
		[vmw_ctx_binding_dx_rt] = VMW_RES_DIRTY_SET,
		[vmw_ctx_binding_ds] = VMW_RES_DIRTY_SET,
		[vmw_ctx_binding_so_target] = VMW_RES_DIRTY_SET,
		[vmw_ctx_binding_uav] = VMW_RES_DIRTY_SET,
		[vmw_ctx_binding_cs_uav] = VMW_RES_DIRTY_SET,
	};

	/* Review this function as new bindings are added. */
	BUILD_BUG_ON(vmw_ctx_binding_max != 13);
	return is_binding_dirtying[binding_type];
}

/*
 * This function is unused at run-time, and only used to hold various build
 * asserts important for code optimization assumptions.
 */
static void vmw_binding_build_asserts(void)
{
	BUILD_BUG_ON(SVGA3D_NUM_SHADERTYPE_DX10 != 3);
	BUILD_BUG_ON(SVGA3D_MAX_SIMULTANEOUS_RENDER_TARGETS > SVGA3D_RT_MAX);
	BUILD_BUG_ON(sizeof(uint32) != sizeof(u32));

	/*
	 * struct vmw_ctx_binding_state::bind_cmd_buffer is used for various
	 * view id arrays.
	 */
	BUILD_BUG_ON(VMW_MAX_VIEW_BINDINGS < SVGA3D_RT_MAX);
	BUILD_BUG_ON(VMW_MAX_VIEW_BINDINGS < SVGA3D_DX_MAX_SRVIEWS);
	BUILD_BUG_ON(VMW_MAX_VIEW_BINDINGS < SVGA3D_DX_MAX_CONSTBUFFERS);

	/*
	 * struct vmw_ctx_binding_state::bind_cmd_buffer is used for
	 * u32 view ids, SVGA3dSoTargets and SVGA3dVertexBuffers
	 */
	BUILD_BUG_ON(SVGA3D_DX_MAX_SOTARGETS*sizeof(SVGA3dSoTarget) >
		     VMW_MAX_VIEW_BINDINGS*sizeof(u32));
	BUILD_BUG_ON(SVGA3D_DX_MAX_VERTEXBUFFERS*sizeof(SVGA3dVertexBuffer) >
		     VMW_MAX_VIEW_BINDINGS*sizeof(u32));
}