1c74c162fSThomas Hellstrom /************************************************************************** 2c74c162fSThomas Hellstrom * 3c74c162fSThomas Hellstrom * Copyright © 2009-2012 VMware, Inc., Palo Alto, CA., USA 4c74c162fSThomas Hellstrom * All Rights Reserved. 5c74c162fSThomas Hellstrom * 6c74c162fSThomas Hellstrom * Permission is hereby granted, free of charge, to any person obtaining a 7c74c162fSThomas Hellstrom * copy of this software and associated documentation files (the 8c74c162fSThomas Hellstrom * "Software"), to deal in the Software without restriction, including 9c74c162fSThomas Hellstrom * without limitation the rights to use, copy, modify, merge, publish, 10c74c162fSThomas Hellstrom * distribute, sub license, and/or sell copies of the Software, and to 11c74c162fSThomas Hellstrom * permit persons to whom the Software is furnished to do so, subject to 12c74c162fSThomas Hellstrom * the following conditions: 13c74c162fSThomas Hellstrom * 14c74c162fSThomas Hellstrom * The above copyright notice and this permission notice (including the 15c74c162fSThomas Hellstrom * next paragraph) shall be included in all copies or substantial portions 16c74c162fSThomas Hellstrom * of the Software. 17c74c162fSThomas Hellstrom * 18c74c162fSThomas Hellstrom * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19c74c162fSThomas Hellstrom * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20c74c162fSThomas Hellstrom * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL 21c74c162fSThomas Hellstrom * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, 22c74c162fSThomas Hellstrom * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR 23c74c162fSThomas Hellstrom * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE 24c74c162fSThomas Hellstrom * USE OR OTHER DEALINGS IN THE SOFTWARE. 
25c74c162fSThomas Hellstrom * 26c74c162fSThomas Hellstrom **************************************************************************/ 27c74c162fSThomas Hellstrom 28c74c162fSThomas Hellstrom #include "vmwgfx_drv.h" 29c74c162fSThomas Hellstrom #include "vmwgfx_resource_priv.h" 30d80efd5cSThomas Hellstrom #include "vmwgfx_binding.h" 31c74c162fSThomas Hellstrom #include "ttm/ttm_placement.h" 32c74c162fSThomas Hellstrom 33c74c162fSThomas Hellstrom struct vmw_shader { 34c74c162fSThomas Hellstrom struct vmw_resource res; 35c74c162fSThomas Hellstrom SVGA3dShaderType type; 36c74c162fSThomas Hellstrom uint32_t size; 37d80efd5cSThomas Hellstrom uint8_t num_input_sig; 38d80efd5cSThomas Hellstrom uint8_t num_output_sig; 39c74c162fSThomas Hellstrom }; 40c74c162fSThomas Hellstrom 41c74c162fSThomas Hellstrom struct vmw_user_shader { 42c74c162fSThomas Hellstrom struct ttm_base_object base; 43c74c162fSThomas Hellstrom struct vmw_shader shader; 44c74c162fSThomas Hellstrom }; 45c74c162fSThomas Hellstrom 46d80efd5cSThomas Hellstrom struct vmw_dx_shader { 47d80efd5cSThomas Hellstrom struct vmw_resource res; 48d80efd5cSThomas Hellstrom struct vmw_resource *ctx; 49d80efd5cSThomas Hellstrom struct vmw_resource *cotable; 50d80efd5cSThomas Hellstrom u32 id; 51d80efd5cSThomas Hellstrom bool committed; 52d80efd5cSThomas Hellstrom struct list_head cotable_head; 53d80efd5cSThomas Hellstrom }; 54d80efd5cSThomas Hellstrom 5518e4a466SThomas Hellstrom static uint64_t vmw_user_shader_size; 5618e4a466SThomas Hellstrom static uint64_t vmw_shader_size; 57d80efd5cSThomas Hellstrom static size_t vmw_shader_dx_size; 58d5bde956SThomas Hellstrom 59c74c162fSThomas Hellstrom static void vmw_user_shader_free(struct vmw_resource *res); 60c74c162fSThomas Hellstrom static struct vmw_resource * 61c74c162fSThomas Hellstrom vmw_user_shader_base_to_res(struct ttm_base_object *base); 62c74c162fSThomas Hellstrom 63c74c162fSThomas Hellstrom static int vmw_gb_shader_create(struct vmw_resource *res); 64c74c162fSThomas 
Hellstrom static int vmw_gb_shader_bind(struct vmw_resource *res, 65c74c162fSThomas Hellstrom struct ttm_validate_buffer *val_buf); 66c74c162fSThomas Hellstrom static int vmw_gb_shader_unbind(struct vmw_resource *res, 67c74c162fSThomas Hellstrom bool readback, 68c74c162fSThomas Hellstrom struct ttm_validate_buffer *val_buf); 69c74c162fSThomas Hellstrom static int vmw_gb_shader_destroy(struct vmw_resource *res); 70c74c162fSThomas Hellstrom 71d80efd5cSThomas Hellstrom static int vmw_dx_shader_create(struct vmw_resource *res); 72d80efd5cSThomas Hellstrom static int vmw_dx_shader_bind(struct vmw_resource *res, 73d80efd5cSThomas Hellstrom struct ttm_validate_buffer *val_buf); 74d80efd5cSThomas Hellstrom static int vmw_dx_shader_unbind(struct vmw_resource *res, 75d80efd5cSThomas Hellstrom bool readback, 76d80efd5cSThomas Hellstrom struct ttm_validate_buffer *val_buf); 77d80efd5cSThomas Hellstrom static void vmw_dx_shader_commit_notify(struct vmw_resource *res, 78d80efd5cSThomas Hellstrom enum vmw_cmdbuf_res_state state); 79d80efd5cSThomas Hellstrom static bool vmw_shader_id_ok(u32 user_key, SVGA3dShaderType shader_type); 80d80efd5cSThomas Hellstrom static u32 vmw_shader_key(u32 user_key, SVGA3dShaderType shader_type); 81d80efd5cSThomas Hellstrom static uint64_t vmw_user_shader_size; 82d80efd5cSThomas Hellstrom 83c74c162fSThomas Hellstrom static const struct vmw_user_resource_conv user_shader_conv = { 84c74c162fSThomas Hellstrom .object_type = VMW_RES_SHADER, 85c74c162fSThomas Hellstrom .base_obj_to_res = vmw_user_shader_base_to_res, 86c74c162fSThomas Hellstrom .res_free = vmw_user_shader_free 87c74c162fSThomas Hellstrom }; 88c74c162fSThomas Hellstrom 89c74c162fSThomas Hellstrom const struct vmw_user_resource_conv *user_shader_converter = 90c74c162fSThomas Hellstrom &user_shader_conv; 91c74c162fSThomas Hellstrom 92c74c162fSThomas Hellstrom 93c74c162fSThomas Hellstrom static const struct vmw_res_func vmw_gb_shader_func = { 94c74c162fSThomas Hellstrom .res_type = 
vmw_res_shader, 95c74c162fSThomas Hellstrom .needs_backup = true, 96c74c162fSThomas Hellstrom .may_evict = true, 97c74c162fSThomas Hellstrom .type_name = "guest backed shaders", 98c74c162fSThomas Hellstrom .backup_placement = &vmw_mob_placement, 99c74c162fSThomas Hellstrom .create = vmw_gb_shader_create, 100c74c162fSThomas Hellstrom .destroy = vmw_gb_shader_destroy, 101c74c162fSThomas Hellstrom .bind = vmw_gb_shader_bind, 102c74c162fSThomas Hellstrom .unbind = vmw_gb_shader_unbind 103c74c162fSThomas Hellstrom }; 104c74c162fSThomas Hellstrom 105d80efd5cSThomas Hellstrom static const struct vmw_res_func vmw_dx_shader_func = { 106d80efd5cSThomas Hellstrom .res_type = vmw_res_shader, 107d80efd5cSThomas Hellstrom .needs_backup = true, 108d80efd5cSThomas Hellstrom .may_evict = false, 109d80efd5cSThomas Hellstrom .type_name = "dx shaders", 110d80efd5cSThomas Hellstrom .backup_placement = &vmw_mob_placement, 111d80efd5cSThomas Hellstrom .create = vmw_dx_shader_create, 112d80efd5cSThomas Hellstrom /* 113d80efd5cSThomas Hellstrom * The destroy callback is only called with a committed resource on 114d80efd5cSThomas Hellstrom * context destroy, in which case we destroy the cotable anyway, 115d80efd5cSThomas Hellstrom * so there's no need to destroy DX shaders separately. 
116d80efd5cSThomas Hellstrom */ 117d80efd5cSThomas Hellstrom .destroy = NULL, 118d80efd5cSThomas Hellstrom .bind = vmw_dx_shader_bind, 119d80efd5cSThomas Hellstrom .unbind = vmw_dx_shader_unbind, 120d80efd5cSThomas Hellstrom .commit_notify = vmw_dx_shader_commit_notify, 121d80efd5cSThomas Hellstrom }; 122d80efd5cSThomas Hellstrom 123c74c162fSThomas Hellstrom /** 124c74c162fSThomas Hellstrom * Shader management: 125c74c162fSThomas Hellstrom */ 126c74c162fSThomas Hellstrom 127c74c162fSThomas Hellstrom static inline struct vmw_shader * 128c74c162fSThomas Hellstrom vmw_res_to_shader(struct vmw_resource *res) 129c74c162fSThomas Hellstrom { 130c74c162fSThomas Hellstrom return container_of(res, struct vmw_shader, res); 131c74c162fSThomas Hellstrom } 132c74c162fSThomas Hellstrom 133d80efd5cSThomas Hellstrom /** 134d80efd5cSThomas Hellstrom * vmw_res_to_dx_shader - typecast a struct vmw_resource to a 135d80efd5cSThomas Hellstrom * struct vmw_dx_shader 136d80efd5cSThomas Hellstrom * 137d80efd5cSThomas Hellstrom * @res: Pointer to the struct vmw_resource. 
 */
static inline struct vmw_dx_shader *
vmw_res_to_dx_shader(struct vmw_resource *res)
{
	return container_of(res, struct vmw_dx_shader, res);
}

/**
 * vmw_hw_shader_destroy - Destroy callback installed on shader resources.
 *
 * @res: The shader resource.
 *
 * If the resource type has a destroy callback, invoke it; otherwise just
 * invalidate the resource id (DX shaders have .destroy == NULL - see
 * vmw_dx_shader_func).
 */
static void vmw_hw_shader_destroy(struct vmw_resource *res)
{
	if (likely(res->func->destroy))
		(void) res->func->destroy(res);
	else
		res->id = -1;
}


/**
 * vmw_gb_shader_init - Initialize a guest-backed shader resource.
 *
 * @dev_priv: Pointer to the device private structure.
 * @res: The resource to initialize.
 * @size: Size of the shader byte code in bytes.
 * @offset: Offset of the byte code within @byte_code.
 * @type: The SVGA3D shader type.
 * @num_input_sig: Number of input signature entries.
 * @num_output_sig: Number of output signature entries.
 * @byte_code: Optional backup buffer holding the byte code; a reference
 * is taken if non-NULL.
 * @res_free: Optional free callback; if NULL, kfree() is used on failure.
 *
 * On vmw_resource_init() failure the resource is freed (via @res_free or
 * kfree()) and the error code is returned. Returns 0 on success.
 */
static int vmw_gb_shader_init(struct vmw_private *dev_priv,
			      struct vmw_resource *res,
			      uint32_t size,
			      uint64_t offset,
			      SVGA3dShaderType type,
			      uint8_t num_input_sig,
			      uint8_t num_output_sig,
			      struct vmw_dma_buffer *byte_code,
			      void (*res_free) (struct vmw_resource *res))
{
	struct vmw_shader *shader = vmw_res_to_shader(res);
	int ret;

	ret = vmw_resource_init(dev_priv, res, true, res_free,
				&vmw_gb_shader_func);

	if (unlikely(ret != 0)) {
		if (res_free)
			res_free(res);
		else
			kfree(res);
		return ret;
	}

	res->backup_size = size;
	if (byte_code) {
		res->backup = vmw_dmabuf_reference(byte_code);
		res->backup_offset = offset;
	}
	shader->size = size;
	shader->type = type;
	shader->num_input_sig = num_input_sig;
	shader->num_output_sig = num_output_sig;

	vmw_resource_activate(res, vmw_hw_shader_destroy);
	return 0;
}

/*
 * GB shader code:
 */

/**
 * vmw_gb_shader_create - The GB shader create callback.
 *
 * @res: The shader resource.
 *
 * Allocates a device resource id and emits SVGA_3D_CMD_DEFINE_GB_SHADER.
 * A no-op if the resource already has an id. Returns 0 on success,
 * negative error code otherwise.
 */
static int vmw_gb_shader_create(struct vmw_resource *res)
{
	struct vmw_private *dev_priv = res->dev_priv;
	struct vmw_shader *shader = vmw_res_to_shader(res);
	int ret;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDefineGBShader body;
	} *cmd;

	if (likely(res->id != -1))
		return 0;

	ret = vmw_resource_alloc_id(res);
	if (unlikely(ret != 0)) {
		DRM_ERROR("Failed to allocate a shader id.\n");
		goto out_no_id;
	}

	/* The device only supports ids below VMWGFX_NUM_GB_SHADER. */
	if (unlikely(res->id >= VMWGFX_NUM_GB_SHADER)) {
		ret = -EBUSY;
		goto out_no_fifo;
	}

	cmd = vmw_fifo_reserve(dev_priv, sizeof(*cmd));
	if (unlikely(cmd == NULL)) {
		DRM_ERROR("Failed reserving FIFO space for shader "
			  "creation.\n");
		ret = -ENOMEM;
		goto out_no_fifo;
	}

	cmd->header.id = SVGA_3D_CMD_DEFINE_GB_SHADER;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.shid = res->id;
	cmd->body.type = shader->type;
	cmd->body.sizeInBytes = shader->size;
	vmw_fifo_commit(dev_priv, sizeof(*cmd));
	vmw_fifo_resource_inc(dev_priv);

	return 0;

out_no_fifo:
	vmw_resource_release_id(res);
out_no_id:
	return ret;
}

/**
 * vmw_gb_shader_bind - The GB shader bind callback.
 *
 * @res: The shader resource.
 * @val_buf: Validation buffer holding the backup MOB.
 *
 * Emits SVGA_3D_CMD_BIND_GB_SHADER attaching the MOB to the shader and
 * clears the backup-dirty flag. Returns 0 on success, -ENOMEM on FIFO
 * reservation failure.
 */
static int vmw_gb_shader_bind(struct vmw_resource *res,
			      struct ttm_validate_buffer *val_buf)
{
	struct vmw_private *dev_priv = res->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdBindGBShader body;
	} *cmd;
	struct ttm_buffer_object *bo = val_buf->bo;

	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);

	cmd = vmw_fifo_reserve(dev_priv, sizeof(*cmd));
	if (unlikely(cmd == NULL)) {
		DRM_ERROR("Failed reserving FIFO space for shader "
			  "binding.\n");
		return -ENOMEM;
	}

	cmd->header.id = SVGA_3D_CMD_BIND_GB_SHADER;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.shid = res->id;
	cmd->body.mobid = bo->mem.start;
	cmd->body.offsetInBytes = res->backup_offset;
	res->backup_dirty = false;
	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	return 0;
}

/**
 * vmw_gb_shader_unbind - The GB shader unbind callback.
 *
 * @res: The shader resource.
 * @readback: Whether this is a readback unbind. Unused here.
 * @val_buf: Validation buffer holding the backup MOB.
 *
 * Emits SVGA_3D_CMD_BIND_GB_SHADER with SVGA3D_INVALID_ID to detach the
 * MOB, then fences the backup buffer so it isn't reused before the
 * device is done with it.
 */
static int vmw_gb_shader_unbind(struct vmw_resource *res,
				bool readback,
				struct ttm_validate_buffer *val_buf)
{
	struct vmw_private *dev_priv = res->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdBindGBShader body;
	} *cmd;
	struct vmw_fence_obj *fence;

	BUG_ON(res->backup->base.mem.mem_type != VMW_PL_MOB);

	cmd = vmw_fifo_reserve(dev_priv, sizeof(*cmd));
	if (unlikely(cmd == NULL)) {
		DRM_ERROR("Failed reserving FIFO space for shader "
			  "unbinding.\n");
		return -ENOMEM;
	}

	cmd->header.id = SVGA_3D_CMD_BIND_GB_SHADER;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.shid = res->id;
	cmd->body.mobid = SVGA3D_INVALID_ID;
	cmd->body.offsetInBytes = 0;
	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	/*
	 * Create a fence object and fence the backup buffer.
	 */

	(void) vmw_execbuf_fence_commands(NULL, dev_priv,
					  &fence, NULL);

	vmw_fence_single_bo(val_buf->bo, fence);

	if (likely(fence != NULL))
		vmw_fence_obj_unreference(&fence);

	return 0;
}

/**
 * vmw_gb_shader_destroy - The GB shader destroy callback.
 *
 * @res: The shader resource.
 *
 * Scrubs any bindings referencing the shader under the binding mutex,
 * emits SVGA_3D_CMD_DESTROY_GB_SHADER and releases the resource id.
 * A no-op if the resource has no id.
 */
static int vmw_gb_shader_destroy(struct vmw_resource *res)
{
	struct vmw_private *dev_priv = res->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDestroyGBShader body;
	} *cmd;

	if (likely(res->id == -1))
		return 0;

	mutex_lock(&dev_priv->binding_mutex);
	vmw_binding_res_list_scrub(&res->binding_head);

	cmd = vmw_fifo_reserve(dev_priv, sizeof(*cmd));
	if (unlikely(cmd == NULL)) {
		DRM_ERROR("Failed reserving FIFO space for shader "
			  "destruction.\n");
		mutex_unlock(&dev_priv->binding_mutex);
		return -ENOMEM;
	}

	cmd->header.id = SVGA_3D_CMD_DESTROY_GB_SHADER;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.shid = res->id;
	vmw_fifo_commit(dev_priv, sizeof(*cmd));
	mutex_unlock(&dev_priv->binding_mutex);
	vmw_resource_release_id(res);
	vmw_fifo_resource_dec(dev_priv);

	return 0;
}

/*
 * DX shader code:
 */

/**
 * vmw_dx_shader_commit_notify - Notify that a shader operation has been
 * committed to hardware from a user-supplied command stream.
 *
 * @res: Pointer to the shader resource.
 * @state: Indicating whether a creation or removal has been committed.
 *
 */
static void vmw_dx_shader_commit_notify(struct vmw_resource *res,
					enum vmw_cmdbuf_res_state state)
{
	struct vmw_dx_shader *shader = vmw_res_to_dx_shader(res);
	struct vmw_private *dev_priv = res->dev_priv;

	if (state == VMW_CMDBUF_RES_ADD) {
		/* Creation committed: track on the cotable and publish id. */
		mutex_lock(&dev_priv->binding_mutex);
		vmw_cotable_add_resource(shader->cotable,
					 &shader->cotable_head);
		shader->committed = true;
		res->id = shader->id;
		mutex_unlock(&dev_priv->binding_mutex);
	} else {
		/* Removal committed: detach from the cotable, invalidate id. */
		mutex_lock(&dev_priv->binding_mutex);
		list_del_init(&shader->cotable_head);
		shader->committed = false;
		res->id = -1;
		mutex_unlock(&dev_priv->binding_mutex);
	}
}

/**
 * vmw_dx_shader_unscrub - Have the device reattach a MOB to a DX shader.
 *
 * @res: The shader resource
 *
 * This function reverts a scrub operation: it emits
 * SVGA_3D_CMD_DX_BIND_SHADER to rebind the backup MOB and re-adds the
 * shader to its cotable. A no-op if the shader is still on the cotable
 * list or has not been committed.
 */
static int vmw_dx_shader_unscrub(struct vmw_resource *res)
{
	struct vmw_dx_shader *shader = vmw_res_to_dx_shader(res);
	struct vmw_private *dev_priv = res->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXBindShader body;
	} *cmd;

	if (!list_empty(&shader->cotable_head) || !shader->committed)
		return 0;

	cmd = vmw_fifo_reserve_dx(dev_priv, sizeof(*cmd),
				  shader->ctx->id);
	if (unlikely(cmd == NULL)) {
		DRM_ERROR("Failed reserving FIFO space for shader "
			  "scrubbing.\n");
		return -ENOMEM;
	}

	cmd->header.id = SVGA_3D_CMD_DX_BIND_SHADER;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.cid = shader->ctx->id;
	cmd->body.shid = shader->id;
	cmd->body.mobid = res->backup->base.mem.start;
	cmd->body.offsetInBytes = res->backup_offset;
	vmw_fifo_commit(dev_priv, sizeof(*cmd));

	vmw_cotable_add_resource(shader->cotable, &shader->cotable_head);

	return 0;
}

/**
 * vmw_dx_shader_create - The DX shader create callback
 *
 * @res: The DX shader resource
 *
 * The create callback is called as part of resource validation and
 * makes sure that we unscrub the shader if it's previously been scrubbed.
 */
static int vmw_dx_shader_create(struct vmw_resource *res)
{
	struct vmw_private *dev_priv = res->dev_priv;
	struct vmw_dx_shader *shader = vmw_res_to_dx_shader(res);
	int ret = 0;

	WARN_ON_ONCE(!shader->committed);

	/* Only unscrub if a MOB is actually attached. */
	if (!list_empty(&res->mob_head)) {
		mutex_lock(&dev_priv->binding_mutex);
		ret = vmw_dx_shader_unscrub(res);
		mutex_unlock(&dev_priv->binding_mutex);
	}

	res->id = shader->id;
	return ret;
}

/**
 * vmw_dx_shader_bind - The DX shader bind callback
 *
 * @res: The DX shader resource
 * @val_buf: Pointer to the validate buffer.
 *
 * Reverts a previous scrub under the binding mutex. The MOB must be
 * validated into MOB memory before this is called.
 */
static int vmw_dx_shader_bind(struct vmw_resource *res,
			      struct ttm_validate_buffer *val_buf)
{
	struct vmw_private *dev_priv = res->dev_priv;
	struct ttm_buffer_object *bo = val_buf->bo;

	BUG_ON(bo->mem.mem_type != VMW_PL_MOB);
	mutex_lock(&dev_priv->binding_mutex);
	vmw_dx_shader_unscrub(res);
	mutex_unlock(&dev_priv->binding_mutex);

	return 0;
}

/**
 * vmw_dx_shader_scrub - Have the device unbind a MOB from a DX shader.
 *
 * @res: The shader resource
 *
 * This function unbinds a MOB from the DX shader without requiring the
 * MOB dma_buffer to be reserved. The driver still considers the MOB bound.
 * However, once the driver eventually decides to unbind the MOB, it doesn't
 * need to access the context.
 */
static int vmw_dx_shader_scrub(struct vmw_resource *res)
{
	struct vmw_dx_shader *shader = vmw_res_to_dx_shader(res);
	struct vmw_private *dev_priv = res->dev_priv;
	struct {
		SVGA3dCmdHeader header;
		SVGA3dCmdDXBindShader body;
	} *cmd;

	/* Not on the cotable list means already scrubbed - nothing to do. */
	if (list_empty(&shader->cotable_head))
		return 0;

	WARN_ON_ONCE(!shader->committed);
	cmd = vmw_fifo_reserve(dev_priv, sizeof(*cmd));
	if (unlikely(cmd == NULL)) {
		DRM_ERROR("Failed reserving FIFO space for shader "
			  "scrubbing.\n");
		return -ENOMEM;
	}

	cmd->header.id = SVGA_3D_CMD_DX_BIND_SHADER;
	cmd->header.size = sizeof(cmd->body);
	cmd->body.cid = shader->ctx->id;
	cmd->body.shid = res->id;
	cmd->body.mobid = SVGA3D_INVALID_ID;
	cmd->body.offsetInBytes = 0;
	vmw_fifo_commit(dev_priv, sizeof(*cmd));
	res->id = -1;
	list_del_init(&shader->cotable_head);

	return 0;
}

/**
 * vmw_dx_shader_unbind - The dx shader unbind callback.
 *
 * @res: The shader resource
 * @readback: Whether this is a readback unbind. Currently unused.
 * @val_buf: MOB buffer information.
 *
 * Scrubs the shader under the binding mutex, then fences the MOB so it
 * isn't reused before the device is done with it.
 */
static int vmw_dx_shader_unbind(struct vmw_resource *res,
				bool readback,
				struct ttm_validate_buffer *val_buf)
{
	struct vmw_private *dev_priv = res->dev_priv;
	struct vmw_fence_obj *fence;
	int ret;

	BUG_ON(res->backup->base.mem.mem_type != VMW_PL_MOB);

	mutex_lock(&dev_priv->binding_mutex);
	ret = vmw_dx_shader_scrub(res);
	mutex_unlock(&dev_priv->binding_mutex);

	if (ret)
		return ret;

	(void) vmw_execbuf_fence_commands(NULL, dev_priv,
					  &fence, NULL);
	vmw_fence_single_bo(val_buf->bo, fence);

	if (likely(fence != NULL))
		vmw_fence_obj_unreference(&fence);

	return 0;
}

/**
 * vmw_dx_shader_cotable_list_scrub - The cotable unbind_func callback for
 * DX shaders.
 *
 * @dev_priv: Pointer to device private structure.
 * @list: The list of cotable resources.
 * @readback: Whether the call was part of a readback unbind.
 *
 * Scrubs all shader MOBs so that any subsequent shader unbind or shader
 * destroy operation won't need to swap in the context.
 */
void vmw_dx_shader_cotable_list_scrub(struct vmw_private *dev_priv,
				      struct list_head *list,
				      bool readback)
{
	struct vmw_dx_shader *entry, *next;

	/* Caller must hold the binding mutex. */
	WARN_ON_ONCE(!mutex_is_locked(&dev_priv->binding_mutex));

	list_for_each_entry_safe(entry, next, list, cotable_head) {
		WARN_ON(vmw_dx_shader_scrub(&entry->res));
		/* A non-readback unbind also uncommits the shader. */
		if (!readback)
			entry->committed = false;
	}
}

/**
 * vmw_dx_shader_res_free - The DX shader free callback
 *
 * @res: The shader resource
 *
 * Frees the DX shader resource and updates memory accounting.
 */
static void vmw_dx_shader_res_free(struct vmw_resource *res)
{
	struct vmw_private *dev_priv = res->dev_priv;
	struct vmw_dx_shader *shader = vmw_res_to_dx_shader(res);

	vmw_resource_unreference(&shader->cotable);
	kfree(shader);
	ttm_mem_global_free(vmw_mem_glob(dev_priv), vmw_shader_dx_size);
}

/**
 * vmw_dx_shader_add - Add a shader resource as a command buffer managed
 * resource.
 *
 * @man: The command buffer resource manager.
 * @ctx: Pointer to the context resource.
 * @user_key: The id used for this shader.
 * @shader_type: The shader type.
 * @list: The list of staged command buffer managed resources.
599d80efd5cSThomas Hellstrom */ 600d80efd5cSThomas Hellstrom int vmw_dx_shader_add(struct vmw_cmdbuf_res_manager *man, 601d80efd5cSThomas Hellstrom struct vmw_resource *ctx, 602d80efd5cSThomas Hellstrom u32 user_key, 603d80efd5cSThomas Hellstrom SVGA3dShaderType shader_type, 604d80efd5cSThomas Hellstrom struct list_head *list) 605d80efd5cSThomas Hellstrom { 606d80efd5cSThomas Hellstrom struct vmw_dx_shader *shader; 607d80efd5cSThomas Hellstrom struct vmw_resource *res; 608d80efd5cSThomas Hellstrom struct vmw_private *dev_priv = ctx->dev_priv; 609d80efd5cSThomas Hellstrom int ret; 610d80efd5cSThomas Hellstrom 611d80efd5cSThomas Hellstrom if (!vmw_shader_dx_size) 612d80efd5cSThomas Hellstrom vmw_shader_dx_size = ttm_round_pot(sizeof(*shader)); 613d80efd5cSThomas Hellstrom 614d80efd5cSThomas Hellstrom if (!vmw_shader_id_ok(user_key, shader_type)) 615d80efd5cSThomas Hellstrom return -EINVAL; 616d80efd5cSThomas Hellstrom 617d80efd5cSThomas Hellstrom ret = ttm_mem_global_alloc(vmw_mem_glob(dev_priv), vmw_shader_dx_size, 618d80efd5cSThomas Hellstrom false, true); 619d80efd5cSThomas Hellstrom if (ret) { 620d80efd5cSThomas Hellstrom if (ret != -ERESTARTSYS) 621d80efd5cSThomas Hellstrom DRM_ERROR("Out of graphics memory for shader " 622d80efd5cSThomas Hellstrom "creation.\n"); 623d80efd5cSThomas Hellstrom return ret; 624d80efd5cSThomas Hellstrom } 625d80efd5cSThomas Hellstrom 626d80efd5cSThomas Hellstrom shader = kmalloc(sizeof(*shader), GFP_KERNEL); 627d80efd5cSThomas Hellstrom if (!shader) { 628d80efd5cSThomas Hellstrom ttm_mem_global_free(vmw_mem_glob(dev_priv), vmw_shader_dx_size); 629d80efd5cSThomas Hellstrom return -ENOMEM; 630d80efd5cSThomas Hellstrom } 631d80efd5cSThomas Hellstrom 632d80efd5cSThomas Hellstrom res = &shader->res; 633d80efd5cSThomas Hellstrom shader->ctx = ctx; 634d80efd5cSThomas Hellstrom shader->cotable = vmw_context_cotable(ctx, SVGA_COTABLE_DXSHADER); 635d80efd5cSThomas Hellstrom shader->id = user_key; 636d80efd5cSThomas Hellstrom shader->committed 
= false; 637d80efd5cSThomas Hellstrom INIT_LIST_HEAD(&shader->cotable_head); 638d80efd5cSThomas Hellstrom ret = vmw_resource_init(dev_priv, res, true, 639d80efd5cSThomas Hellstrom vmw_dx_shader_res_free, &vmw_dx_shader_func); 640d80efd5cSThomas Hellstrom if (ret) 641d80efd5cSThomas Hellstrom goto out_resource_init; 642d80efd5cSThomas Hellstrom 643d80efd5cSThomas Hellstrom /* 644d80efd5cSThomas Hellstrom * The user_key name-space is not per shader type for DX shaders, 645d80efd5cSThomas Hellstrom * so when hashing, use a single zero shader type. 646d80efd5cSThomas Hellstrom */ 647d80efd5cSThomas Hellstrom ret = vmw_cmdbuf_res_add(man, vmw_cmdbuf_res_shader, 648d80efd5cSThomas Hellstrom vmw_shader_key(user_key, 0), 649d80efd5cSThomas Hellstrom res, list); 650d80efd5cSThomas Hellstrom if (ret) 651d80efd5cSThomas Hellstrom goto out_resource_init; 652d80efd5cSThomas Hellstrom 653d80efd5cSThomas Hellstrom res->id = shader->id; 654d80efd5cSThomas Hellstrom vmw_resource_activate(res, vmw_hw_shader_destroy); 655d80efd5cSThomas Hellstrom 656d80efd5cSThomas Hellstrom out_resource_init: 657d80efd5cSThomas Hellstrom vmw_resource_unreference(&res); 658d80efd5cSThomas Hellstrom 659d80efd5cSThomas Hellstrom return ret; 660d80efd5cSThomas Hellstrom } 661d80efd5cSThomas Hellstrom 662d80efd5cSThomas Hellstrom 663d80efd5cSThomas Hellstrom 664c74c162fSThomas Hellstrom /** 665c74c162fSThomas Hellstrom * User-space shader management: 666c74c162fSThomas Hellstrom */ 667c74c162fSThomas Hellstrom 668c74c162fSThomas Hellstrom static struct vmw_resource * 669c74c162fSThomas Hellstrom vmw_user_shader_base_to_res(struct ttm_base_object *base) 670c74c162fSThomas Hellstrom { 671c74c162fSThomas Hellstrom return &(container_of(base, struct vmw_user_shader, base)-> 672c74c162fSThomas Hellstrom shader.res); 673c74c162fSThomas Hellstrom } 674c74c162fSThomas Hellstrom 675c74c162fSThomas Hellstrom static void vmw_user_shader_free(struct vmw_resource *res) 676c74c162fSThomas Hellstrom { 
677c74c162fSThomas Hellstrom struct vmw_user_shader *ushader = 678c74c162fSThomas Hellstrom container_of(res, struct vmw_user_shader, shader.res); 679c74c162fSThomas Hellstrom struct vmw_private *dev_priv = res->dev_priv; 680c74c162fSThomas Hellstrom 681c74c162fSThomas Hellstrom ttm_base_object_kfree(ushader, base); 682c74c162fSThomas Hellstrom ttm_mem_global_free(vmw_mem_glob(dev_priv), 683c74c162fSThomas Hellstrom vmw_user_shader_size); 684c74c162fSThomas Hellstrom } 685c74c162fSThomas Hellstrom 68618e4a466SThomas Hellstrom static void vmw_shader_free(struct vmw_resource *res) 68718e4a466SThomas Hellstrom { 68818e4a466SThomas Hellstrom struct vmw_shader *shader = vmw_res_to_shader(res); 68918e4a466SThomas Hellstrom struct vmw_private *dev_priv = res->dev_priv; 69018e4a466SThomas Hellstrom 69118e4a466SThomas Hellstrom kfree(shader); 69218e4a466SThomas Hellstrom ttm_mem_global_free(vmw_mem_glob(dev_priv), 69318e4a466SThomas Hellstrom vmw_shader_size); 69418e4a466SThomas Hellstrom } 69518e4a466SThomas Hellstrom 696c74c162fSThomas Hellstrom /** 697c74c162fSThomas Hellstrom * This function is called when user space has no more references on the 698c74c162fSThomas Hellstrom * base object. It releases the base-object's reference on the resource object. 
699c74c162fSThomas Hellstrom */ 700c74c162fSThomas Hellstrom 701c74c162fSThomas Hellstrom static void vmw_user_shader_base_release(struct ttm_base_object **p_base) 702c74c162fSThomas Hellstrom { 703c74c162fSThomas Hellstrom struct ttm_base_object *base = *p_base; 704c74c162fSThomas Hellstrom struct vmw_resource *res = vmw_user_shader_base_to_res(base); 705c74c162fSThomas Hellstrom 706c74c162fSThomas Hellstrom *p_base = NULL; 707c74c162fSThomas Hellstrom vmw_resource_unreference(&res); 708c74c162fSThomas Hellstrom } 709c74c162fSThomas Hellstrom 710c74c162fSThomas Hellstrom int vmw_shader_destroy_ioctl(struct drm_device *dev, void *data, 711c74c162fSThomas Hellstrom struct drm_file *file_priv) 712c74c162fSThomas Hellstrom { 713c74c162fSThomas Hellstrom struct drm_vmw_shader_arg *arg = (struct drm_vmw_shader_arg *)data; 714c74c162fSThomas Hellstrom struct ttm_object_file *tfile = vmw_fpriv(file_priv)->tfile; 715c74c162fSThomas Hellstrom 716c74c162fSThomas Hellstrom return ttm_ref_object_base_unref(tfile, arg->handle, 717c74c162fSThomas Hellstrom TTM_REF_USAGE); 718c74c162fSThomas Hellstrom } 719c74c162fSThomas Hellstrom 72018e4a466SThomas Hellstrom static int vmw_user_shader_alloc(struct vmw_private *dev_priv, 721d5bde956SThomas Hellstrom struct vmw_dma_buffer *buffer, 722d5bde956SThomas Hellstrom size_t shader_size, 723d5bde956SThomas Hellstrom size_t offset, 724d5bde956SThomas Hellstrom SVGA3dShaderType shader_type, 725d80efd5cSThomas Hellstrom uint8_t num_input_sig, 726d80efd5cSThomas Hellstrom uint8_t num_output_sig, 727d5bde956SThomas Hellstrom struct ttm_object_file *tfile, 728d5bde956SThomas Hellstrom u32 *handle) 729d5bde956SThomas Hellstrom { 730d5bde956SThomas Hellstrom struct vmw_user_shader *ushader; 731d5bde956SThomas Hellstrom struct vmw_resource *res, *tmp; 732d5bde956SThomas Hellstrom int ret; 733d5bde956SThomas Hellstrom 734d5bde956SThomas Hellstrom /* 735d5bde956SThomas Hellstrom * Approximate idr memory usage with 128 bytes. 
It will be limited 736d5bde956SThomas Hellstrom * by maximum number_of shaders anyway. 737d5bde956SThomas Hellstrom */ 738d5bde956SThomas Hellstrom if (unlikely(vmw_user_shader_size == 0)) 739d5bde956SThomas Hellstrom vmw_user_shader_size = 740d5bde956SThomas Hellstrom ttm_round_pot(sizeof(struct vmw_user_shader)) + 128; 741d5bde956SThomas Hellstrom 742d5bde956SThomas Hellstrom ret = ttm_mem_global_alloc(vmw_mem_glob(dev_priv), 743d5bde956SThomas Hellstrom vmw_user_shader_size, 744d5bde956SThomas Hellstrom false, true); 745d5bde956SThomas Hellstrom if (unlikely(ret != 0)) { 746d5bde956SThomas Hellstrom if (ret != -ERESTARTSYS) 747d5bde956SThomas Hellstrom DRM_ERROR("Out of graphics memory for shader " 748d5bde956SThomas Hellstrom "creation.\n"); 749d5bde956SThomas Hellstrom goto out; 750d5bde956SThomas Hellstrom } 751d5bde956SThomas Hellstrom 752d5bde956SThomas Hellstrom ushader = kzalloc(sizeof(*ushader), GFP_KERNEL); 753d5bde956SThomas Hellstrom if (unlikely(ushader == NULL)) { 754d5bde956SThomas Hellstrom ttm_mem_global_free(vmw_mem_glob(dev_priv), 755d5bde956SThomas Hellstrom vmw_user_shader_size); 756d5bde956SThomas Hellstrom ret = -ENOMEM; 757d5bde956SThomas Hellstrom goto out; 758d5bde956SThomas Hellstrom } 759d5bde956SThomas Hellstrom 760d5bde956SThomas Hellstrom res = &ushader->shader.res; 761d5bde956SThomas Hellstrom ushader->base.shareable = false; 762d5bde956SThomas Hellstrom ushader->base.tfile = NULL; 763d5bde956SThomas Hellstrom 764d5bde956SThomas Hellstrom /* 765d5bde956SThomas Hellstrom * From here on, the destructor takes over resource freeing. 
766d5bde956SThomas Hellstrom */ 767d5bde956SThomas Hellstrom 768d5bde956SThomas Hellstrom ret = vmw_gb_shader_init(dev_priv, res, shader_size, 769d80efd5cSThomas Hellstrom offset, shader_type, num_input_sig, 770d80efd5cSThomas Hellstrom num_output_sig, buffer, 771d5bde956SThomas Hellstrom vmw_user_shader_free); 772d5bde956SThomas Hellstrom if (unlikely(ret != 0)) 773d5bde956SThomas Hellstrom goto out; 774d5bde956SThomas Hellstrom 775d5bde956SThomas Hellstrom tmp = vmw_resource_reference(res); 776d5bde956SThomas Hellstrom ret = ttm_base_object_init(tfile, &ushader->base, false, 777d5bde956SThomas Hellstrom VMW_RES_SHADER, 778d5bde956SThomas Hellstrom &vmw_user_shader_base_release, NULL); 779d5bde956SThomas Hellstrom 780d5bde956SThomas Hellstrom if (unlikely(ret != 0)) { 781d5bde956SThomas Hellstrom vmw_resource_unreference(&tmp); 782d5bde956SThomas Hellstrom goto out_err; 783d5bde956SThomas Hellstrom } 784d5bde956SThomas Hellstrom 785d5bde956SThomas Hellstrom if (handle) 786d5bde956SThomas Hellstrom *handle = ushader->base.hash.key; 787d5bde956SThomas Hellstrom out_err: 788d5bde956SThomas Hellstrom vmw_resource_unreference(&res); 789d5bde956SThomas Hellstrom out: 790d5bde956SThomas Hellstrom return ret; 791d5bde956SThomas Hellstrom } 792d5bde956SThomas Hellstrom 793d5bde956SThomas Hellstrom 794b9eb1a61SThomas Hellstrom static struct vmw_resource *vmw_shader_alloc(struct vmw_private *dev_priv, 79518e4a466SThomas Hellstrom struct vmw_dma_buffer *buffer, 79618e4a466SThomas Hellstrom size_t shader_size, 79718e4a466SThomas Hellstrom size_t offset, 79818e4a466SThomas Hellstrom SVGA3dShaderType shader_type) 79918e4a466SThomas Hellstrom { 80018e4a466SThomas Hellstrom struct vmw_shader *shader; 80118e4a466SThomas Hellstrom struct vmw_resource *res; 80218e4a466SThomas Hellstrom int ret; 80318e4a466SThomas Hellstrom 80418e4a466SThomas Hellstrom /* 80518e4a466SThomas Hellstrom * Approximate idr memory usage with 128 bytes. 
It will be limited 80618e4a466SThomas Hellstrom * by maximum number_of shaders anyway. 80718e4a466SThomas Hellstrom */ 80818e4a466SThomas Hellstrom if (unlikely(vmw_shader_size == 0)) 80918e4a466SThomas Hellstrom vmw_shader_size = 81018e4a466SThomas Hellstrom ttm_round_pot(sizeof(struct vmw_shader)) + 128; 81118e4a466SThomas Hellstrom 81218e4a466SThomas Hellstrom ret = ttm_mem_global_alloc(vmw_mem_glob(dev_priv), 81318e4a466SThomas Hellstrom vmw_shader_size, 81418e4a466SThomas Hellstrom false, true); 81518e4a466SThomas Hellstrom if (unlikely(ret != 0)) { 81618e4a466SThomas Hellstrom if (ret != -ERESTARTSYS) 81718e4a466SThomas Hellstrom DRM_ERROR("Out of graphics memory for shader " 81818e4a466SThomas Hellstrom "creation.\n"); 81918e4a466SThomas Hellstrom goto out_err; 82018e4a466SThomas Hellstrom } 82118e4a466SThomas Hellstrom 82218e4a466SThomas Hellstrom shader = kzalloc(sizeof(*shader), GFP_KERNEL); 82318e4a466SThomas Hellstrom if (unlikely(shader == NULL)) { 82418e4a466SThomas Hellstrom ttm_mem_global_free(vmw_mem_glob(dev_priv), 82518e4a466SThomas Hellstrom vmw_shader_size); 82618e4a466SThomas Hellstrom ret = -ENOMEM; 82718e4a466SThomas Hellstrom goto out_err; 82818e4a466SThomas Hellstrom } 82918e4a466SThomas Hellstrom 83018e4a466SThomas Hellstrom res = &shader->res; 83118e4a466SThomas Hellstrom 83218e4a466SThomas Hellstrom /* 83318e4a466SThomas Hellstrom * From here on, the destructor takes over resource freeing. 83418e4a466SThomas Hellstrom */ 83518e4a466SThomas Hellstrom ret = vmw_gb_shader_init(dev_priv, res, shader_size, 836d80efd5cSThomas Hellstrom offset, shader_type, 0, 0, buffer, 83718e4a466SThomas Hellstrom vmw_shader_free); 83818e4a466SThomas Hellstrom 83918e4a466SThomas Hellstrom out_err: 84018e4a466SThomas Hellstrom return ret ? 
ERR_PTR(ret) : res; 84118e4a466SThomas Hellstrom } 84218e4a466SThomas Hellstrom 84318e4a466SThomas Hellstrom 844d80efd5cSThomas Hellstrom static int vmw_shader_define(struct drm_device *dev, struct drm_file *file_priv, 845d80efd5cSThomas Hellstrom enum drm_vmw_shader_type shader_type_drm, 846d80efd5cSThomas Hellstrom u32 buffer_handle, size_t size, size_t offset, 847d80efd5cSThomas Hellstrom uint8_t num_input_sig, uint8_t num_output_sig, 848d80efd5cSThomas Hellstrom uint32_t *shader_handle) 849c74c162fSThomas Hellstrom { 850c74c162fSThomas Hellstrom struct vmw_private *dev_priv = vmw_priv(dev); 851c74c162fSThomas Hellstrom struct ttm_object_file *tfile = vmw_fpriv(file_priv)->tfile; 852c74c162fSThomas Hellstrom struct vmw_dma_buffer *buffer = NULL; 853c74c162fSThomas Hellstrom SVGA3dShaderType shader_type; 854c74c162fSThomas Hellstrom int ret; 855c74c162fSThomas Hellstrom 856d80efd5cSThomas Hellstrom if (buffer_handle != SVGA3D_INVALID_ID) { 857d80efd5cSThomas Hellstrom ret = vmw_user_dmabuf_lookup(tfile, buffer_handle, 858c74c162fSThomas Hellstrom &buffer); 859c74c162fSThomas Hellstrom if (unlikely(ret != 0)) { 860c74c162fSThomas Hellstrom DRM_ERROR("Could not find buffer for shader " 861c74c162fSThomas Hellstrom "creation.\n"); 862c74c162fSThomas Hellstrom return ret; 863c74c162fSThomas Hellstrom } 864c74c162fSThomas Hellstrom 865c74c162fSThomas Hellstrom if ((u64)buffer->base.num_pages * PAGE_SIZE < 866d80efd5cSThomas Hellstrom (u64)size + (u64)offset) { 867c74c162fSThomas Hellstrom DRM_ERROR("Illegal buffer- or shader size.\n"); 868c74c162fSThomas Hellstrom ret = -EINVAL; 869c74c162fSThomas Hellstrom goto out_bad_arg; 870c74c162fSThomas Hellstrom } 871c74c162fSThomas Hellstrom } 872c74c162fSThomas Hellstrom 873d80efd5cSThomas Hellstrom switch (shader_type_drm) { 874c74c162fSThomas Hellstrom case drm_vmw_shader_type_vs: 875c74c162fSThomas Hellstrom shader_type = SVGA3D_SHADERTYPE_VS; 876c74c162fSThomas Hellstrom break; 877c74c162fSThomas Hellstrom case 
drm_vmw_shader_type_ps: 878c74c162fSThomas Hellstrom shader_type = SVGA3D_SHADERTYPE_PS; 879c74c162fSThomas Hellstrom break; 880c74c162fSThomas Hellstrom default: 881c74c162fSThomas Hellstrom DRM_ERROR("Illegal shader type.\n"); 882c74c162fSThomas Hellstrom ret = -EINVAL; 883c74c162fSThomas Hellstrom goto out_bad_arg; 884c74c162fSThomas Hellstrom } 885c74c162fSThomas Hellstrom 886294adf7dSThomas Hellstrom ret = ttm_read_lock(&dev_priv->reservation_sem, true); 887c74c162fSThomas Hellstrom if (unlikely(ret != 0)) 888d5bde956SThomas Hellstrom goto out_bad_arg; 889c74c162fSThomas Hellstrom 890d80efd5cSThomas Hellstrom ret = vmw_user_shader_alloc(dev_priv, buffer, size, offset, 891d80efd5cSThomas Hellstrom shader_type, num_input_sig, 892d80efd5cSThomas Hellstrom num_output_sig, tfile, shader_handle); 893c74c162fSThomas Hellstrom 894294adf7dSThomas Hellstrom ttm_read_unlock(&dev_priv->reservation_sem); 895c74c162fSThomas Hellstrom out_bad_arg: 896c74c162fSThomas Hellstrom vmw_dmabuf_unreference(&buffer); 897d5bde956SThomas Hellstrom return ret; 898d5bde956SThomas Hellstrom } 899c74c162fSThomas Hellstrom 900d5bde956SThomas Hellstrom /** 901d80efd5cSThomas Hellstrom * vmw_shader_id_ok - Check whether a compat shader user key and 90218e4a466SThomas Hellstrom * shader type are within valid bounds. 903d5bde956SThomas Hellstrom * 90418e4a466SThomas Hellstrom * @user_key: User space id of the shader. 90518e4a466SThomas Hellstrom * @shader_type: Shader type. 906d5bde956SThomas Hellstrom * 90718e4a466SThomas Hellstrom * Returns true if valid false if not. 
908d5bde956SThomas Hellstrom */ 909d80efd5cSThomas Hellstrom static bool vmw_shader_id_ok(u32 user_key, SVGA3dShaderType shader_type) 910d5bde956SThomas Hellstrom { 91118e4a466SThomas Hellstrom return user_key <= ((1 << 20) - 1) && (unsigned) shader_type < 16; 912d5bde956SThomas Hellstrom } 913d5bde956SThomas Hellstrom 914d5bde956SThomas Hellstrom /** 915d80efd5cSThomas Hellstrom * vmw_shader_key - Compute a hash key suitable for a compat shader. 916d5bde956SThomas Hellstrom * 91718e4a466SThomas Hellstrom * @user_key: User space id of the shader. 91818e4a466SThomas Hellstrom * @shader_type: Shader type. 919d5bde956SThomas Hellstrom * 92018e4a466SThomas Hellstrom * Returns a hash key suitable for a command buffer managed resource 92118e4a466SThomas Hellstrom * manager hash table. 922d5bde956SThomas Hellstrom */ 923d80efd5cSThomas Hellstrom static u32 vmw_shader_key(u32 user_key, SVGA3dShaderType shader_type) 924d5bde956SThomas Hellstrom { 92518e4a466SThomas Hellstrom return user_key | (shader_type << 20); 926d5bde956SThomas Hellstrom } 927d5bde956SThomas Hellstrom 928d5bde956SThomas Hellstrom /** 929d80efd5cSThomas Hellstrom * vmw_shader_remove - Stage a compat shader for removal. 930d5bde956SThomas Hellstrom * 93118e4a466SThomas Hellstrom * @man: Pointer to the compat shader manager identifying the shader namespace. 932d5bde956SThomas Hellstrom * @user_key: The key that is used to identify the shader. The key is 933d5bde956SThomas Hellstrom * unique to the shader type. 934d5bde956SThomas Hellstrom * @shader_type: Shader type. 93518e4a466SThomas Hellstrom * @list: Caller's list of staged command buffer resource actions. 
936d5bde956SThomas Hellstrom */ 937d80efd5cSThomas Hellstrom int vmw_shader_remove(struct vmw_cmdbuf_res_manager *man, 938d5bde956SThomas Hellstrom u32 user_key, SVGA3dShaderType shader_type, 939d5bde956SThomas Hellstrom struct list_head *list) 940d5bde956SThomas Hellstrom { 941d80efd5cSThomas Hellstrom struct vmw_resource *dummy; 942d80efd5cSThomas Hellstrom 943d80efd5cSThomas Hellstrom if (!vmw_shader_id_ok(user_key, shader_type)) 944d5bde956SThomas Hellstrom return -EINVAL; 945d5bde956SThomas Hellstrom 946d80efd5cSThomas Hellstrom return vmw_cmdbuf_res_remove(man, vmw_cmdbuf_res_shader, 947d80efd5cSThomas Hellstrom vmw_shader_key(user_key, shader_type), 948d80efd5cSThomas Hellstrom list, &dummy); 949d5bde956SThomas Hellstrom } 950d5bde956SThomas Hellstrom 951d5bde956SThomas Hellstrom /** 95218e4a466SThomas Hellstrom * vmw_compat_shader_add - Create a compat shader and stage it for addition 95318e4a466SThomas Hellstrom * as a command buffer managed resource. 954d5bde956SThomas Hellstrom * 95518e4a466SThomas Hellstrom * @man: Pointer to the compat shader manager identifying the shader namespace. 956d5bde956SThomas Hellstrom * @user_key: The key that is used to identify the shader. The key is 957d5bde956SThomas Hellstrom * unique to the shader type. 958d5bde956SThomas Hellstrom * @bytecode: Pointer to the bytecode of the shader. 959d5bde956SThomas Hellstrom * @shader_type: Shader type. 960d5bde956SThomas Hellstrom * @tfile: Pointer to a struct ttm_object_file that the guest-backed shader is 961d5bde956SThomas Hellstrom * to be created with. 96218e4a466SThomas Hellstrom * @list: Caller's list of staged command buffer resource actions. 
963d5bde956SThomas Hellstrom * 964d5bde956SThomas Hellstrom */ 96518e4a466SThomas Hellstrom int vmw_compat_shader_add(struct vmw_private *dev_priv, 96618e4a466SThomas Hellstrom struct vmw_cmdbuf_res_manager *man, 967d5bde956SThomas Hellstrom u32 user_key, const void *bytecode, 968d5bde956SThomas Hellstrom SVGA3dShaderType shader_type, 969d5bde956SThomas Hellstrom size_t size, 970d5bde956SThomas Hellstrom struct list_head *list) 971d5bde956SThomas Hellstrom { 972d5bde956SThomas Hellstrom struct vmw_dma_buffer *buf; 973d5bde956SThomas Hellstrom struct ttm_bo_kmap_obj map; 974d5bde956SThomas Hellstrom bool is_iomem; 975d5bde956SThomas Hellstrom int ret; 97618e4a466SThomas Hellstrom struct vmw_resource *res; 977d5bde956SThomas Hellstrom 978d80efd5cSThomas Hellstrom if (!vmw_shader_id_ok(user_key, shader_type)) 979d5bde956SThomas Hellstrom return -EINVAL; 980d5bde956SThomas Hellstrom 981d5bde956SThomas Hellstrom /* Allocate and pin a DMA buffer */ 982d5bde956SThomas Hellstrom buf = kzalloc(sizeof(*buf), GFP_KERNEL); 983d5bde956SThomas Hellstrom if (unlikely(buf == NULL)) 984d5bde956SThomas Hellstrom return -ENOMEM; 985d5bde956SThomas Hellstrom 98618e4a466SThomas Hellstrom ret = vmw_dmabuf_init(dev_priv, buf, size, &vmw_sys_ne_placement, 987d5bde956SThomas Hellstrom true, vmw_dmabuf_bo_free); 988d5bde956SThomas Hellstrom if (unlikely(ret != 0)) 989d5bde956SThomas Hellstrom goto out; 990d5bde956SThomas Hellstrom 991d5bde956SThomas Hellstrom ret = ttm_bo_reserve(&buf->base, false, true, false, NULL); 992d5bde956SThomas Hellstrom if (unlikely(ret != 0)) 993d5bde956SThomas Hellstrom goto no_reserve; 994d5bde956SThomas Hellstrom 995d5bde956SThomas Hellstrom /* Map and copy shader bytecode. 
*/ 996d5bde956SThomas Hellstrom ret = ttm_bo_kmap(&buf->base, 0, PAGE_ALIGN(size) >> PAGE_SHIFT, 997d5bde956SThomas Hellstrom &map); 998d5bde956SThomas Hellstrom if (unlikely(ret != 0)) { 999d5bde956SThomas Hellstrom ttm_bo_unreserve(&buf->base); 1000d5bde956SThomas Hellstrom goto no_reserve; 1001d5bde956SThomas Hellstrom } 1002d5bde956SThomas Hellstrom 1003d5bde956SThomas Hellstrom memcpy(ttm_kmap_obj_virtual(&map, &is_iomem), bytecode, size); 1004d5bde956SThomas Hellstrom WARN_ON(is_iomem); 1005d5bde956SThomas Hellstrom 1006d5bde956SThomas Hellstrom ttm_bo_kunmap(&map); 1007d5bde956SThomas Hellstrom ret = ttm_bo_validate(&buf->base, &vmw_sys_placement, false, true); 1008d5bde956SThomas Hellstrom WARN_ON(ret != 0); 1009d5bde956SThomas Hellstrom ttm_bo_unreserve(&buf->base); 1010d5bde956SThomas Hellstrom 101118e4a466SThomas Hellstrom res = vmw_shader_alloc(dev_priv, buf, size, 0, shader_type); 1012d5bde956SThomas Hellstrom if (unlikely(ret != 0)) 1013d5bde956SThomas Hellstrom goto no_reserve; 1014d5bde956SThomas Hellstrom 1015d80efd5cSThomas Hellstrom ret = vmw_cmdbuf_res_add(man, vmw_cmdbuf_res_shader, 1016d80efd5cSThomas Hellstrom vmw_shader_key(user_key, shader_type), 101718e4a466SThomas Hellstrom res, list); 101818e4a466SThomas Hellstrom vmw_resource_unreference(&res); 1019d5bde956SThomas Hellstrom no_reserve: 102018e4a466SThomas Hellstrom vmw_dmabuf_unreference(&buf); 1021d5bde956SThomas Hellstrom out: 1022d5bde956SThomas Hellstrom return ret; 1023d5bde956SThomas Hellstrom } 1024d5bde956SThomas Hellstrom 1025d5bde956SThomas Hellstrom /** 1026d80efd5cSThomas Hellstrom * vmw_shader_lookup - Look up a compat shader 1027d5bde956SThomas Hellstrom * 102818e4a466SThomas Hellstrom * @man: Pointer to the command buffer managed resource manager identifying 102918e4a466SThomas Hellstrom * the shader namespace. 103018e4a466SThomas Hellstrom * @user_key: The user space id of the shader. 103118e4a466SThomas Hellstrom * @shader_type: The shader type. 
1032d5bde956SThomas Hellstrom * 103318e4a466SThomas Hellstrom * Returns a refcounted pointer to a struct vmw_resource if the shader was 103418e4a466SThomas Hellstrom * found. An error pointer otherwise. 1035d5bde956SThomas Hellstrom */ 103618e4a466SThomas Hellstrom struct vmw_resource * 1037d80efd5cSThomas Hellstrom vmw_shader_lookup(struct vmw_cmdbuf_res_manager *man, 103818e4a466SThomas Hellstrom u32 user_key, 103918e4a466SThomas Hellstrom SVGA3dShaderType shader_type) 1040d5bde956SThomas Hellstrom { 1041d80efd5cSThomas Hellstrom if (!vmw_shader_id_ok(user_key, shader_type)) 104218e4a466SThomas Hellstrom return ERR_PTR(-EINVAL); 1043d5bde956SThomas Hellstrom 1044d80efd5cSThomas Hellstrom return vmw_cmdbuf_res_lookup(man, vmw_cmdbuf_res_shader, 1045d80efd5cSThomas Hellstrom vmw_shader_key(user_key, shader_type)); 1046d80efd5cSThomas Hellstrom } 1047d80efd5cSThomas Hellstrom 1048d80efd5cSThomas Hellstrom int vmw_shader_define_ioctl(struct drm_device *dev, void *data, 1049d80efd5cSThomas Hellstrom struct drm_file *file_priv) 1050d80efd5cSThomas Hellstrom { 1051d80efd5cSThomas Hellstrom struct drm_vmw_shader_create_arg *arg = 1052d80efd5cSThomas Hellstrom (struct drm_vmw_shader_create_arg *)data; 1053d80efd5cSThomas Hellstrom 1054d80efd5cSThomas Hellstrom return vmw_shader_define(dev, file_priv, arg->shader_type, 1055d80efd5cSThomas Hellstrom arg->buffer_handle, 1056d80efd5cSThomas Hellstrom arg->size, arg->offset, 1057d80efd5cSThomas Hellstrom 0, 0, 1058d80efd5cSThomas Hellstrom &arg->shader_handle); 1059c74c162fSThomas Hellstrom } 1060