1dff96888SDirk Hohndel (VMware) // SPDX-License-Identifier: GPL-2.0 OR MIT
2fb1d9738SJakob Bornecrantz /**************************************************************************
3fb1d9738SJakob Bornecrantz *
4dff96888SDirk Hohndel (VMware) * Copyright 2009-2015 VMware, Inc., Palo Alto, CA., USA
5fb1d9738SJakob Bornecrantz *
6fb1d9738SJakob Bornecrantz * Permission is hereby granted, free of charge, to any person obtaining a
7fb1d9738SJakob Bornecrantz * copy of this software and associated documentation files (the
8fb1d9738SJakob Bornecrantz * "Software"), to deal in the Software without restriction, including
9fb1d9738SJakob Bornecrantz * without limitation the rights to use, copy, modify, merge, publish,
10fb1d9738SJakob Bornecrantz * distribute, sub license, and/or sell copies of the Software, and to
11fb1d9738SJakob Bornecrantz * permit persons to whom the Software is furnished to do so, subject to
12fb1d9738SJakob Bornecrantz * the following conditions:
13fb1d9738SJakob Bornecrantz *
14fb1d9738SJakob Bornecrantz * The above copyright notice and this permission notice (including the
15fb1d9738SJakob Bornecrantz * next paragraph) shall be included in all copies or substantial portions
16fb1d9738SJakob Bornecrantz * of the Software.
17fb1d9738SJakob Bornecrantz *
18fb1d9738SJakob Bornecrantz * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19fb1d9738SJakob Bornecrantz * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20fb1d9738SJakob Bornecrantz * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
21fb1d9738SJakob Bornecrantz * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
22fb1d9738SJakob Bornecrantz * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
23fb1d9738SJakob Bornecrantz * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
24fb1d9738SJakob Bornecrantz * USE OR OTHER DEALINGS IN THE SOFTWARE.
25fb1d9738SJakob Bornecrantz *
26fb1d9738SJakob Bornecrantz **************************************************************************/
27fb1d9738SJakob Bornecrantz
28fb1d9738SJakob Bornecrantz
296ae8748bSSam Ravnborg #include "vmwgfx_drv.h"
306ae8748bSSam Ravnborg
316e4dcff3SJakob Bornecrantz #define VMW_PPN_SIZE (sizeof(unsigned long))
326e4dcff3SJakob Bornecrantz /* A future safe maximum remap size. */
336e4dcff3SJakob Bornecrantz #define VMW_PPN_PER_REMAP ((31 * 1024) / VMW_PPN_SIZE)
34d92d9851SThomas Hellstrom #define DMA_ADDR_INVALID ((dma_addr_t) 0)
35d92d9851SThomas Hellstrom #define DMA_PAGE_INVALID 0UL
362de59d01SThomas Hellstrom
/*
 * vmw_gmr2_bind - Define a GMR2 on the device and map pages into it.
 *
 * @dev_priv:  Pointer to the device private structure.
 * @iter:      Page-table iterator already positioned at the first page
 *             to map (caller must have called vmw_piter_next() once).
 * @num_pages: Number of pages to map into the GMR.
 * @gmr_id:    Device GMR id to define and populate.
 *
 * Emits one SVGA_CMD_DEFINE_GMR2 followed by as many SVGA_CMD_REMAP_GMR2
 * commands as needed (each remap carries at most VMW_PPN_PER_REMAP page
 * numbers), all reserved and committed as a single FIFO transaction.
 *
 * Returns 0 on success, -ENOMEM if FIFO space could not be reserved.
 */
static int vmw_gmr2_bind(struct vmw_private *dev_priv,
			 struct vmw_piter *iter,
			 unsigned long num_pages,
			 int gmr_id)
{
	SVGAFifoCmdDefineGMR2 define_cmd;
	SVGAFifoCmdRemapGMR2 remap_cmd;
	uint32_t *cmd;
	uint32_t *cmd_orig;
	/* DEFINE_GMR2: one 32-bit command-id word plus the command body. */
	uint32_t define_size = sizeof(define_cmd) + sizeof(*cmd);
	/* Number of REMAP_GMR2 commands needed, rounding up. */
	uint32_t remap_num = num_pages / VMW_PPN_PER_REMAP + ((num_pages % VMW_PPN_PER_REMAP) > 0);
	/* Total remap bytes: all PPNs plus (id word + header) per remap. */
	uint32_t remap_size = VMW_PPN_SIZE * num_pages + (sizeof(remap_cmd) + sizeof(*cmd)) * remap_num;
	uint32_t remap_pos = 0;
	uint32_t cmd_size = define_size + remap_size;
	uint32_t i;

	/* Reserve the whole command sequence in one go. */
	cmd_orig = cmd = VMW_CMD_RESERVE(dev_priv, cmd_size);
	if (unlikely(cmd == NULL))
		return -ENOMEM;

	define_cmd.gmrId = gmr_id;
	define_cmd.numPages = num_pages;

	/* Emit the DEFINE_GMR2 command. */
	*cmd++ = SVGA_CMD_DEFINE_GMR2;
	memcpy(cmd, &define_cmd, sizeof(define_cmd));
	cmd += sizeof(define_cmd) / sizeof(*cmd);

	/*
	 * Need to split the command if there are too many
	 * pages that goes into the gmr.
	 */

	remap_cmd.gmrId = gmr_id;
	/* 64-bit PPNs when a page number cannot fit in a 32-bit word. */
	remap_cmd.flags = (VMW_PPN_SIZE > sizeof(*cmd)) ?
		SVGA_REMAP_GMR2_PPN64 : SVGA_REMAP_GMR2_PPN32;

	while (num_pages > 0) {
		/* Pages carried by this remap command. */
		unsigned long nr = min_t(unsigned long, num_pages, VMW_PPN_PER_REMAP);

		remap_cmd.offsetPages = remap_pos;
		remap_cmd.numPages = nr;

		*cmd++ = SVGA_CMD_REMAP_GMR2;
		memcpy(cmd, &remap_cmd, sizeof(remap_cmd));
		cmd += sizeof(remap_cmd) / sizeof(*cmd);

		/* Append the page numbers (dma address >> PAGE_SHIFT). */
		for (i = 0; i < nr; ++i) {
			if (VMW_PPN_SIZE <= 4)
				*cmd = vmw_piter_dma_addr(iter) >> PAGE_SHIFT;
			else
				*((uint64_t *)cmd) = vmw_piter_dma_addr(iter) >>
					PAGE_SHIFT;

			cmd += VMW_PPN_SIZE / sizeof(*cmd);
			vmw_piter_next(iter);
		}

		num_pages -= nr;
		remap_pos += nr;
	}

	/* The emitted stream must exactly match the reserved size. */
	BUG_ON(cmd != cmd_orig + cmd_size / sizeof(*cmd));

	vmw_cmd_commit(dev_priv, cmd_size);

	return 0;
}
1042de59d01SThomas Hellstrom
vmw_gmr2_unbind(struct vmw_private * dev_priv,int gmr_id)1052de59d01SThomas Hellstrom static void vmw_gmr2_unbind(struct vmw_private *dev_priv,
1062de59d01SThomas Hellstrom int gmr_id)
1072de59d01SThomas Hellstrom {
1082de59d01SThomas Hellstrom SVGAFifoCmdDefineGMR2 define_cmd;
1092de59d01SThomas Hellstrom uint32_t define_size = sizeof(define_cmd) + 4;
1102de59d01SThomas Hellstrom uint32_t *cmd;
1112de59d01SThomas Hellstrom
1128426ed9cSZack Rusin cmd = VMW_CMD_RESERVE(dev_priv, define_size);
11311c45419SDeepak Rawat if (unlikely(cmd == NULL))
1142de59d01SThomas Hellstrom return;
11511c45419SDeepak Rawat
1162de59d01SThomas Hellstrom define_cmd.gmrId = gmr_id;
1172de59d01SThomas Hellstrom define_cmd.numPages = 0;
1182de59d01SThomas Hellstrom
1192de59d01SThomas Hellstrom *cmd++ = SVGA_CMD_DEFINE_GMR2;
1202de59d01SThomas Hellstrom memcpy(cmd, &define_cmd, sizeof(define_cmd));
1212de59d01SThomas Hellstrom
1228426ed9cSZack Rusin vmw_cmd_commit(dev_priv, define_size);
1232de59d01SThomas Hellstrom }
1242de59d01SThomas Hellstrom
125d92d9851SThomas Hellstrom
vmw_gmr_bind(struct vmw_private * dev_priv,const struct vmw_sg_table * vsgt,unsigned long num_pages,int gmr_id)126fb1d9738SJakob Bornecrantz int vmw_gmr_bind(struct vmw_private *dev_priv,
127d92d9851SThomas Hellstrom const struct vmw_sg_table *vsgt,
128135cba0dSThomas Hellstrom unsigned long num_pages,
129135cba0dSThomas Hellstrom int gmr_id)
130fb1d9738SJakob Bornecrantz {
131d92d9851SThomas Hellstrom struct vmw_piter data_iter;
132fb1d9738SJakob Bornecrantz
133d92d9851SThomas Hellstrom vmw_piter_start(&data_iter, vsgt, 0);
134d92d9851SThomas Hellstrom
135d92d9851SThomas Hellstrom if (unlikely(!vmw_piter_next(&data_iter)))
136d92d9851SThomas Hellstrom return 0;
137d92d9851SThomas Hellstrom
1380d00c488SThomas Hellstrom if (unlikely(!(dev_priv->capabilities & SVGA_CAP_GMR2)))
1390d00c488SThomas Hellstrom return -EINVAL;
1400d00c488SThomas Hellstrom
141d92d9851SThomas Hellstrom return vmw_gmr2_bind(dev_priv, &data_iter, num_pages, gmr_id);
142fb1d9738SJakob Bornecrantz }
143fb1d9738SJakob Bornecrantz
144135cba0dSThomas Hellstrom
vmw_gmr_unbind(struct vmw_private * dev_priv,int gmr_id)145fb1d9738SJakob Bornecrantz void vmw_gmr_unbind(struct vmw_private *dev_priv, int gmr_id)
146fb1d9738SJakob Bornecrantz {
1470d00c488SThomas Hellstrom if (likely(dev_priv->capabilities & SVGA_CAP_GMR2))
1482de59d01SThomas Hellstrom vmw_gmr2_unbind(dev_priv, gmr_id);
149fb1d9738SJakob Bornecrantz }
150