/openbmc/linux/drivers/infiniband/sw/rdmavt/ |
mr.c |
    75  static void rvt_deinit_mregion(struct rvt_mregion *mr)
    77      int i = mr->mapsz;
    79      mr->mapsz = 0;
    81      kfree(mr->map[--i]);
    82      percpu_ref_exit(&mr->refcount);
    87      struct rvt_mregion *mr = container_of(ref, struct rvt_mregion,
    90      complete(&mr->comp);
    93  static int rvt_init_mregion(struct rvt_mregion *mr, struct ib_pd *pd,
    99      mr->mapsz = 0;
   102      mr->map[i] = kzalloc_node(sizeof(*mr->map[0]), GFP_KERNEL,
  [all …]
|
trace_mr.h |
    21      TP_PROTO(struct rvt_mregion *mr, u16 m, u16 n, void *v, size_t len),
    22      TP_ARGS(mr, m, n, v, len),
    24      RDI_DEV_ENTRY(ib_to_rvt(mr->pd->device))
    37      RDI_DEV_ASSIGN(ib_to_rvt(mr->pd->device));
    40      __entry->iova = mr->iova;
    41      __entry->user_base = mr->user_base;
    42      __entry->lkey = mr->lkey;
    46      __entry->length = mr->length;
    47      __entry->offset = mr->offset;
    67      TP_PROTO(struct rvt_mregion *mr, u16 m, u16 n, void *v, size_t len),
  [all …]
|
/openbmc/linux/drivers/infiniband/sw/rxe/ |
rxe_mr.c |
    27  int mr_check_range(struct rxe_mr *mr, u64 iova, size_t length)
    29      switch (mr->ibmr.type) {
    35      if (iova < mr->ibmr.iova ||
    36          iova + length > mr->ibmr.iova + mr->ibmr.length) {
    37          rxe_dbg_mr(mr, "iova/length out of range");
    43      rxe_dbg_mr(mr, "mr type not supported\n");
    48  static void rxe_mr_init(int access, struct rxe_mr *mr)
    50      u32 key = mr->elem.index << 8 | rxe_get_next_key(-1);
    57      mr->lkey = mr->ibmr.lkey = key;
    58      mr->rkey = mr->ibmr.rkey = key;
  [all …]
|
rxe_mw.c |
    51          struct rxe_mw *mw, struct rxe_mr *mr, int access)
    83      if (unlikely(!mr || wqe->wr.wr.mw.length == 0)) {
    91      if (!mr)
    94      if (unlikely(mr->access & IB_ZERO_BASED)) {
   100      if (unlikely(!(mr->access & IB_ACCESS_MW_BIND))) {
   109          !(mr->access & IB_ACCESS_LOCAL_WRITE))) {
   117      if (unlikely(wqe->wr.wr.mw.length > mr->ibmr.length)) {
   123      if (unlikely((wqe->wr.wr.mw.addr < mr->ibmr.iova) ||
   125          (mr->ibmr.iova + mr->ibmr.length)))) {
   136          struct rxe_mw *mw, struct rxe_mr *mr, int access)
  [all …]
|
/openbmc/qemu/system/ |
memory.c |
   220      MemoryRegion *mr;
   237      .mr = fr->mr,
   250      return a->mr == b->mr
   285      memory_region_ref(range->mr);
   298      memory_region_unref(view->ranges[i].mr);
   322      && r1->mr == r2->mr
   348      memory_region_unref(view->ranges[k].mr);
   356  static bool memory_region_big_endian(MemoryRegion *mr)
   359      return mr->ops->endianness != DEVICE_LITTLE_ENDIAN;
   361      return mr->ops->endianness == DEVICE_BIG_ENDIAN;
  [all …]
|
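The memory.c hits above include memory_region_big_endian(), which consults the device-supplied MemoryRegionOps. As a rough illustration of where those ops come from (not code from the file above; MyDev-style names, the "my-dev-mmio" label, the 0x1000 size and the mapping helper are invented), a QEMU device model typically registers MMIO like this:

    #include "qemu/osdep.h"
    #include "exec/memory.h"

    static uint64_t my_dev_read(void *opaque, hwaddr addr, unsigned size)
    {
        return 0;   /* a real device would decode addr here */
    }

    static void my_dev_write(void *opaque, hwaddr addr, uint64_t val, unsigned size)
    {
    }

    static const MemoryRegionOps my_dev_ops = {
        .read = my_dev_read,
        .write = my_dev_write,
        .endianness = DEVICE_LITTLE_ENDIAN,   /* the field the snippet above tests */
    };

    /* Usually called from the device's realize hook: */
    static void my_dev_map(MemoryRegion *iomem, Object *owner,
                           MemoryRegion *sysmem, hwaddr base)
    {
        memory_region_init_io(iomem, owner, &my_dev_ops, NULL, "my-dev-mmio", 0x1000);
        memory_region_add_subregion(sysmem, base, iomem);
    }
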
memory_ldst.c.inc |
    29      MemoryRegion *mr;
    36      mr = TRANSLATE(addr, &addr1, &l, false, attrs);
    37      if (l < 4 || !memory_access_is_direct(mr, false)) {
    38          release_lock |= prepare_mmio_access(mr);
    41          r = memory_region_dispatch_read(mr, addr1, &val,
    45          fuzz_dma_read_cb(addr, 4, mr);
    46          ptr = qemu_map_ram_ptr(mr->ram_block, addr1);
    98      MemoryRegion *mr;
   105      mr = TRANSLATE(addr, &addr1, &l, false, attrs);
   106      if (l < 8 || !memory_access_is_direct(mr, false)) {
  [all …]
|
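The memory_ldst.c.inc template above generates the address_space_ld*/st* helpers, each choosing between direct RAM access and MMIO dispatch exactly as the excerpt shows. A hedged usage sketch (copy_word and the caller-supplied AddressSpace/address are assumptions, not QEMU code):

    #include "qemu/osdep.h"
    #include "exec/memory.h"

    /* Copy one 32-bit word within guest-physical memory. */
    static void copy_word(AddressSpace *as, hwaddr addr)
    {
        MemTxResult res;
        uint32_t v = address_space_ldl(as, addr, MEMTXATTRS_UNSPECIFIED, &res);

        if (res == MEMTX_OK) {
            address_space_stl(as, addr + 4, v, MEMTXATTRS_UNSPECIFIED, &res);
        }
    }
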
/openbmc/linux/net/sunrpc/xprtrdma/ |
frwr_ops.c |
    49          struct rpcrdma_mr *mr)
    51      struct rpc_rdma_cid *cid = &mr->mr_cid;
    54      cid->ci_completion_id = mr->mr_ibmr->res.id;
    57  static void frwr_mr_unmap(struct rpcrdma_xprt *r_xprt, struct rpcrdma_mr *mr)
    59      if (mr->mr_device) {
    60          trace_xprtrdma_mr_unmap(mr);
    61          ib_dma_unmap_sg(mr->mr_device, mr->mr_sg, mr->mr_nents,
    62              mr->mr_dir);
    63          mr->mr_device = NULL;
    72  void frwr_mr_release(struct rpcrdma_mr *mr)
  [all …]
|
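frwr_ops.c implements xprtrdma's fast-registration (FRWR) memory registration. The sketch below shows the generic kernel FRWR idiom rather than the xprtrdma code itself; the function name and the caller-provided qp/mr/scatterlist are assumptions:

    #include <rdma/ib_verbs.h>

    /* Map a scatterlist into an MR and post an IB_WR_REG_MR work request. */
    static int frwr_like_register(struct ib_qp *qp, struct ib_mr *mr,
                                  struct scatterlist *sg, int nents)
    {
        struct ib_reg_wr reg_wr = { };
        int n;

        n = ib_map_mr_sg(mr, sg, nents, NULL, PAGE_SIZE);
        if (n < nents)
            return -EINVAL;

        /* Bump the key so stale remote references cannot hit the new mapping. */
        ib_update_fast_reg_key(mr, ib_inc_rkey(mr->rkey));

        reg_wr.wr.opcode = IB_WR_REG_MR;
        reg_wr.mr = mr;
        reg_wr.key = mr->rkey;
        reg_wr.access = IB_ACCESS_LOCAL_WRITE | IB_ACCESS_REMOTE_WRITE;

        return ib_post_send(qp, &reg_wr.wr, NULL);
    }
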
/openbmc/linux/drivers/vdpa/mlx5/core/ |
mr.c |
    35  static void populate_mtts(struct mlx5_vdpa_direct_mr *mr, __be64 *mtt)
    38      int nsg = mr->nsg;
    44      for_each_sg(mr->sg_head.sgl, sg, mr->nent, i) {
    47          nsg--, dma_addr += BIT(mr->log_size), dma_len -= BIT(mr->log_size))
    52  static int create_direct_mr(struct mlx5_vdpa_dev *mvdev, struct mlx5_vdpa_direct_mr *mr)
    59      inlen = MLX5_ST_SZ_BYTES(create_mkey_in) + roundup(MLX5_ST_SZ_BYTES(mtt) * mr->nsg, 16);
    66      MLX5_SET(mkc, mkc, lw, !!(mr->perm & VHOST_MAP_WO));
    67      MLX5_SET(mkc, mkc, lr, !!(mr->perm & VHOST_MAP_RO));
    71      MLX5_SET64(mkc, mkc, start_addr, mr->offset);
    72      MLX5_SET64(mkc, mkc, len, mr->end - mr->start);
  [all …]
|
/openbmc/linux/drivers/infiniband/hw/vmw_pvrdma/ |
pvrdma_mr.c |
    61      struct pvrdma_user_mr *mr;
    75      mr = kzalloc(sizeof(*mr), GFP_KERNEL);
    76      if (!mr)
    89      kfree(mr);
    93      mr->mmr.mr_handle = resp->mr_handle;
    94      mr->ibmr.lkey = resp->lkey;
    95      mr->ibmr.rkey = resp->rkey;
    97      return &mr->ibmr;
   116      struct pvrdma_user_mr *mr = NULL;
   144      mr = kzalloc(sizeof(*mr), GFP_KERNEL);
  [all …]
|
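pvrdma_get_dma_mr() and pvrdma_reg_user_mr() above are the kernel provider side of MR registration; the request normally originates from userspace through libibverbs. A minimal sketch of that caller side (register_buffer is made up and error unwinding is trimmed):

    #include <stdlib.h>
    #include <infiniband/verbs.h>

    struct ibv_mr *register_buffer(struct ibv_pd *pd, size_t len)
    {
        void *buf = malloc(len);

        if (!buf)
            return NULL;
        /* Lands in the provider's reg_user_mr path, e.g. pvrdma_reg_user_mr(). */
        return ibv_reg_mr(pd, buf, len,
                          IBV_ACCESS_LOCAL_WRITE | IBV_ACCESS_REMOTE_READ);
    }
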
/openbmc/linux/drivers/infiniband/hw/mlx5/ |
mr.c |
   127  static int destroy_mkey(struct mlx5_ib_dev *dev, struct mlx5_ib_mr *mr)
   129      WARN_ON(xa_load(&dev->odp_mkeys, mlx5_base_mkey(mr->mmkey.key)));
   131      return mlx5_core_destroy_mkey(dev->mdev, mr->mmkey.key);
   758      struct mlx5_ib_mr *mr;
   761      mr = kzalloc(sizeof(*mr), GFP_KERNEL);
   762      if (!mr)
   772      err = create_cache_mkey(ent, &mr->mmkey.key);
   777      kfree(mr);
   781      mr->mmkey.key = pop_stored_mkey(ent);
   785      mr->mmkey.cache_ent = ent;
  [all …]
|
/openbmc/linux/drivers/gpu/drm/nouveau/nvkm/subdev/fb/ |
gddr5.c |
    75      ram->mr[0] &= ~0xf7f;
    76      ram->mr[0] |= (WR & 0x0f) << 8;
    77      ram->mr[0] |= (CL & 0x0f) << 3;
    78      ram->mr[0] |= (WL & 0x07) << 0;
    80      ram->mr[1] &= ~0x0bf;
    81      ram->mr[1] |= (xd & 0x01) << 7;
    82      ram->mr[1] |= (at[0] & 0x03) << 4;
    83      ram->mr[1] |= (dt & 0x03) << 2;
    84      ram->mr[1] |= (ds & 0x03) << 0;
    89      ram->mr1_nuts = ram->mr[1];
  [all …]
|
sddr3.c |
    92      ODT = (ram->mr[1] & 0x004) >> 2 |
    93            (ram->mr[1] & 0x040) >> 5 |
    94            (ram->mr[1] & 0x200) >> 7;
   106      ram->mr[0] &= ~0xf74;
   107      ram->mr[0] |= (WR & 0x07) << 9;
   108      ram->mr[0] |= (CL & 0x0e) << 3;
   109      ram->mr[0] |= (CL & 0x01) << 2;
   111      ram->mr[1] &= ~0x245;
   112      ram->mr[1] |= (ODT & 0x1) << 2;
   113      ram->mr[1] |= (ODT & 0x2) << 5;
  [all …]
|
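The gddr5.c and sddr3.c excerpts above (and gddr3.c below) all program DRAM mode registers the same way: clear a field's mask in the cached mr[] word, then OR in the new value at its bit offset. A generic, hedged restatement of that pattern — the helper, mask, shift and CAS-latency field below are invented, not the real encodings:

    #include <stdint.h>

    static inline uint32_t mr_set_field(uint32_t mr, uint32_t mask,
                                        unsigned int shift, uint32_t val)
    {
        mr &= ~mask;                    /* drop the old field bits   */
        mr |= (val << shift) & mask;    /* insert the new field bits */
        return mr;
    }

    /* e.g. a hypothetical 4-bit CAS-latency field at bits 6:3: */
    /* mr0 = mr_set_field(mr0, 0x78, 3, CL); */
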
gddr3.c |
    89      DLL = !(ram->mr[1] & 0x1);
    90      RON = !((ram->mr[1] & 0x300) >> 8);
    98      ODT = (ram->mr[1] & 0xc) >> 2;
   101      hi = ram->mr[2] & 0x1;
   107      ram->mr[0] &= ~0xf74;
   108      ram->mr[0] |= (CWL & 0x07) << 9;
   109      ram->mr[0] |= (CL & 0x07) << 4;
   110      ram->mr[0] |= (CL & 0x08) >> 1;
   112      ram->mr[1] &= ~0x3fc;
   113      ram->mr[1] |= (ODT & 0x03) << 2;
  [all …]
|
/openbmc/linux/drivers/scsi/ |
mesh.c |
   305      volatile struct mesh_regs __iomem *mr = ms->mesh;
   311          ms, mr, md);
   314          (mr->count_hi << 8) + mr->count_lo, mr->sequence,
   315          (mr->bus_status1 << 8) + mr->bus_status0, mr->fifo_count,
   316          mr->exception, mr->error, mr->intr_mask, mr->interrupt,
   317          mr->sync_params);
   318      while(in_8(&mr->fifo_count))
   319          printk(KERN_DEBUG " fifo data=%.2x\n",in_8(&mr->fifo));
   339  static inline void mesh_flush_io(volatile struct mesh_regs __iomem *mr)
   341      (void)in_8(&mr->mesh_id);
  [all …]
|
/openbmc/linux/drivers/infiniband/hw/mlx4/ |
mr.c |
    60      struct mlx4_ib_mr *mr;
    63      mr = kzalloc(sizeof(*mr), GFP_KERNEL);
    64      if (!mr)
    68          ~0ull, convert_access(acc), 0, 0, &mr->mmr);
    72      err = mlx4_mr_enable(to_mdev(pd->device)->dev, &mr->mmr);
    76      mr->ibmr.rkey = mr->ibmr.lkey = mr->mmr.key;
    77      mr->umem = NULL;
    79      return &mr->ibmr;
    82      (void) mlx4_mr_free(to_mdev(pd->device)->dev, &mr->mmr);
    85      kfree(mr);
  [all …]
|
/openbmc/linux/drivers/infiniband/hw/hns/ |
hns_roce_mr.c |
    51  static int alloc_mr_key(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr)
    66      mr->key = hw_index_to_key(id); /* MR key */
    81  static void free_mr_key(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr)
    83      unsigned long obj = key_to_hw_index(mr->key);
    89  static int alloc_mr_pbl(struct hns_roce_dev *hr_dev, struct hns_roce_mr *mr,
    93      bool is_fast = mr->type == MR_TYPE_FRMR;
    97      mr->pbl_hop_num = is_fast ? 1 : hr_dev->caps.pbl_hop_num;
   100      buf_attr.region[0].size = mr->size;
   101      buf_attr.region[0].hopnum = mr->pbl_hop_num;
   103      buf_attr.user_access = mr->access;
  [all …]
|
/openbmc/qemu/include/exec/ |
memory.h |
    55          MemoryRegion *mr);
    59          MemoryRegion *mr)
   102      MemoryRegion *mr;
   624          const MemoryRegion *mr);
   709          const MemoryRegion *mr);
   782      void (*destructor)(MemoryRegion *mr);
   811  #define IOMMU_NOTIFIER_FOREACH(n, mr) \
   812      QLIST_FOREACH((n), &(mr)->iommu_notify, node)
  1157          const MemoryRegion *mr,
  1177      return a->mr == b->mr &&
  [all …]
|
/openbmc/linux/drivers/infiniband/core/ |
uverbs_std_types_mr.c |
    95      struct ib_mr *mr;
   127      mr = pd->device->ops.reg_dm_mr(pd, dm, &attr, attrs);
   128      if (IS_ERR(mr))
   129          return PTR_ERR(mr);
   131      mr->device = pd->device;
   132      mr->pd = pd;
   133      mr->type = IB_MR_TYPE_DM;
   134      mr->dm = dm;
   135      mr->uobject = uobj;
   139      rdma_restrack_new(&mr->res, RDMA_RESTRACK_MR);
  [all …]
|
mr_pool.c |
    10      struct ib_mr *mr;
    14      mr = list_first_entry_or_null(list, struct ib_mr, qp_entry);
    15      if (mr) {
    16          list_del(&mr->qp_entry);
    21      return mr;
    25  void ib_mr_pool_put(struct ib_qp *qp, struct list_head *list, struct ib_mr *mr)
    30      list_add(&mr->qp_entry, list);
    39      struct ib_mr *mr;
    45          mr = ib_alloc_mr_integrity(qp->pd, max_num_sg,
    48          mr = ib_alloc_mr(qp->pd, type, max_num_sg);
  [all …]
|
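mr_pool.c keeps a per-QP free list of pre-allocated MRs, consumed with ib_mr_pool_get()/ib_mr_pool_put() as shown above. A hedged sketch of the expected init/get/put/destroy sequence; the wrapper function, the counts, and my reading of the ib_mr_pool_init() signature are assumptions (the rdma rw API is the main in-tree user):

    #include <rdma/ib_verbs.h>
    #include <rdma/mr_pool.h>

    static int use_mr_pool(struct ib_qp *qp, int nr_mrs, u32 max_sge)
    {
        struct ib_mr *mr;
        int ret;

        ret = ib_mr_pool_init(qp, &qp->rdma_mrs, nr_mrs,
                              IB_MR_TYPE_MEM_REG, max_sge, 0);
        if (ret)
            return ret;

        mr = ib_mr_pool_get(qp, &qp->rdma_mrs);   /* NULL when the pool is empty */
        if (mr) {
            /* ... ib_map_mr_sg() + IB_WR_REG_MR would go here ... */
            ib_mr_pool_put(qp, &qp->rdma_mrs, mr);
        }

        ib_mr_pool_destroy(qp, &qp->rdma_mrs);
        return 0;
    }
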
/openbmc/linux/arch/powerpc/platforms/pseries/ |
hvCall.S |
    64      mr r4,r3; \
    65      mr r3,r0; \
    77      mr r5,BUFREG; \
   160      mr r4,r5
   161      mr r5,r6
   162      mr r6,r7
   163      mr r7,r8
   164      mr r8,r9
   165      mr r9,r10
   187      mr r4,r5
  [all …]
|
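In the hvCall.S hits above, "mr" is not a variable at all: it is the PowerPC move-register mnemonic (an alias for "or rD,rS,rS"), used here to shuffle hypervisor-call arguments between registers. A tiny illustration via inline assembly; copy_via_mr is an invented example, assuming GCC on a powerpc target:

    /* Copy x into y through an explicit "mr" register move. */
    static unsigned long copy_via_mr(unsigned long x)
    {
        unsigned long y;

        asm("mr %0,%1" : "=r"(y) : "r"(x));
        return y;
    }
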
/openbmc/linux/net/rds/ |
rdma.c |
    70      struct rds_mr *mr;
    74      mr = rb_entry(parent, struct rds_mr, r_rb_node);
    76      if (key < mr->r_key)
    78      else if (key > mr->r_key)
    81      return mr;
    95  static void rds_destroy_mr(struct rds_mr *mr)
    97      struct rds_sock *rs = mr->r_sock;
   102          mr->r_key, kref_read(&mr->r_kref));
   105      if (!RB_EMPTY_NODE(&mr->r_rb_node))
   106          rb_erase(&mr->r_rb_node, &rs->rs_rdma_keys);
  [all …]
|
/openbmc/linux/drivers/rtc/ |
rtc-at91sam9.c |
   133      u32 offset, alarm, mr;
   140      mr = rtt_readl(rtc, MR);
   143      rtt_writel(rtc, MR, mr & ~(AT91_RTT_ALMIEN | AT91_RTT_RTTINCIEN));
   164      mr &= ~AT91_RTT_ALMIEN;
   170      rtt_writel(rtc, MR, mr | AT91_RTT_RTTRST);
   205      u32 mr;
   214      mr = rtt_readl(rtc, MR);
   215      rtt_writel(rtc, MR, mr & ~AT91_RTT_ALMIEN);
   226      rtt_writel(rtc, MR, mr | AT91_RTT_ALMIEN);
   236      u32 mr = rtt_readl(rtc, MR);
  [all …]
|
/openbmc/linux/drivers/infiniband/hw/mana/ |
mr.c |
    28  static int mana_ib_gd_create_mr(struct mana_ib_dev *dev, struct mana_ib_mr *mr,
    69      mr->ibmr.lkey = resp.lkey;
    70      mr->ibmr.rkey = resp.rkey;
    71      mr->mr_handle = resp.mr_handle;
   111      struct mana_ib_mr *mr;
   125      mr = kzalloc(sizeof(*mr), GFP_KERNEL);
   126      if (!mr)
   129      mr->umem = ib_umem_get(ibdev, start, length, access_flags);
   130      if (IS_ERR(mr->umem)) {
   131          err = PTR_ERR(mr->umem);
  [all …]
|
/openbmc/linux/drivers/net/ethernet/mellanox/mlxsw/ |
spectrum_mr.c |
   254      struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
   264      mr_route->route_priv = kzalloc(mr->mr_ops->route_priv_size,
   274      err = mr->mr_ops->route_create(mlxsw_sp, mr->priv,
   280      err = mr->mr_ops->route_update(mlxsw_sp, mr_route->route_priv,
   292      struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
   294      mr->mr_ops->route_destroy(mlxsw_sp, mr->priv, mr_route->route_priv);
   485      struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
   495      err = mr->mr_ops->route_irif_update(mlxsw_sp, rve->mr_route->route_priv,
   500      err = mr->mr_ops->route_action_update(mlxsw_sp,
   519      struct mlxsw_sp_mr *mr = mlxsw_sp->mr;
  [all …]
|
/openbmc/qemu/hw/mem/ |
memory-device.c |
    27      MemoryRegion *mr;
    30      mr = mdc->get_memory_region((MemoryDeviceState *)md, &local_err);
    36      return !mr;
   114          MemoryRegion *mr)
   117      const uint64_t size = memory_region_size(mr);
   174          MemoryRegion *mr, Error **errp)
   178      const uint64_t size = memory_region_size(mr);
   187      memslot_limit = memory_device_memslot_decision_limit(ms, mr);
   223          memory_region_size(&ms->device_memory->mr));
   353      MemoryRegion *mr;
  [all …]
|