
Searched refs:ib (Results 1 – 25 of 183) sorted by relevance


/openbmc/qemu/hw/ipmi/
ipmi_bt.c 81 static void ipmi_bt_raise_irq(IPMIBT *ib) in ipmi_bt_raise_irq() argument
83 if (ib->use_irq && ib->irqs_enabled && ib->raise_irq) { in ipmi_bt_raise_irq()
84 ib->raise_irq(ib); in ipmi_bt_raise_irq()
88 static void ipmi_bt_lower_irq(IPMIBT *ib) in ipmi_bt_lower_irq() argument
90 if (ib->lower_irq) { in ipmi_bt_lower_irq()
91 ib->lower_irq(ib); in ipmi_bt_lower_irq()
98 IPMIBT *ib = iic->get_backend_data(ii); in ipmi_bt_handle_event() local
100 if (ib->inlen < 4) { in ipmi_bt_handle_event()
104 if (ib->inmsg[0] != (ib->inlen - 1)) { in ipmi_bt_handle_event()
106 IPMI_BT_SET_BBUSY(ib->control_reg, 1); in ipmi_bt_handle_event()
[all …]
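
The ipmi_bt.c hits show how the BT interface drives its interrupt through optional callbacks: the raise path only fires when the guest has asked for IRQs, interrupts are enabled, and a backend has actually installed a hook, while the lower path needs only the NULL check. Below is a minimal standalone sketch of that guard pattern; the struct and function names are hypothetical stand-ins, not QEMU's IPMIBT type.

/* Minimal sketch of the optional-callback guard seen in
 * ipmi_bt_raise_irq()/ipmi_bt_lower_irq(). Hypothetical types, not QEMU's. */
#include <stdbool.h>
#include <stdio.h>

struct bt_iface {
    bool use_irq;                            /* guest requested IRQ operation */
    bool irqs_enabled;                       /* interrupts currently unmasked */
    void (*raise_irq)(struct bt_iface *bt);  /* optional hook, may be NULL */
    void (*lower_irq)(struct bt_iface *bt);  /* optional hook, may be NULL */
};

static void bt_raise_irq(struct bt_iface *bt)
{
    /* Fire only when enabled *and* a backend actually wired the hook. */
    if (bt->use_irq && bt->irqs_enabled && bt->raise_irq) {
        bt->raise_irq(bt);
    }
}

static void bt_lower_irq(struct bt_iface *bt)
{
    /* Lowering is unconditional apart from the NULL check. */
    if (bt->lower_irq) {
        bt->lower_irq(bt);
    }
}

static void demo_raise(struct bt_iface *bt) { (void)bt; puts("IRQ raised"); }

int main(void)
{
    struct bt_iface bt = { .use_irq = true, .irqs_enabled = true,
                           .raise_irq = demo_raise, .lower_irq = NULL };
    bt_raise_irq(&bt);   /* prints "IRQ raised" */
    bt_lower_irq(&bt);   /* safely a no-op: no callback installed */
    return 0;
}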
/openbmc/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_vce.c 442 struct amdgpu_ib *ib; in amdgpu_vce_get_create_msg() local
463 ib = &job->ibs[0]; in amdgpu_vce_get_create_msg()
468 ib->length_dw = 0; in amdgpu_vce_get_create_msg()
469 ib->ptr[ib->length_dw++] = 0x0000000c; /* len */ in amdgpu_vce_get_create_msg()
470 ib->ptr[ib->length_dw++] = 0x00000001; /* session cmd */ in amdgpu_vce_get_create_msg()
471 ib->ptr[ib->length_dw++] = handle; in amdgpu_vce_get_create_msg()
474 ib->ptr[ib->length_dw++] = 0x00000040; /* len */ in amdgpu_vce_get_create_msg()
476 ib->ptr[ib->length_dw++] = 0x00000030; /* len */ in amdgpu_vce_get_create_msg()
477 ib->ptr[ib->length_dw++] = 0x01000001; /* create cmd */ in amdgpu_vce_get_create_msg()
478 ib->ptr[ib->length_dw++] = 0x00000000; in amdgpu_vce_get_create_msg()
[all …]
si_dma.c 64 struct amdgpu_ib *ib, in si_dma_ring_emit_ib() argument
74 amdgpu_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in si_dma_ring_emit_ib()
75 amdgpu_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in si_dma_ring_emit_ib()
251 struct amdgpu_ib ib; in si_dma_ring_test_ib() local
265 memset(&ib, 0, sizeof(ib)); in si_dma_ring_test_ib()
267 AMDGPU_IB_POOL_DIRECT, &ib); in si_dma_ring_test_ib()
271 ib.ptr[0] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, 1); in si_dma_ring_test_ib()
272 ib.ptr[1] = lower_32_bits(gpu_addr); in si_dma_ring_test_ib()
273 ib.ptr[2] = upper_32_bits(gpu_addr) & 0xff; in si_dma_ring_test_ib()
274 ib.ptr[3] = 0xDEADBEEF; in si_dma_ring_test_ib()
[all …]
amdgpu_vcn.c 534 struct amdgpu_ib *ib; in amdgpu_vcn_dec_send_msg() local
543 ib = &job->ibs[0]; in amdgpu_vcn_dec_send_msg()
544 ib->ptr[0] = PACKET0(adev->vcn.internal.data0, 0); in amdgpu_vcn_dec_send_msg()
545 ib->ptr[1] = addr; in amdgpu_vcn_dec_send_msg()
546 ib->ptr[2] = PACKET0(adev->vcn.internal.data1, 0); in amdgpu_vcn_dec_send_msg()
547 ib->ptr[3] = addr >> 32; in amdgpu_vcn_dec_send_msg()
548 ib->ptr[4] = PACKET0(adev->vcn.internal.cmd, 0); in amdgpu_vcn_dec_send_msg()
549 ib->ptr[5] = 0; in amdgpu_vcn_dec_send_msg()
551 ib->ptr[i] = PACKET0(adev->vcn.internal.nop, 0); in amdgpu_vcn_dec_send_msg()
552 ib->ptr[i+1] = 0; in amdgpu_vcn_dec_send_msg()
[all …]
sdma_v2_4.c 248 struct amdgpu_ib *ib, in sdma_v2_4_ring_emit_ib() argument
259 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v2_4_ring_emit_ib()
260 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v2_4_ring_emit_ib()
261 amdgpu_ring_write(ring, ib->length_dw); in sdma_v2_4_ring_emit_ib()
591 struct amdgpu_ib ib; in sdma_v2_4_ring_test_ib() local
605 memset(&ib, 0, sizeof(ib)); in sdma_v2_4_ring_test_ib()
607 AMDGPU_IB_POOL_DIRECT, &ib); in sdma_v2_4_ring_test_ib()
611 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v2_4_ring_test_ib()
613 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v2_4_ring_test_ib()
614 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v2_4_ring_test_ib()
[all …]
cik_sdma.c 222 struct amdgpu_ib *ib, in cik_sdma_ring_emit_ib() argument
232 amdgpu_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_emit_ib()
233 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xffffffff); in cik_sdma_ring_emit_ib()
234 amdgpu_ring_write(ring, ib->length_dw); in cik_sdma_ring_emit_ib()
658 struct amdgpu_ib ib; in cik_sdma_ring_test_ib() local
672 memset(&ib, 0, sizeof(ib)); in cik_sdma_ring_test_ib()
674 AMDGPU_IB_POOL_DIRECT, &ib); in cik_sdma_ring_test_ib()
678 ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, in cik_sdma_ring_test_ib()
680 ib.ptr[1] = lower_32_bits(gpu_addr); in cik_sdma_ring_test_ib()
681 ib.ptr[2] = upper_32_bits(gpu_addr); in cik_sdma_ring_test_ib()
[all …]
sdma_v3_0.c 422 struct amdgpu_ib *ib, in sdma_v3_0_ring_emit_ib() argument
433 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v3_0_ring_emit_ib()
434 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v3_0_ring_emit_ib()
435 amdgpu_ring_write(ring, ib->length_dw); in sdma_v3_0_ring_emit_ib()
863 struct amdgpu_ib ib; in sdma_v3_0_ring_test_ib() local
877 memset(&ib, 0, sizeof(ib)); in sdma_v3_0_ring_test_ib()
879 AMDGPU_IB_POOL_DIRECT, &ib); in sdma_v3_0_ring_test_ib()
883 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v3_0_ring_test_ib()
885 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v3_0_ring_test_ib()
886 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v3_0_ring_test_ib()
[all …]
sdma_v5_2.c 239 struct amdgpu_ib *ib, in sdma_v5_2_ring_emit_ib() argument
258 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v5_2_ring_emit_ib()
259 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v5_2_ring_emit_ib()
260 amdgpu_ring_write(ring, ib->length_dw); in sdma_v5_2_ring_emit_ib()
913 struct amdgpu_ib ib; in sdma_v5_2_ring_test_ib() local
922 memset(&ib, 0, sizeof(ib)); in sdma_v5_2_ring_test_ib()
927 ib.gpu_addr = amdgpu_mes_ctx_get_offs_gpu_addr(ring, offset); in sdma_v5_2_ring_test_ib()
928 ib.ptr = (void *)amdgpu_mes_ctx_get_offs_cpu_addr(ring, offset); in sdma_v5_2_ring_test_ib()
945 r = amdgpu_ib_get(adev, NULL, 256, AMDGPU_IB_POOL_DIRECT, &ib); in sdma_v5_2_ring_test_ib()
952 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v5_2_ring_test_ib()
[all …]
sdma_v6_0.c 249 struct amdgpu_ib *ib, in sdma_v6_0_ring_emit_ib() argument
268 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v6_0_ring_emit_ib()
269 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v6_0_ring_emit_ib()
270 amdgpu_ring_write(ring, ib->length_dw); in sdma_v6_0_ring_emit_ib()
952 struct amdgpu_ib ib; in sdma_v6_0_ring_test_ib() local
961 memset(&ib, 0, sizeof(ib)); in sdma_v6_0_ring_test_ib()
966 ib.gpu_addr = amdgpu_mes_ctx_get_offs_gpu_addr(ring, offset); in sdma_v6_0_ring_test_ib()
967 ib.ptr = (void *)amdgpu_mes_ctx_get_offs_cpu_addr(ring, offset); in sdma_v6_0_ring_test_ib()
984 r = amdgpu_ib_get(adev, NULL, 256, AMDGPU_IB_POOL_DIRECT, &ib); in sdma_v6_0_ring_test_ib()
991 ib.ptr[0] = SDMA_PKT_COPY_LINEAR_HEADER_OP(SDMA_OP_WRITE) | in sdma_v6_0_ring_test_ib()
[all …]
sdma_v5_0.c 424 struct amdgpu_ib *ib, in sdma_v5_0_ring_emit_ib() argument
443 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v5_0_ring_emit_ib()
444 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v5_0_ring_emit_ib()
445 amdgpu_ring_write(ring, ib->length_dw); in sdma_v5_0_ring_emit_ib()
1065 struct amdgpu_ib ib; in sdma_v5_0_ring_test_ib() local
1074 memset(&ib, 0, sizeof(ib)); in sdma_v5_0_ring_test_ib()
1079 ib.gpu_addr = amdgpu_mes_ctx_get_offs_gpu_addr(ring, offset); in sdma_v5_0_ring_test_ib()
1080 ib.ptr = (void *)amdgpu_mes_ctx_get_offs_cpu_addr(ring, offset); in sdma_v5_0_ring_test_ib()
1098 AMDGPU_IB_POOL_DIRECT, &ib); in sdma_v5_0_ring_test_ib()
1105 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v5_0_ring_test_ib()
[all …]
sdma_v4_4_2.c 311 struct amdgpu_ib *ib, in sdma_v4_4_2_ring_emit_ib() argument
322 amdgpu_ring_write(ring, lower_32_bits(ib->gpu_addr) & 0xffffffe0); in sdma_v4_4_2_ring_emit_ib()
323 amdgpu_ring_write(ring, upper_32_bits(ib->gpu_addr)); in sdma_v4_4_2_ring_emit_ib()
324 amdgpu_ring_write(ring, ib->length_dw); in sdma_v4_4_2_ring_emit_ib()
1015 struct amdgpu_ib ib; in sdma_v4_4_2_ring_test_ib() local
1029 memset(&ib, 0, sizeof(ib)); in sdma_v4_4_2_ring_test_ib()
1031 AMDGPU_IB_POOL_DIRECT, &ib); in sdma_v4_4_2_ring_test_ib()
1035 ib.ptr[0] = SDMA_PKT_HEADER_OP(SDMA_OP_WRITE) | in sdma_v4_4_2_ring_test_ib()
1037 ib.ptr[1] = lower_32_bits(gpu_addr); in sdma_v4_4_2_ring_test_ib()
1038 ib.ptr[2] = upper_32_bits(gpu_addr); in sdma_v4_4_2_ring_test_ib()
[all …]
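
The si_dma, cik_sdma and sdma_v2_4 through sdma_v6_0 results above all follow the same IB ring-test shape: zero a struct amdgpu_ib, allocate it from AMDGPU_IB_POOL_DIRECT, encode a single WRITE packet aimed at a scratch GPU address, submit it, and then poll that address for 0xDEADBEEF. The sketch below is a userspace model of that flow under simplified assumptions; the packet layout, opcode value and submit helper are invented stand-ins, not the amdgpu definitions.

/* Userspace model of the IB ring-test pattern visible in the
 * sdma_v*_ring_test_ib() hits: fill an indirect buffer with one WRITE
 * packet, "submit" it, then check that the magic value landed at the
 * destination. Types and packet encoding are simplified stand-ins. */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define OP_WRITE     0x02u            /* hypothetical opcode */
#define TEST_PATTERN 0xDEADBEEFu

struct test_ib {
    uint32_t ptr[8];      /* command dwords (the kernel allocates 256 bytes) */
    unsigned length_dw;   /* number of valid dwords */
};

/* Stand-in for the engine: decode the single WRITE packet and perform it. */
static void submit_ib(const struct test_ib *ib)
{
    if (ib->length_dw >= 4 && (ib->ptr[0] & 0xff) == OP_WRITE) {
        uint64_t addr = ((uint64_t)ib->ptr[2] << 32) | ib->ptr[1];
        *(uint32_t *)(uintptr_t)addr = ib->ptr[3];
    }
}

int main(void)
{
    uint32_t scratch = 0;                    /* scratch word the "GPU" writes */
    uint64_t gpu_addr = (uintptr_t)&scratch;

    struct test_ib ib;
    memset(&ib, 0, sizeof(ib));
    ib.ptr[ib.length_dw++] = OP_WRITE;                   /* header / opcode */
    ib.ptr[ib.length_dw++] = (uint32_t)gpu_addr;         /* lower_32_bits(gpu_addr) */
    ib.ptr[ib.length_dw++] = (uint32_t)(gpu_addr >> 32); /* upper_32_bits(gpu_addr) */
    ib.ptr[ib.length_dw++] = TEST_PATTERN;               /* payload */

    submit_ib(&ib);

    printf("test %s (0x%08x)\n",
           scratch == TEST_PATTERN ? "passed" : "failed", scratch);
    return 0;
}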
uvd_v6_0.c 214 struct amdgpu_ib *ib; in uvd_v6_0_enc_get_create_msg() local
224 ib = &job->ibs[0]; in uvd_v6_0_enc_get_create_msg()
227 ib->length_dw = 0; in uvd_v6_0_enc_get_create_msg()
228 ib->ptr[ib->length_dw++] = 0x00000018; in uvd_v6_0_enc_get_create_msg()
229 ib->ptr[ib->length_dw++] = 0x00000001; /* session info */ in uvd_v6_0_enc_get_create_msg()
230 ib->ptr[ib->length_dw++] = handle; in uvd_v6_0_enc_get_create_msg()
231 ib->ptr[ib->length_dw++] = 0x00010000; in uvd_v6_0_enc_get_create_msg()
232 ib->ptr[ib->length_dw++] = upper_32_bits(addr); in uvd_v6_0_enc_get_create_msg()
233 ib->ptr[ib->length_dw++] = addr; in uvd_v6_0_enc_get_create_msg()
235 ib->ptr[ib->length_dw++] = 0x00000014; in uvd_v6_0_enc_get_create_msg()
[all …]
/openbmc/linux/drivers/gpu/drm/radeon/
radeon_ib.c 59 struct radeon_ib *ib, struct radeon_vm *vm, in radeon_ib_get() argument
64 r = radeon_sa_bo_new(&rdev->ring_tmp_bo, &ib->sa_bo, size, 256); in radeon_ib_get()
70 radeon_sync_create(&ib->sync); in radeon_ib_get()
72 ib->ring = ring; in radeon_ib_get()
73 ib->fence = NULL; in radeon_ib_get()
74 ib->ptr = radeon_sa_bo_cpu_addr(ib->sa_bo); in radeon_ib_get()
75 ib->vm = vm; in radeon_ib_get()
80 ib->gpu_addr = drm_suballoc_soffset(ib->sa_bo) + RADEON_VA_IB_OFFSET; in radeon_ib_get()
82 ib->gpu_addr = radeon_sa_bo_gpu_addr(ib->sa_bo); in radeon_ib_get()
84 ib->is_const_ib = false; in radeon_ib_get()
[all …]
si_dma.c 69 struct radeon_ib *ib, in si_dma_vm_copy_pages() argument
78 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in si_dma_vm_copy_pages()
80 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in si_dma_vm_copy_pages()
81 ib->ptr[ib->length_dw++] = lower_32_bits(src); in si_dma_vm_copy_pages()
82 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_copy_pages()
83 ib->ptr[ib->length_dw++] = upper_32_bits(src) & 0xff; in si_dma_vm_copy_pages()
105 struct radeon_ib *ib, in si_dma_vm_write_pages() argument
119 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_WRITE, 0, 0, 0, ndw); in si_dma_vm_write_pages()
120 ib->ptr[ib->length_dw++] = pe; in si_dma_vm_write_pages()
121 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in si_dma_vm_write_pages()
[all …]
ni_dma.c 122 struct radeon_ib *ib) in cayman_dma_ring_ib_execute() argument
124 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cayman_dma_ring_ib_execute()
125 unsigned vm_id = ib->vm ? ib->vm->ids[ib->ring].id : 0; in cayman_dma_ring_ib_execute()
144 radeon_ring_write(ring, (ib->gpu_addr & 0xFFFFFFE0)); in cayman_dma_ring_ib_execute()
145 radeon_ring_write(ring, (ib->length_dw << 12) | (upper_32_bits(ib->gpu_addr) & 0xFF)); in cayman_dma_ring_ib_execute()
315 struct radeon_ib *ib, in cayman_dma_vm_copy_pages() argument
326 ib->ptr[ib->length_dw++] = DMA_PACKET(DMA_PACKET_COPY, in cayman_dma_vm_copy_pages()
328 ib->ptr[ib->length_dw++] = lower_32_bits(pe); in cayman_dma_vm_copy_pages()
329 ib->ptr[ib->length_dw++] = lower_32_bits(src); in cayman_dma_vm_copy_pages()
330 ib->ptr[ib->length_dw++] = upper_32_bits(pe) & 0xff; in cayman_dma_vm_copy_pages()
[all …]
radeon_vce.c 349 struct radeon_ib ib; in radeon_vce_get_create_msg() local
353 r = radeon_ib_get(rdev, ring, &ib, NULL, ib_size_dw * 4); in radeon_vce_get_create_msg()
359 dummy = ib.gpu_addr + 1024; in radeon_vce_get_create_msg()
362 ib.length_dw = 0; in radeon_vce_get_create_msg()
363 ib.ptr[ib.length_dw++] = cpu_to_le32(0x0000000c); /* len */ in radeon_vce_get_create_msg()
364 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000001); /* session cmd */ in radeon_vce_get_create_msg()
365 ib.ptr[ib.length_dw++] = cpu_to_le32(handle); in radeon_vce_get_create_msg()
367 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000030); /* len */ in radeon_vce_get_create_msg()
368 ib.ptr[ib.length_dw++] = cpu_to_le32(0x01000001); /* create cmd */ in radeon_vce_get_create_msg()
369 ib.ptr[ib.length_dw++] = cpu_to_le32(0x00000000); in radeon_vce_get_create_msg()
[all …]
cik_sdma.c 133 struct radeon_ib *ib) in cik_sdma_ring_ib_execute() argument
135 struct radeon_ring *ring = &rdev->ring[ib->ring]; in cik_sdma_ring_ib_execute()
136 u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf; in cik_sdma_ring_ib_execute()
154 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_ib_execute()
155 radeon_ring_write(ring, upper_32_bits(ib->gpu_addr)); in cik_sdma_ring_ib_execute()
156 radeon_ring_write(ring, ib->length_dw); in cik_sdma_ring_ib_execute()
703 struct radeon_ib ib; in cik_sdma_ib_test() local
720 r = radeon_ib_get(rdev, ring->idx, &ib, NULL, 256); in cik_sdma_ib_test()
726 ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0); in cik_sdma_ib_test()
727 ib.ptr[1] = lower_32_bits(gpu_addr); in cik_sdma_ib_test()
[all …]
evergreen_cs.c 450 uint32_t *ib = p->ib.ptr; in evergreen_cs_track_validate_cb() local
472 ib[track->cb_color_slice_idx[id]] = slice; in evergreen_cs_track_validate_cb()
1097 u32 tmp, *ib; in evergreen_cs_handle_reg() local
1100 ib = p->ib.ptr; in evergreen_cs_handle_reg()
1148 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in evergreen_cs_handle_reg()
1177 ib[idx] &= ~Z_ARRAY_MODE(0xf); in evergreen_cs_handle_reg()
1179 ib[idx] |= Z_ARRAY_MODE(evergreen_cs_get_aray_mode(reloc->tiling_flags)); in evergreen_cs_handle_reg()
1187 ib[idx] |= DB_NUM_BANKS(evergreen_cs_get_num_banks(track->nbanks)); in evergreen_cs_handle_reg()
1188 ib[idx] |= DB_TILE_SPLIT(tile_split) | in evergreen_cs_handle_reg()
1220 ib[idx] += (u32)((reloc->gpu_offset >> 8) & 0xffffffff); in evergreen_cs_handle_reg()
[all …]
/openbmc/linux/arch/s390/include/asm/
idals.h 132 struct idal_buffer *ib; in idal_buffer_alloc() local
137 ib = kmalloc(struct_size(ib, data, nr_ptrs), GFP_DMA | GFP_KERNEL); in idal_buffer_alloc()
138 if (ib == NULL) in idal_buffer_alloc()
140 ib->size = size; in idal_buffer_alloc()
141 ib->page_order = page_order; in idal_buffer_alloc()
144 ib->data[i] = ib->data[i-1] + IDA_BLOCK_SIZE; in idal_buffer_alloc()
147 ib->data[i] = (void *) in idal_buffer_alloc()
149 if (ib->data[i] != NULL) in idal_buffer_alloc()
154 free_pages((unsigned long) ib->data[i], in idal_buffer_alloc()
155 ib->page_order); in idal_buffer_alloc()
[all …]
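
The idals.h hit shows the single-allocation flexible-array idiom: idal_buffer_alloc() sizes one kmalloc with struct_size(ib, data, nr_ptrs) so the header and the array of data pointers live in the same block, then fills the pointers chunk by chunk. Below is a userspace analog of that sizing idiom under simplifying assumptions (plain malloc, one allocation per chunk instead of the page-order arithmetic); the type and function names are illustrative, not the s390 definitions.

/* Userspace analog of the struct_size()-based allocation in
 * idal_buffer_alloc(): one allocation covers the fixed header plus a
 * flexible array of nr_ptrs entries. Illustrative names only. */
#include <stdio.h>
#include <stdlib.h>
#include <stddef.h>

struct idal_like_buffer {
    size_t size;        /* total buffer size in bytes */
    int page_order;     /* order of each backing chunk */
    void *data[];       /* flexible array: one pointer per chunk */
};

static struct idal_like_buffer *buffer_alloc(size_t size, size_t nr_ptrs)
{
    /* Equivalent of kmalloc(struct_size(ib, data, nr_ptrs), ...):
     * header plus nr_ptrs pointer slots in a single allocation. */
    struct idal_like_buffer *ib =
        malloc(sizeof(*ib) + nr_ptrs * sizeof(ib->data[0]));
    if (!ib)
        return NULL;
    ib->size = size;
    ib->page_order = 0;
    for (size_t i = 0; i < nr_ptrs; i++) {
        ib->data[i] = malloc(4096);     /* stand-in for one IDA block/page */
        if (!ib->data[i]) {
            while (i--)                 /* unwind on failure, as the kernel does */
                free(ib->data[i]);
            free(ib);
            return NULL;
        }
    }
    return ib;
}

int main(void)
{
    struct idal_like_buffer *ib = buffer_alloc(3 * 4096, 3);
    printf("allocation %s\n", ib ? "ok" : "failed");
    if (ib) {
        for (size_t i = 0; i < 3; i++)
            free(ib->data[i]);
        free(ib);
    }
    return 0;
}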
/openbmc/qemu/ui/
input-barrier.c 203 static gboolean readcmd(InputBarrier *ib, struct barrierMsg *msg) in readcmd() argument
209 ret = qio_channel_read(QIO_CHANNEL(ib->sioc), (char *)&len, sizeof(len), in readcmd()
220 ret = qio_channel_read(QIO_CHANNEL(ib->sioc), ib->buffer, len, NULL); in readcmd()
225 p = ib->buffer; in readcmd()
234 if (memcmp(ib->buffer, cmd_names[cmd], 4) == 0) { in readcmd()
341 static gboolean writecmd(InputBarrier *ib, struct barrierMsg *msg) in writecmd() argument
347 p = ib->buffer; in writecmd()
359 ib->ioc_tag = 0; in writecmd()
365 write_string(p, ib->name, avail); in writecmd()
368 ib->ioc_tag = 0; in writecmd()
[all …]
/openbmc/linux/drivers/net/ethernet/amd/
7990.c 100 t, ib->brx_ring[t].rmd1_hadr, ib->brx_ring[t].rmd0, \
101 ib->brx_ring[t].length, \
102 ib->brx_ring[t].mblength, ib->brx_ring[t].rmd1_bits); \
106 t, ib->btx_ring[t].tmd1_hadr, ib->btx_ring[t].tmd0, \
107 ib->btx_ring[t].length, \
108 ib->btx_ring[t].misc, ib->btx_ring[t].tmd1_bits); \
140 volatile struct lance_init_block *ib = lp->init_block; in lance_init_ring() local
150 ib->mode = LE_MO_PROM; /* normal, enable Tx & Rx */ in lance_init_ring()
163 ib->phys_addr[0] = dev->dev_addr[1]; in lance_init_ring()
164 ib->phys_addr[1] = dev->dev_addr[0]; in lance_init_ring()
[all …]
sunlance.c 319 struct lance_init_block *ib = lp->init_block_mem; in lance_init_ring_dvma() local
332 ib->phys_addr [0] = dev->dev_addr [1]; in lance_init_ring_dvma()
333 ib->phys_addr [1] = dev->dev_addr [0]; in lance_init_ring_dvma()
334 ib->phys_addr [2] = dev->dev_addr [3]; in lance_init_ring_dvma()
335 ib->phys_addr [3] = dev->dev_addr [2]; in lance_init_ring_dvma()
336 ib->phys_addr [4] = dev->dev_addr [5]; in lance_init_ring_dvma()
337 ib->phys_addr [5] = dev->dev_addr [4]; in lance_init_ring_dvma()
342 ib->btx_ring [i].tmd0 = leptr; in lance_init_ring_dvma()
343 ib->btx_ring [i].tmd1_hadr = leptr >> 16; in lance_init_ring_dvma()
344 ib->btx_ring [i].tmd1_bits = 0; in lance_init_ring_dvma()
[all …]
a2065.c 149 volatile struct lance_init_block *ib = lp->init_block; in lance_init_ring() local
160 ib->mode = 0; in lance_init_ring()
165 ib->phys_addr[0] = dev->dev_addr[1]; in lance_init_ring()
166 ib->phys_addr[1] = dev->dev_addr[0]; in lance_init_ring()
167 ib->phys_addr[2] = dev->dev_addr[3]; in lance_init_ring()
168 ib->phys_addr[3] = dev->dev_addr[2]; in lance_init_ring()
169 ib->phys_addr[4] = dev->dev_addr[5]; in lance_init_ring()
170 ib->phys_addr[5] = dev->dev_addr[4]; in lance_init_ring()
176 ib->btx_ring[i].tmd0 = leptr; in lance_init_ring()
177 ib->btx_ring[i].tmd1_hadr = leptr >> 16; in lance_init_ring()
[all …]
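
The 7990.c, sunlance.c and a2065.c results all copy the MAC address into the LANCE init block with each byte pair swapped (phys_addr[0] = dev_addr[1], phys_addr[1] = dev_addr[0], and so on). The usual reading is that the chip consumes the init block as little-endian 16-bit words while these drivers run on big-endian hosts, so bytes must be swapped within each word; that interpretation is ours, not stated in the hits. A tiny standalone sketch of the swap:

/* Sketch of the byte-swapped MAC copy seen in lance_init_ring().
 * The layout rationale in the comment above is an assumption. */
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    const uint8_t dev_addr[6] = { 0x02, 0x11, 0x22, 0x33, 0x44, 0x55 };
    uint8_t phys_addr[6];

    /* Same swap as ib->phys_addr[0] = dev->dev_addr[1]; ... in the hits above. */
    for (int i = 0; i < 6; i += 2) {
        phys_addr[i]     = dev_addr[i + 1];
        phys_addr[i + 1] = dev_addr[i];
    }

    for (int i = 0; i < 6; i++)
        printf("%02x%s", phys_addr[i], i < 5 ? ":" : "\n");
    return 0;
}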
/openbmc/linux/include/rdma/
ib_sa.h 184 struct sa_path_rec_ib ib; member
218 static inline void path_conv_opa_to_ib(struct sa_path_rec *ib, in path_conv_opa_to_ib() argument
226 ib->dgid.global.interface_id in path_conv_opa_to_ib()
228 ib->dgid.global.subnet_prefix in path_conv_opa_to_ib()
230 ib->sgid.global.interface_id in path_conv_opa_to_ib()
232 ib->dgid.global.subnet_prefix in path_conv_opa_to_ib()
234 ib->ib.dlid = 0; in path_conv_opa_to_ib()
236 ib->ib.slid = 0; in path_conv_opa_to_ib()
238 ib->ib.dlid = htons(ntohl(opa->opa.dlid)); in path_conv_opa_to_ib()
239 ib->ib.slid = htons(ntohl(opa->opa.slid)); in path_conv_opa_to_ib()
[all …]
/openbmc/linux/drivers/infiniband/hw/mlx4/
ah.c 48 ah->av.ib.port_pd = cpu_to_be32(to_mpd(ib_ah->pd)->pdn | in create_ib_ah()
50 ah->av.ib.g_slid = rdma_ah_get_path_bits(ah_attr); in create_ib_ah()
51 ah->av.ib.sl_tclass_flowlabel = in create_ib_ah()
56 ah->av.ib.g_slid |= 0x80; in create_ib_ah()
57 ah->av.ib.gid_index = grh->sgid_index; in create_ib_ah()
58 ah->av.ib.hop_limit = grh->hop_limit; in create_ib_ah()
59 ah->av.ib.sl_tclass_flowlabel |= in create_ib_ah()
62 memcpy(ah->av.ib.dgid, grh->dgid.raw, 16); in create_ib_ah()
65 ah->av.ib.dlid = cpu_to_be16(rdma_ah_get_dlid(ah_attr)); in create_ib_ah()
73 ah->av.ib.stat_rate = static_rate; in create_ib_ah()
[all …]
