Searched refs:tx_ctx (Results 1 – 17 of 17) sorted by relevance

/openbmc/linux/drivers/net/ethernet/fungible/funeth/
funeth_ktls.c
32 struct fun_ktls_tx_ctx *tx_ctx; in fun_ktls_add() local
64 tx_ctx = tls_driver_ctx(sk, direction); in fun_ktls_add()
65 tx_ctx->tlsid = rsp.tlsid; in fun_ktls_add()
66 tx_ctx->next_seq = start_offload_tcp_sn; in fun_ktls_add()
77 struct fun_ktls_tx_ctx *tx_ctx; in fun_ktls_del() local
82 tx_ctx = __tls_driver_ctx(tls_ctx, direction); in fun_ktls_del()
89 req.tlsid = tx_ctx->tlsid; in fun_ktls_del()
100 struct fun_ktls_tx_ctx *tx_ctx; in fun_ktls_resync() local
106 tx_ctx = tls_driver_ctx(sk, direction); in fun_ktls_resync()
113 req.tlsid = tx_ctx->tlsid; in fun_ktls_resync()
[all …]
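
The funeth hits trace the kTLS TX offload lifecycle: on add, tls_driver_ctx() hands back a driver-private per-socket area where the device's session id (tlsid) and the starting TCP sequence number are stored; del and resync later read the same tlsid back to address the session on the device. Below is a minimal userspace model of that shape only; every name is invented and it is not the real driver code.

#include <stdint.h>
#include <stdio.h>

/* Driver-private per-connection area; models struct fun_ktls_tx_ctx. */
struct toy_ktls_tx_ctx {
	uint64_t tlsid;    /* session handle returned by the device */
	uint32_t next_seq; /* next TCP sequence expected on the offloaded flow */
};

/* Models tls_driver_ctx(sk, direction): the TLS core hands back the
 * driver-private area it reserved for this socket. */
static struct toy_ktls_tx_ctx *toy_driver_ctx(struct toy_ktls_tx_ctx *priv)
{
	return priv;
}

/* "add": record what the device reported for this session. */
static void toy_ktls_add(struct toy_ktls_tx_ctx *priv,
			 uint64_t rsp_tlsid, uint32_t start_offload_tcp_sn)
{
	struct toy_ktls_tx_ctx *tx_ctx = toy_driver_ctx(priv);

	tx_ctx->tlsid = rsp_tlsid;
	tx_ctx->next_seq = start_offload_tcp_sn;
}

/* "del"/"resync": the request to the device is keyed by the stored tlsid. */
static uint64_t toy_ktls_del(struct toy_ktls_tx_ctx *priv)
{
	return toy_driver_ctx(priv)->tlsid;
}

int main(void)
{
	struct toy_ktls_tx_ctx priv = { 0 };

	toy_ktls_add(&priv, 0xabcd, 1000);
	printf("tearing down tlsid %#llx\n",
	       (unsigned long long)toy_ktls_del(&priv));
	return 0;
}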
/openbmc/libmctp/
i2c.c
118 i2c->tx_ctx); in mctp_binding_i2c_tx()
145 mctp_i2c_tx_fn tx_fn, void *tx_ctx) in mctp_i2c_setup() argument
166 i2c->tx_ctx = tx_ctx; in mctp_i2c_setup()
i2c-internal.h
42 void *tx_ctx; member
libmctp-i2c.h
11 mctp_i2c_tx_fn tx_fn, void *tx_ctx);
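
The libmctp hits show the classic C callback-registration pattern: mctp_i2c_setup() takes a transmit function plus an opaque void *tx_ctx, the binding stores both (i2c-internal.h line 42), and i2c.c line 118 later passes the stored tx_ctx back, presumably to the registered tx_fn. A small self-contained sketch of that pattern, with hypothetical names rather than the real libmctp prototypes (which are elided in the listing):

#include <stddef.h>
#include <stdio.h>

/* Transmit callback type: the binding never looks inside tx_ctx,
 * it only hands it back to the caller-supplied function. */
typedef int (*toy_tx_fn)(const void *buf, size_t len, void *tx_ctx);

struct toy_binding {
	toy_tx_fn tx_fn;
	void *tx_ctx; /* opaque caller context, stored at setup time */
};

/* Models mctp_i2c_setup(): remember the callback and its context. */
static void toy_setup(struct toy_binding *b, toy_tx_fn tx_fn, void *tx_ctx)
{
	b->tx_fn = tx_fn;
	b->tx_ctx = tx_ctx;
}

/* Models mctp_binding_i2c_tx(): invoke the callback with the stored context. */
static int toy_tx(struct toy_binding *b, const void *buf, size_t len)
{
	return b->tx_fn(buf, len, b->tx_ctx);
}

/* Example user: the context here is just a stdio stream. */
static int file_tx(const void *buf, size_t len, void *tx_ctx)
{
	return fwrite(buf, 1, len, (FILE *)tx_ctx) == len ? 0 : -1;
}

int main(void)
{
	struct toy_binding b;

	toy_setup(&b, file_tx, stdout);
	return toy_tx(&b, "hello\n", 6);
}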
/openbmc/linux/drivers/net/ethernet/chelsio/inline_crypto/ch_ktls/
chcr_ktls.c
364 struct chcr_ktls_ofld_ctx_tx *tx_ctx = in chcr_ktls_dev_del() local
366 struct chcr_ktls_info *tx_info = tx_ctx->chcr_info; in chcr_ktls_dev_del()
399 tx_ctx->chcr_info = NULL; in chcr_ktls_dev_del()
420 struct chcr_ktls_ofld_ctx_tx *tx_ctx; in chcr_ktls_dev_add() local
430 tx_ctx = chcr_get_ktls_tx_context(tls_ctx); in chcr_ktls_dev_add()
443 if (tx_ctx->chcr_info) in chcr_ktls_dev_add()
569 tx_ctx->chcr_info = tx_info; in chcr_ktls_dev_add()
650 struct chcr_ktls_ofld_ctx_tx *tx_ctx; in chcr_ktls_cpl_act_open_rpl() local
689 tx_ctx = chcr_get_ktls_tx_context(tls_ctx); in chcr_ktls_cpl_act_open_rpl()
692 ret = xa_insert_bh(&u_ctx->tid_list, tid, tx_ctx, in chcr_ktls_cpl_act_open_rpl()
[all …]
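
In the chcr driver, dev_add caches the driver's connection state in tx_ctx->chcr_info and dev_del reads it back and clears it, while chcr_ktls_cpl_act_open_rpl() inserts the tx_ctx into u_ctx->tid_list keyed by the hardware TID (line 692) so later completions can be mapped back to the connection. A rough userspace stand-in for that TID-to-context map, using a plain array instead of the kernel xarray; all names are invented:

#include <errno.h>
#include <stddef.h>
#include <stdio.h>

#define TOY_MAX_TID 64

struct toy_tx_ctx {
	void *drv_info; /* models tx_ctx->chcr_info: driver state, NULL after teardown */
};

/* Models the tid_list: map a hardware TID to its connection context. */
static struct toy_tx_ctx *tid_map[TOY_MAX_TID];

/* Models xa_insert(): fail if the slot is already occupied. */
static int toy_tid_insert(unsigned int tid, struct toy_tx_ctx *ctx)
{
	if (tid >= TOY_MAX_TID)
		return -EINVAL;
	if (tid_map[tid])
		return -EBUSY;
	tid_map[tid] = ctx;
	return 0;
}

static struct toy_tx_ctx *toy_tid_lookup(unsigned int tid)
{
	return tid < TOY_MAX_TID ? tid_map[tid] : NULL;
}

int main(void)
{
	struct toy_tx_ctx conn = { .drv_info = &conn };

	if (toy_tid_insert(7, &conn) == 0)
		printf("tid 7 -> ctx %p\n", (void *)toy_tid_lookup(7));

	/* Models dev_del: drop the driver state but keep the context slot
	 * until the TID itself is released. */
	conn.drv_info = NULL;
	return 0;
}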
/openbmc/linux/drivers/infiniband/sw/siw/
siw_qp.c
137 qp->tx_ctx.tx_suspend = 1; in siw_qp_llp_close()
232 struct siw_iwarp_tx *c_tx = &qp->tx_ctx; in siw_qp_enable_crc()
586 if (qp->tx_ctx.mpa_crc_hd) { in siw_send_terminate()
587 crypto_shash_init(qp->tx_ctx.mpa_crc_hd); in siw_send_terminate()
588 if (crypto_shash_update(qp->tx_ctx.mpa_crc_hd, in siw_send_terminate()
594 if (crypto_shash_update(qp->tx_ctx.mpa_crc_hd, in siw_send_terminate()
599 crypto_shash_final(qp->tx_ctx.mpa_crc_hd, (u8 *)&crc); in siw_send_terminate()
663 qp->tx_ctx.ddp_msn[RDMAP_UNTAGGED_QN_SEND] = 0; in siw_qp_nextstate_from_idle()
664 qp->tx_ctx.ddp_msn[RDMAP_UNTAGGED_QN_RDMA_READ] = 0; in siw_qp_nextstate_from_idle()
665 qp->tx_ctx.ddp_msn[RDMAP_UNTAGGED_QN_TERMINATE] = 0; in siw_qp_nextstate_from_idle()
[all …]
siw_qp_tx.c
716 struct siw_iwarp_tx *c_tx = &qp->tx_ctx; in siw_prepare_fpdu()
804 struct siw_iwarp_tx *c_tx = &qp->tx_ctx; in siw_qp_sq_proc_tx()
806 int rv = 0, burst_len = qp->tx_ctx.burst; in siw_qp_sq_proc_tx()
926 qp->tx_ctx.burst = burst_len; in siw_qp_sq_proc_tx()
1046 if (unlikely(qp->tx_ctx.tx_suspend)) { in siw_qp_sq_process()
1104 qp->tx_ctx.ctrl_sent, qp->tx_ctx.ctrl_len, in siw_qp_sq_process()
1105 qp->tx_ctx.bytes_unsent); in siw_qp_sq_process()
1141 if (!qp->tx_ctx.tx_suspend) in siw_qp_sq_process()
1186 !qp->tx_ctx.tx_suspend)) { in siw_sq_resume()
1194 if (!qp->tx_ctx.tx_suspend) in siw_sq_resume()
siw.h
435 struct siw_iwarp_tx tx_ctx; /* Transmit context */ member
474 #define tx_qp(tx) container_of(tx, struct siw_qp, tx_ctx)
475 #define tx_wqe(qp) (&(qp)->tx_ctx.wqe_active)
siw_verbs.c
430 qp->tx_ctx.gso_seg_limit = 1; in siw_create_qp()
431 qp->tx_ctx.zcopy_tx = zcopy_tx; in siw_create_qp()
574 qp->tx_ctx.tx_suspend = 1; in siw_verbs_modify_qp()
623 kfree(qp->tx_ctx.mpa_crc_hd); in siw_destroy_qp()
966 qp->tx_ctx.in_syscall = 1; in siw_post_send()
968 if (siw_qp_sq_process(qp) != 0 && !(qp->tx_ctx.tx_suspend)) in siw_post_send()
971 qp->tx_ctx.in_syscall = 0; in siw_post_send()
siw_cm.c
380 qp->tx_ctx.tx_suspend = 1; in siw_qp_cm_drop()
758 qp->tx_ctx.gso_seg_limit = 0; in siw_proc_mpareply()
1306 cep->qp->tx_ctx.tx_suspend = 1; in siw_cm_llp_state_change()
1591 qp->tx_ctx.gso_seg_limit = 0; in siw_accept()
siw_qp_rx.c
1165 if (qp->tx_ctx.orq_fence) { in siw_check_tx_fence()
1186 qp->tx_ctx.orq_fence = 0; in siw_check_tx_fence()
1194 qp->tx_ctx.orq_fence = 0; in siw_check_tx_fence()
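
In siw the transmit context is embedded in the QP (siw.h line 435), and the tx_qp() macro at line 474 uses container_of() to get from a struct siw_iwarp_tx pointer back to its enclosing struct siw_qp. The same technique in a standalone sketch; the generic container_of definition is standard, but the struct layout here is invented:

#include <stddef.h>
#include <stdio.h>

/* Generic container_of: recover the enclosing struct from a member pointer. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

struct toy_tx {
	int burst;
	int tx_suspend;
};

struct toy_qp {
	int id;
	struct toy_tx tx_ctx; /* embedded transmit context, as in struct siw_qp */
};

#define tx_qp(tx) container_of(tx, struct toy_qp, tx_ctx)

int main(void)
{
	struct toy_qp qp = { .id = 42 };
	struct toy_tx *c_tx = &qp.tx_ctx; /* what the TX path typically holds */

	/* Map the TX context back to its QP, as tx_qp() does in siw. */
	printf("qp id from tx context: %d\n", tx_qp(c_tx)->id);
	return 0;
}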
/openbmc/linux/net/wireless/
lib80211_crypt_wep.c
35 struct arc4_ctx tx_ctx; member
138 arc4_setkey(&wep->tx_ctx, key, klen); in lib80211_wep_encrypt()
139 arc4_crypt(&wep->tx_ctx, pos, pos, len + 4); in lib80211_wep_encrypt()
/openbmc/linux/drivers/net/ethernet/mellanox/mlx5/core/en_accel/
ktls_tx.c
98 struct tls_offload_context_tx *tx_ctx; member
496 priv_tx->tx_ctx = tls_offload_ctx_tx(tls_ctx); in mlx5e_ktls_add_tx()
625 struct tls_offload_context_tx *tx_ctx = priv_tx->tx_ctx; in tx_sync_info_get() local
632 spin_lock_irqsave(&tx_ctx->lock, flags); in tx_sync_info_get()
633 record = tls_get_record(tx_ctx, tcp_seq, &info->rcd_sn); in tx_sync_info_get()
672 spin_unlock_irqrestore(&tx_ctx->lock, flags); in tx_sync_info_get()
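
In the mlx5 resync path, tx_sync_info_get() takes tx_ctx->lock and asks the TLS core (tls_get_record()) for the record covering a given TCP sequence number, copying out what it needs before dropping the lock. A simplified lock-protected lookup over a plain array of records, purely to illustrate the shape of that code; it uses a pthread mutex instead of a spinlock and an invented record layout:

#include <pthread.h>
#include <stdint.h>
#include <stdio.h>

struct toy_record {
	uint32_t start_seq; /* first TCP sequence byte covered by this record */
	uint32_t end_seq;   /* one past the last covered byte */
};

struct toy_offload_ctx {
	pthread_mutex_t lock;     /* models tx_ctx->lock */
	struct toy_record recs[4];
	int nr_recs;
};

/* Models tx_sync_info_get(): copy the matching record out under the lock. */
static int toy_get_record(struct toy_offload_ctx *ctx, uint32_t tcp_seq,
			  struct toy_record *out)
{
	int ret = -1;

	pthread_mutex_lock(&ctx->lock);
	for (int i = 0; i < ctx->nr_recs; i++) {
		if (tcp_seq >= ctx->recs[i].start_seq &&
		    tcp_seq < ctx->recs[i].end_seq) {
			*out = ctx->recs[i]; /* copy before dropping the lock */
			ret = 0;
			break;
		}
	}
	pthread_mutex_unlock(&ctx->lock);
	return ret;
}

int main(void)
{
	struct toy_offload_ctx ctx = {
		.lock = PTHREAD_MUTEX_INITIALIZER,
		.recs = { { 0, 1500 }, { 1500, 3000 } },
		.nr_recs = 2,
	};
	struct toy_record r;

	if (toy_get_record(&ctx, 2000, &r) == 0)
		printf("seq 2000 is in record [%u, %u)\n", r.start_seq, r.end_seq);
	return 0;
}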
/openbmc/linux/net/tipc/
crypto.c
738 struct tipc_crypto_tx_ctx *tx_ctx; in tipc_aead_encrypt() local
773 ctx = tipc_aead_mem_alloc(tfm, sizeof(*tx_ctx), &iv, &req, &sg, nsg); in tipc_aead_encrypt()
809 tx_ctx = (struct tipc_crypto_tx_ctx *)ctx; in tipc_aead_encrypt()
810 tx_ctx->aead = aead; in tipc_aead_encrypt()
811 tx_ctx->bearer = b; in tipc_aead_encrypt()
812 memcpy(&tx_ctx->dst, dst, sizeof(*dst)); in tipc_aead_encrypt()
836 struct tipc_crypto_tx_ctx *tx_ctx = TIPC_SKB_CB(skb)->crypto_ctx; in tipc_aead_encrypt_done() local
837 struct tipc_bearer *b = tx_ctx->bearer; in tipc_aead_encrypt_done()
838 struct tipc_aead *aead = tx_ctx->aead; in tipc_aead_encrypt_done()
847 b->media->send_msg(net, skb, b, &tx_ctx->dst); in tipc_aead_encrypt_done()
[all …]
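
The tipc hits show the usual pattern for asynchronous crypto: before submitting the request, tipc_aead_encrypt() packs everything the completion handler will need (the aead, the bearer, the destination address) into a tx_ctx stashed with the skb, and tipc_aead_encrypt_done() unpacks it to finish transmission. A stripped-down model of carrying a completion context through an asynchronous operation; the callback here is invoked synchronously and all names are made up:

#include <stdio.h>
#include <stdlib.h>

/* Everything the completion handler needs to finish the send. */
struct toy_tx_ctx {
	const char *bearer; /* models tx_ctx->bearer */
	char dst[16];       /* models tx_ctx->dst */
};

typedef void (*toy_done_fn)(void *data);

/* Completion handler: unpack the context and "send" the result. */
static void toy_encrypt_done(void *data)
{
	struct toy_tx_ctx *tx_ctx = data;

	printf("sending encrypted frame via %s to %s\n",
	       tx_ctx->bearer, tx_ctx->dst);
	free(tx_ctx);
}

/* Submission path: allocate the context, fill it, hand it to the "async" op. */
static int toy_encrypt(const char *bearer, const char *dst, toy_done_fn done)
{
	struct toy_tx_ctx *tx_ctx = malloc(sizeof(*tx_ctx));

	if (!tx_ctx)
		return -1;
	tx_ctx->bearer = bearer;
	snprintf(tx_ctx->dst, sizeof(tx_ctx->dst), "%s", dst);

	/* A real implementation would return here and let the crypto layer
	 * call done() later; the context is what survives until then. */
	done(tx_ctx);
	return 0;
}

int main(void)
{
	return toy_encrypt("eth0", "node-2", toy_encrypt_done);
}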
/openbmc/linux/drivers/net/ethernet/intel/i40e/
i40e_virtchnl_pf.c
638 struct i40e_hmc_obj_txq tx_ctx; in i40e_config_vsi_tx_queue() local
656 memset(&tx_ctx, 0, sizeof(struct i40e_hmc_obj_txq)); in i40e_config_vsi_tx_queue()
659 tx_ctx.base = info->dma_ring_addr / 128; in i40e_config_vsi_tx_queue()
660 tx_ctx.qlen = info->ring_len; in i40e_config_vsi_tx_queue()
661 tx_ctx.rdylist = le16_to_cpu(vsi->info.qs_handle[0]); in i40e_config_vsi_tx_queue()
662 tx_ctx.rdylist_act = 0; in i40e_config_vsi_tx_queue()
663 tx_ctx.head_wb_ena = info->headwb_enabled; in i40e_config_vsi_tx_queue()
664 tx_ctx.head_wb_addr = info->dma_headwb_addr; in i40e_config_vsi_tx_queue()
677 ret = i40e_set_lan_tx_queue_context(hw, pf_queue_id, &tx_ctx); in i40e_config_vsi_tx_queue()
i40e_main.c
3479 struct i40e_hmc_obj_txq tx_ctx; in i40e_configure_tx_ring() local
3498 memset(&tx_ctx, 0, sizeof(tx_ctx)); in i40e_configure_tx_ring()
3500 tx_ctx.new_context = 1; in i40e_configure_tx_ring()
3501 tx_ctx.base = (ring->dma / 128); in i40e_configure_tx_ring()
3502 tx_ctx.qlen = ring->count; in i40e_configure_tx_ring()
3503 tx_ctx.fd_ena = !!(vsi->back->flags & (I40E_FLAG_FD_SB_ENABLED | in i40e_configure_tx_ring()
3505 tx_ctx.timesync_ena = !!(vsi->back->flags & I40E_FLAG_PTP); in i40e_configure_tx_ring()
3508 tx_ctx.head_wb_ena = 1; in i40e_configure_tx_ring()
3509 tx_ctx.head_wb_addr = ring->dma + in i40e_configure_tx_ring()
3524 tx_ctx.rdylist = in i40e_configure_tx_ring()
[all …]
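
Both i40e hits follow the same recipe for programming a hardware TX queue: zero the HMC context struct with memset(), fill in the ring base address (in 128-byte units), queue length, and feature bits, then hand the whole struct to the queue-context helper. A compact sketch of that zero-init-then-program style; the field names and the 128-byte scaling mirror the listing above, everything else (including the write-back placement) is assumed:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Stand-in for struct i40e_hmc_obj_txq: only a few representative fields. */
struct toy_txq_ctx {
	uint64_t base;        /* ring DMA address / 128 */
	uint16_t qlen;        /* number of descriptors */
	uint8_t  head_wb_ena; /* head write-back enable */
	uint64_t head_wb_addr;
};

/* Stand-in for i40e_set_lan_tx_queue_context(): would program HW/HMC. */
static int toy_set_txq_context(unsigned int queue, const struct toy_txq_ctx *ctx)
{
	printf("q%u: base=%#llx qlen=%u head_wb=%u\n", queue,
	       (unsigned long long)ctx->base, (unsigned)ctx->qlen,
	       (unsigned)ctx->head_wb_ena);
	return 0;
}

static int toy_configure_tx_ring(unsigned int queue, uint64_t ring_dma,
				 uint16_t ring_count, int headwb)
{
	struct toy_txq_ctx tx_ctx;

	/* Clear everything first so unused fields are in a known state. */
	memset(&tx_ctx, 0, sizeof(tx_ctx));

	tx_ctx.base = ring_dma / 128; /* HW takes the base in 128-byte units */
	tx_ctx.qlen = ring_count;
	tx_ctx.head_wb_ena = !!headwb;
	if (headwb)
		/* Assumed: write-back area placed just past the descriptor ring. */
		tx_ctx.head_wb_addr = ring_dma + (uint64_t)ring_count * 16;

	return toy_set_txq_context(queue, &tx_ctx);
}

int main(void)
{
	return toy_configure_tx_ring(0, 0x100000, 512, 1);
}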
/openbmc/linux/net/tls/
tls_sw.c
2592 struct tls_sw_context_tx *tx_ctx = tls_sw_ctx_tx(ctx); in tls_sw_write_space() local
2595 if (tls_is_tx_ready(tx_ctx) && in tls_sw_write_space()
2596 !test_and_set_bit(BIT_TX_SCHEDULED, &tx_ctx->tx_bitmask)) in tls_sw_write_space()
2597 schedule_delayed_work(&tx_ctx->tx_work.work, 0); in tls_sw_write_space()
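
tls_sw_write_space() only schedules the TX work if the transmit path is ready and the BIT_TX_SCHEDULED flag was not already set; test_and_set_bit() makes the check-and-mark atomic, so concurrent callers cannot queue the work twice. The same guard expressed with C11 atomics; a plain function call stands in for schedule_delayed_work() and the names are invented:

#include <stdatomic.h>
#include <stdbool.h>
#include <stdio.h>

struct toy_tx_ctx {
	atomic_flag tx_scheduled; /* models BIT_TX_SCHEDULED in tx_bitmask */
	bool tx_ready;            /* models tls_is_tx_ready() */
};

/* Stand-in for schedule_delayed_work(&tx_ctx->tx_work.work, 0). */
static void toy_schedule_tx_work(struct toy_tx_ctx *ctx)
{
	(void)ctx;
	printf("tx work scheduled\n");
}

/* Models tls_sw_write_space(): queue at most one pending work item. */
static void toy_write_space(struct toy_tx_ctx *ctx)
{
	/* atomic_flag_test_and_set() returns the previous value, so only the
	 * caller that flips the flag from clear to set schedules the work. */
	if (ctx->tx_ready && !atomic_flag_test_and_set(&ctx->tx_scheduled))
		toy_schedule_tx_work(ctx);
}

int main(void)
{
	struct toy_tx_ctx ctx = {
		.tx_scheduled = ATOMIC_FLAG_INIT,
		.tx_ready = true,
	};

	toy_write_space(&ctx); /* schedules the work */
	toy_write_space(&ctx); /* no-op: already scheduled */
	return 0;
}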