/openbmc/linux/drivers/net/ethernet/chelsio/inline_crypto/chtls/

chtls_hw.c
    47  unsigned int wrlen;                                                      in __set_tcb_field() local
    49  wrlen = roundup(sizeof(*req) + sizeof(*sc), 16);                         in __set_tcb_field()
    52  req = (struct cpl_set_tcb_field *)__skb_put(skb, wrlen);                 in __set_tcb_field()
    68  unsigned int wrlen;                                                      in chtls_set_tcb_field() local
    71  wrlen = roundup(sizeof(*req) + sizeof(*sc), 16);                         in chtls_set_tcb_field()
    73  skb = alloc_skb(wrlen, GFP_ATOMIC);                                      in chtls_set_tcb_field()
    77  credits_needed = DIV_ROUND_UP(wrlen, 16);                                in chtls_set_tcb_field()
    96  unsigned int wrlen;                                                      in chtls_set_tcb_field_rpl_skb() local
    98  wrlen = sizeof(struct cpl_set_tcb_field) + sizeof(struct ulptx_idata);   in chtls_set_tcb_field_rpl_skb()
    99  wrlen = roundup(wrlen, 16);                                              in chtls_set_tcb_field_rpl_skb()
    [all …]
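The pattern these matches share: size the CPL message plus its ULPTX trailer, round the sum up to a 16-byte boundary, and derive the tx credit count from the rounded length with DIV_ROUND_UP(wrlen, 16). A minimal standalone sketch of that arithmetic follows; the struct sizes are placeholders (the real cpl_set_tcb_field and ulptx_idata layouts live in the Chelsio message headers), and roundup16()/div_round_up16() stand in for the kernel's roundup() and DIV_ROUND_UP() macros.

/* Sketch only: stand-in sizes; the real structs come from the Chelsio
 * T4 message headers, not from here. */
#include <stdio.h>

struct cpl_set_tcb_field { unsigned char pad[24]; };  /* assumed size */
struct ulptx_idata       { unsigned char pad[8];  };  /* assumed size */

/* Round x up to the next multiple of 16, as roundup(x, 16) does. */
static unsigned int roundup16(unsigned int x)
{
	return (x + 15U) & ~15U;
}

/* DIV_ROUND_UP(x, 16): number of 16-byte credits the WR consumes. */
static unsigned int div_round_up16(unsigned int x)
{
	return (x + 15U) / 16U;
}

int main(void)
{
	unsigned int wrlen = roundup16(sizeof(struct cpl_set_tcb_field) +
				       sizeof(struct ulptx_idata));
	unsigned int credits_needed = div_round_up16(wrlen);

	printf("wrlen=%u credits=%u\n", wrlen, credits_needed);
	return 0;
}

As the matches show, the same rounded wrlen is what the driver hands to alloc_skb() and __skb_put(), so the reserved skb area and the credit accounting always agree.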
/openbmc/u-boot/drivers/misc/

ds4510.c
    34  int wrlen;                                         in ds4510_mem_write() local
    38  wrlen = DS4510_EEPROM_PAGE_SIZE -                  in ds4510_mem_write()
    40  if (count < wrlen)                                 in ds4510_mem_write()
    41  wrlen = count;                                     in ds4510_mem_write()
    42  if (i2c_write(chip, offset, 1, &buf[i], wrlen))    in ds4510_mem_write()
    50  count -= wrlen;                                    in ds4510_mem_write()
    51  offset += wrlen;                                   in ds4510_mem_write()
    52  i += wrlen;                                        in ds4510_mem_write()
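The ds4510 matches are the usual EEPROM page-write chunking: each i2c_write() is capped so it never crosses a page boundary, and count/offset/index then advance by the amount written. A hedged userspace sketch of the same loop; i2c_write_stub() and the 8-byte page size are stand-ins for the real i2c_write() and DS4510_EEPROM_PAGE_SIZE, and the offset-within-page term is an assumption, since the matched line at 38 is truncated at its continuation.

#include <stdio.h>
#include <string.h>

#define PAGE_SIZE_EEPROM 8	/* stand-in for DS4510_EEPROM_PAGE_SIZE */

/* Stub: pretend to write wrlen bytes at offset; return 0 on success. */
static int i2c_write_stub(unsigned int offset, const unsigned char *buf,
			  int wrlen)
{
	printf("write %d bytes at offset %u (first=0x%02x)\n",
	       wrlen, offset, buf[0]);
	return 0;
}

static int mem_write(unsigned int offset, unsigned char *buf, int count)
{
	int i = 0;

	while (count > 0) {
		/* Bytes left before the next page boundary (assumed). */
		int wrlen = PAGE_SIZE_EEPROM - (offset % PAGE_SIZE_EEPROM);

		if (count < wrlen)
			wrlen = count;
		if (i2c_write_stub(offset, &buf[i], wrlen))
			return -1;

		count  -= wrlen;
		offset += wrlen;
		i      += wrlen;
	}
	return 0;
}

int main(void)
{
	unsigned char data[20];

	memset(data, 0xab, sizeof(data));
	return mem_write(5, data, sizeof(data));
}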
/openbmc/linux/drivers/scsi/cxgbi/

libcxgbi.h
   365  static inline struct sk_buff *alloc_wr(int wrlen, int dlen, gfp_t gfp)   in alloc_wr() argument
   367  struct sk_buff *skb = alloc_skb(wrlen + dlen, gfp);                      in alloc_wr()
   370  __skb_put(skb, wrlen);                                                   in alloc_wr()
   371  memset(skb->head, 0, wrlen + dlen);                                      in alloc_wr()
   373  pr_info("alloc cpl wr skb %u+%u, OOM.\n", wrlen, dlen);                  in alloc_wr()
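alloc_wr() here bundles the work-request header and its payload into one skb: allocate wrlen + dlen bytes, reserve the wrlen header portion, zero the whole area, and log if allocation fails. A rough userspace analogue under those assumptions, with malloc standing in for alloc_skb() and a small struct replacing the sk_buff head/tail bookkeeping:

/* Rough analogue of the alloc_wr() helper above: one buffer holding
 * wrlen bytes of WR header followed by dlen bytes of payload, zeroed
 * up front. Not the kernel API; malloc replaces alloc_skb. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct wr_buf {
	unsigned char *data;	/* start of the WR header */
	size_t	       wrlen;	/* header bytes already reserved */
	size_t	       dlen;	/* payload bytes that may follow */
};

static struct wr_buf *alloc_wr_sketch(size_t wrlen, size_t dlen)
{
	struct wr_buf *b = malloc(sizeof(*b));

	if (!b)
		return NULL;
	b->data = malloc(wrlen + dlen);
	if (!b->data) {
		fprintf(stderr, "alloc cpl wr buf %zu+%zu, OOM.\n",
			wrlen, dlen);
		free(b);
		return NULL;
	}
	memset(b->data, 0, wrlen + dlen);	/* as the memset() above */
	b->wrlen = wrlen;			/* mirrors __skb_put(skb, wrlen) */
	b->dlen = dlen;
	return b;
}

int main(void)
{
	struct wr_buf *b = alloc_wr_sketch(32, 128);

	if (!b)
		return 1;
	free(b->data);
	free(b);
	return 0;
}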
/openbmc/linux/drivers/infiniband/hw/cxgb4/

cm.c
   649  u32 wrlen = roundup(sizeof(struct cpl_close_con_req), 16);    in send_halfclose() local
   655  cxgb_mk_close_con_req(skb, wrlen, ep->hwtid, ep->txq_idx,     in send_halfclose()
   665  int wrlen = roundup(sizeof(*req), 16);                        in read_tcb() local
   672  req = (struct cpl_get_tcb *) skb_put(skb, wrlen);             in read_tcb()
   673  memset(req, 0, wrlen);                                        in read_tcb()
   689  u32 wrlen = roundup(sizeof(struct cpl_abort_req), 16);        in send_abort_req() local
   696  cxgb_mk_abort_req(req_skb, wrlen, ep->hwtid, ep->txq_idx,     in send_abort_req()
   726  int win, sizev4, sizev6, wrlen;                               in send_connect() local
   762  wrlen = (ep->com.remote_addr.ss_family == AF_INET) ?          in send_connect()
   768  skb = get_skb(NULL, wrlen, GFP_KERNEL);                       in send_connect()
   [all …]
/openbmc/linux/drivers/net/ethernet/chelsio/cxgb4/

cxgb4_uld.h
    75  #define INIT_ULPTX_WR(w, wrlen, atomic, tid) do { \                      argument
    78  (w)->wr.wr_mid = htonl(FW_WR_LEN16_V(DIV_ROUND_UP(wrlen, 16)) | \
sge.c
  2176  u32 wrlen;                                                                in ethofld_calc_tx_flits() local
  2178  wrlen = sizeof(struct fw_eth_tx_eo_wr) + sizeof(struct cpl_tx_pkt_core);  in ethofld_calc_tx_flits()
  2181  wrlen += sizeof(struct cpl_tx_pkt_lso_core);                              in ethofld_calc_tx_flits()
  2183  wrlen += roundup(hdr_len, 16);                                            in ethofld_calc_tx_flits()
  2186  flits = DIV_ROUND_UP(wrlen, 8);                                           in ethofld_calc_tx_flits()
  2202  u32 hdr_len, u32 wrlen)                                                   in write_eo_wr() argument
  2213  wrlen16 = DIV_ROUND_UP(wrlen, 16);                                        in write_eo_wr()
  2267  u32 wrlen, wrlen16, hdr_len, data_len;                                    in ethofld_hard_xmit() local
  2304  wrlen = flits * 8;                                                        in ethofld_hard_xmit()
  2305  wrlen16 = DIV_ROUND_UP(wrlen, 16);                                        in ethofld_hard_xmit()
  [all …]
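ethofld_calc_tx_flits() sizes the ETHOFLD work request in bytes and converts to flits (8-byte units), while the transmit path converts back (wrlen = flits * 8) and also derives wrlen16, the length in 16-byte units carried in the WR header. A small sketch of those unit conversions; the three message sizes below are placeholders for sizeof(struct fw_eth_tx_eo_wr), sizeof(struct cpl_tx_pkt_core) and sizeof(struct cpl_tx_pkt_lso_core).

#include <stdbool.h>
#include <stdio.h>

/* Placeholder message sizes; the real ones come from the cxgb4 headers. */
#define FW_ETH_TX_EO_WR_LEN	32
#define CPL_TX_PKT_CORE_LEN	16
#define CPL_TX_PKT_LSO_CORE_LEN	24

static unsigned int roundup16(unsigned int x) { return (x + 15U) & ~15U; }
static unsigned int div_round_up(unsigned int x, unsigned int d)
{
	return (x + d - 1U) / d;
}

/* Bytes of WR header material, then flits = 8-byte units of it. */
static unsigned int calc_tx_flits(unsigned int hdr_len, bool lso)
{
	unsigned int wrlen = FW_ETH_TX_EO_WR_LEN + CPL_TX_PKT_CORE_LEN;

	if (lso)
		wrlen += CPL_TX_PKT_LSO_CORE_LEN;
	wrlen += roundup16(hdr_len);	/* packet headers, padded to 16 */

	return div_round_up(wrlen, 8);
}

int main(void)
{
	unsigned int flits = calc_tx_flits(54, true);
	unsigned int wrlen = flits * 8;			/* back to bytes */
	unsigned int wrlen16 = div_round_up(wrlen, 16);	/* 16-byte units */

	printf("flits=%u wrlen=%u wrlen16=%u\n", flits, wrlen, wrlen16);
	return 0;
}

The two divisors come straight from the matches: the byte length is divided by 8 where the SGE wants flits, and by 16 where the WR header and credit accounting want 16-byte units.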
cxgb4_filter.c
  1747  unsigned int wrlen;                                                in cxgb4_del_hash_filter() local
  1772  wrlen = roundup(sizeof(*wr) + (sizeof(*req) + sizeof(*aligner))    in cxgb4_del_hash_filter()
  1774  skb = alloc_skb(wrlen, GFP_KERNEL);                                in cxgb4_del_hash_filter()
  1780  req = (struct cpl_set_tcb_field *)__skb_put(skb, wrlen);           in cxgb4_del_hash_filter()
  1781  INIT_ULPTX_WR(req, wrlen, 0, 0);                                   in cxgb4_del_hash_filter()
/openbmc/linux/drivers/scsi/cxgbi/cxgb3i/

cxgb3i.c
   333  static unsigned int wrlen __read_mostly;
   349  wrlen = wr_len * 8;                                                in init_wr_tab()
   412  if (wrs_needed > 1 && len + sizeof(struct tx_data_wr) <= wrlen)    in push_tx_frames()
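In cxgb3i the adapter's maximum work-request size arrives as a flit count, is converted to bytes once at init (wrlen = wr_len * 8), and push_tx_frames() later uses it to decide whether a payload plus its tx_data_wr header still fits in a single WR. A sketch of that check, with TX_DATA_WR_LEN standing in for sizeof(struct tx_data_wr) and the 128-flit value purely illustrative:

#include <stdbool.h>
#include <stdio.h>

#define TX_DATA_WR_LEN 16	/* stand-in for sizeof(struct tx_data_wr) */

static unsigned int max_wrlen;	/* max WR size in bytes, set once at init */

/* The adapter reports its max WR size in flits (8-byte units). */
static void init_wr_tab(unsigned int wr_len_flits)
{
	max_wrlen = wr_len_flits * 8;
}

/* Does this payload, plus its tx_data_wr header, fit in a single WR? */
static bool fits_single_wr(unsigned int len)
{
	return len + TX_DATA_WR_LEN <= max_wrlen;
}

int main(void)
{
	init_wr_tab(128);	/* e.g. 128 flits -> 1024 bytes */
	printf("1000 bytes fits: %d\n", fits_single_wr(1000));
	printf("1024 bytes fits: %d\n", fits_single_wr(1024));
	return 0;
}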
/openbmc/linux/drivers/crypto/chelsio/

chcr_algo.c
  2404  int aadmax, int wrlen,                  in chcr_aead_need_fallback() argument
  2412  (wrlen > SGE_MAX_WR_LEN))               in chcr_aead_need_fallback()