Searched refs:txq_size (Results 1 – 10 of 10) sorted by relevance
95    u32 txq_size;    /* TX Descriptors Queue Size */          member
116   u32 txq_size;    /* FCoE TX Descriptors Queue Size. */    member
146   u32 txq_size;    /* PDU TX Descriptors Queue Size. */     member
3414 ether_stat->txq_size = bp->tx_ring_size; in bnx2x_drv_info_ether_stat()
1966  e->tx_pending = q->txq_size[0];                        in get_sge_param()
1996  q->txq_size[0] = e->tx_pending;                        in set_sge_param()
1997  q->txq_size[1] = e->tx_pending;                        in set_sge_param()
1998  q->txq_size[2] = e->tx_pending;                        in set_sge_param()
2167  !cxgb_in_range(t.txq_size[0], MIN_TXQ_ENTRIES,         in cxgb_siocdevprivate()
2169  !cxgb_in_range(t.txq_size[1], MIN_TXQ_ENTRIES,         in cxgb_siocdevprivate()
2171  !cxgb_in_range(t.txq_size[2], MIN_CTRL_TXQ_ENTRIES,    in cxgb_siocdevprivate()
2183  t.fl_size[1] >= 0 || t.txq_size[0] >= 0 ||             in cxgb_siocdevprivate()
2184  t.txq_size[1] >= 0 || t.txq_size[2] >= 0 ||            in cxgb_siocdevprivate()
2210  if (t.txq_size[0] >= 0)                                in cxgb_siocdevprivate()
[all …]
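The cxgb3 hits show two user-facing paths onto the same parameters: ethtool's set_sge_param() fans one tx_pending value out to all three per-queue-set TX sizes, while the private ioctl handler range-checks each requested size (negative meaning "leave unchanged") before applying it. Below is a standalone sketch of that validate-then-apply pattern; the bound values are placeholders, not the driver's actual limits.

    #include <stdbool.h>
    #include <stdio.h>

    /* Placeholder bounds; the driver's real limits are MIN_TXQ_ENTRIES,
     * MIN_CTRL_TXQ_ENTRIES and matching MAX_* values. */
    #define MIN_TXQ_ENTRIES 4
    #define MAX_TXQ_ENTRIES 16384

    struct qset_params {
        int txq_size[3];    /* Ethernet, offload, control TX queues */
    };

    /* Same contract as cxgb_in_range() in the hits above: a negative value
     * means "leave this queue size unchanged" and passes the check. */
    static bool in_range(int val, int lo, int hi)
    {
        return val < 0 || (val >= lo && val <= hi);
    }

    /* Validate-then-apply, as cxgb_siocdevprivate() does for the ioctl path
     * and set_sge_param() does for ethtool's tx_pending. */
    static int apply_txq_sizes(struct qset_params *q, const int req[3])
    {
        for (int i = 0; i < 3; i++)
            if (!in_range(req[i], MIN_TXQ_ENTRIES, MAX_TXQ_ENTRIES))
                return -1;

        for (int i = 0; i < 3; i++)
            if (req[i] >= 0)                 /* "if (t.txq_size[i] >= 0)" */
                q->txq_size[i] = req[i];
        return 0;
    }

    int main(void)
    {
        struct qset_params q = { { 1024, 1024, 256 } };
        int req[3] = { 2048, 2048, -1 };     /* resize ETH/OFLD, keep CTRL */

        if (apply_txq_sizes(&q, req) == 0)
            printf("txq_size = %d/%d/%d\n",
                   q.txq_size[0], q.txq_size[1], q.txq_size[2]);
        return 0;
    }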
88 int32_t txq_size[3]; member
3073  q->txq[i].desc = alloc_ring(adapter->pdev, p->txq_size[i],    in t3_sge_alloc_qset()
3081  q->txq[i].size = p->txq_size[i];                               in t3_sge_alloc_qset()
3376  q->txq_size[TXQ_ETH] = 1024;                                   in t3_sge_prep()
3377  q->txq_size[TXQ_OFLD] = 1024;                                  in t3_sge_prep()
3378  q->txq_size[TXQ_CTRL] = 256;                                   in t3_sge_prep()
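Inside cxgb3's SGE code the same array drives allocation: t3_sge_prep() seeds default depths for the Ethernet, offload, and control TX queues, and t3_sge_alloc_qset() later allocates a descriptor ring of exactly that many entries for each. A self-contained sketch of the prepare-then-allocate flow, with calloc standing in for the driver's DMA ring allocator and an assumed 16-byte descriptor:

    #include <stdio.h>
    #include <stdlib.h>

    enum { TXQ_ETH, TXQ_OFLD, TXQ_CTRL, SGE_TXQ_PER_SET };

    struct qset_params { unsigned int txq_size[SGE_TXQ_PER_SET]; };

    struct tx_queue {
        void *desc;          /* descriptor ring (DMA-coherent in the driver) */
        unsigned int size;   /* number of descriptors */
    };

    /* Mirrors t3_sge_prep(): pick default ring depths per queue type. */
    static void sge_prep(struct qset_params *q)
    {
        q->txq_size[TXQ_ETH]  = 1024;
        q->txq_size[TXQ_OFLD] = 1024;
        q->txq_size[TXQ_CTRL] = 256;
    }

    /* Mirrors t3_sge_alloc_qset(): size each ring from the prepared parameters.
     * calloc stands in for alloc_ring(); 16 bytes per descriptor is assumed. */
    static int sge_alloc_qset(const struct qset_params *p,
                              struct tx_queue txq[SGE_TXQ_PER_SET])
    {
        for (int i = 0; i < SGE_TXQ_PER_SET; i++) {
            txq[i].desc = calloc(p->txq_size[i], 16);
            if (!txq[i].desc)
                return -1;
            txq[i].size = p->txq_size[i];
        }
        return 0;
    }

    int main(void)
    {
        struct qset_params p;
        struct tx_queue txq[SGE_TXQ_PER_SET];

        sge_prep(&p);
        if (sge_alloc_qset(&p, txq) == 0)
            printf("rings: %u/%u/%u entries\n",
                   txq[TXQ_ETH].size, txq[TXQ_OFLD].size, txq[TXQ_CTRL].size);

        for (int i = 0; i < SGE_TXQ_PER_SET; i++)
            free(txq[i].desc);
        return 0;
    }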
322 unsigned int txq_size[SGE_TXQ_PER_SET]; /* Tx queue sizes */ member
390 stats->txq_size = hba->max_sqes; in bnx2i_get_stats()
2127  int txq_size = 0;                            in ieee80211_if_add()  local
2132  txq_size += sizeof(struct txq_info) +        in ieee80211_if_add()
2135  ndev = alloc_netdev_mqs(size + txq_size,     in ieee80211_if_add()
2179  if (txq_size) {                              in ieee80211_if_add()
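In mac80211's ieee80211_if_add(), txq_size counts the extra bytes needed for the interface's struct txq_info plus any driver-private TX-queue area; the total is folded into the single alloc_netdev_mqs() allocation, and the later "if (txq_size)" guard initialises that area only for interface types that actually use a TX queue. A standalone sketch of that grow-one-allocation-then-carve-it-up pattern, using placeholder structures and an assumed per-queue private size:

    #include <stdio.h>
    #include <stdlib.h>

    /* Placeholder stand-ins for the base interface data and struct txq_info. */
    struct sdata    { char name[16]; void *txq; };
    struct txq_info { unsigned int ac; };

    #define DRV_TXQ_PRIV 64   /* assumed driver-private bytes per TX queue */

    int main(void)
    {
        size_t size = sizeof(struct sdata);
        size_t txq_size = 0;

        /* Only interfaces that use a TX queue pay for the extra space. */
        int has_txq = 1;
        if (has_txq)
            txq_size += sizeof(struct txq_info) + DRV_TXQ_PRIV;

        /* One allocation covers both, like alloc_netdev_mqs(size + txq_size, ...). */
        struct sdata *sdata = calloc(1, size + txq_size);
        if (!sdata)
            return 1;

        if (txq_size) {
            /* The TX-queue area lives right behind the base structure. */
            sdata->txq = (char *)sdata + size;
        }

        printf("allocated %zu bytes (%zu for txq)\n", size + txq_size, txq_size);
        free(sdata);
        return 0;
    }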
1889  u32 txq_size;                                in mana_create_txq()  local
1904  txq_size = MAX_SEND_BUFFERS_PER_QUEUE * 32;  in mana_create_txq()
1905  BUILD_BUG_ON(!MANA_PAGE_ALIGNED(txq_size));  in mana_create_txq()
1928  spec.queue_size = txq_size;                  in mana_create_txq()
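The mana driver sizes its TX work queue as a fixed number of send buffers times 32 bytes per entry, proves at compile time that the result is page aligned, and then hands it on as spec.queue_size. A minimal sketch of the same compile-time check, using C11 _Static_assert in place of the kernel's BUILD_BUG_ON; the page size and the 256-buffer constant are assumptions here, not taken verbatim from the driver's headers.

    #include <stdint.h>
    #include <stdio.h>

    #define MANA_PAGE_SIZE             4096   /* assumed page size */
    #define MAX_SEND_BUFFERS_PER_QUEUE 256    /* assumed value of the driver's constant */
    #define MANA_PAGE_ALIGNED(x)       (((x) & (MANA_PAGE_SIZE - 1)) == 0)

    /* Compile-time equivalent of BUILD_BUG_ON(!MANA_PAGE_ALIGNED(txq_size)):
     * the build fails if the queue size ever stops being page aligned. */
    _Static_assert(MANA_PAGE_ALIGNED(MAX_SEND_BUFFERS_PER_QUEUE * 32),
                   "TX queue size must be page aligned");

    int main(void)
    {
        uint32_t txq_size = MAX_SEND_BUFFERS_PER_QUEUE * 32;

        /* In the driver this value becomes spec.queue_size in mana_create_txq(). */
        printf("spec.queue_size = %u\n", (unsigned int)txq_size);
        return 0;
    }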
1802 stats_addr->txq_size = BNX2FC_SQ_WQES_MAX; in bnx2fc_ulp_get_stats()