Lines matching "num", "-", "txq" (Realtek rtw88 driver, tx.c)
1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
2 /* Copyright(c) 2018-2019 Realtek Corporation
18 hdr = (struct ieee80211_hdr *)skb->data; in rtw_tx_stats()
20 if (!ieee80211_is_data(hdr->frame_control)) in rtw_tx_stats()
23 if (!is_broadcast_ether_addr(hdr->addr1) && in rtw_tx_stats()
24 !is_multicast_ether_addr(hdr->addr1)) { in rtw_tx_stats()
25 rtwdev->stats.tx_unicast += skb->len; in rtw_tx_stats()
26 rtwdev->stats.tx_cnt++; in rtw_tx_stats()
28 rtwvif = (struct rtw_vif *)vif->drv_priv; in rtw_tx_stats()
29 rtwvif->stats.tx_unicast += skb->len; in rtw_tx_stats()
30 rtwvif->stats.tx_cnt++; in rtw_tx_stats()
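rtw_tx_stats() counts only unicast data frames, accumulating bytes (tx_unicast) and frames (tx_cnt) both per device and per vif; the driver's periodic housekeeping can then turn these counters into a traffic estimate. As a rough, driver-independent illustration of what such a byte counter yields over a 2-second poll (numbers and scaling are illustrative, not the driver's exact formula):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
        /* bytes accumulated by a unicast counter during one 2-second poll */
        uint64_t tx_unicast = 5u * 1024 * 1024;
        unsigned int period_s = 2;

        /* throughput in Mbit/s = bytes * 8 / seconds / 10^6 */
        unsigned int mbps = tx_unicast * 8 / period_s / 1000000;

        printf("~%u Mbit/s\n", mbps);   /* ~20 Mbit/s */
        return 0;
}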
37 struct rtw_tx_desc *tx_desc = (struct rtw_tx_desc *)skb->data; in rtw_tx_fill_tx_desc()
40 if (pkt_info->qsel == TX_DESC_QSEL_HIGH) in rtw_tx_fill_tx_desc()
43 tx_desc->w0 = le32_encode_bits(pkt_info->tx_pkt_size, RTW_TX_DESC_W0_TXPKTSIZE) | in rtw_tx_fill_tx_desc()
44 le32_encode_bits(pkt_info->offset, RTW_TX_DESC_W0_OFFSET) | in rtw_tx_fill_tx_desc()
45 le32_encode_bits(pkt_info->bmc, RTW_TX_DESC_W0_BMC) | in rtw_tx_fill_tx_desc()
46 le32_encode_bits(pkt_info->ls, RTW_TX_DESC_W0_LS) | in rtw_tx_fill_tx_desc()
47 le32_encode_bits(pkt_info->dis_qselseq, RTW_TX_DESC_W0_DISQSELSEQ); in rtw_tx_fill_tx_desc()
49 tx_desc->w1 = le32_encode_bits(pkt_info->qsel, RTW_TX_DESC_W1_QSEL) | in rtw_tx_fill_tx_desc()
50 le32_encode_bits(pkt_info->rate_id, RTW_TX_DESC_W1_RATE_ID) | in rtw_tx_fill_tx_desc()
51 le32_encode_bits(pkt_info->sec_type, RTW_TX_DESC_W1_SEC_TYPE) | in rtw_tx_fill_tx_desc()
52 le32_encode_bits(pkt_info->pkt_offset, RTW_TX_DESC_W1_PKT_OFFSET) | in rtw_tx_fill_tx_desc()
55 tx_desc->w2 = le32_encode_bits(pkt_info->ampdu_en, RTW_TX_DESC_W2_AGG_EN) | in rtw_tx_fill_tx_desc()
56 le32_encode_bits(pkt_info->report, RTW_TX_DESC_W2_SPE_RPT) | in rtw_tx_fill_tx_desc()
57 le32_encode_bits(pkt_info->ampdu_density, RTW_TX_DESC_W2_AMPDU_DEN) | in rtw_tx_fill_tx_desc()
58 le32_encode_bits(pkt_info->bt_null, RTW_TX_DESC_W2_BT_NULL); in rtw_tx_fill_tx_desc()
60 tx_desc->w3 = le32_encode_bits(pkt_info->hw_ssn_sel, RTW_TX_DESC_W3_HW_SSN_SEL) | in rtw_tx_fill_tx_desc()
61 le32_encode_bits(pkt_info->use_rate, RTW_TX_DESC_W3_USE_RATE) | in rtw_tx_fill_tx_desc()
62 le32_encode_bits(pkt_info->dis_rate_fallback, RTW_TX_DESC_W3_DISDATAFB) | in rtw_tx_fill_tx_desc()
63 le32_encode_bits(pkt_info->rts, RTW_TX_DESC_W3_USE_RTS) | in rtw_tx_fill_tx_desc()
64 le32_encode_bits(pkt_info->nav_use_hdr, RTW_TX_DESC_W3_NAVUSEHDR) | in rtw_tx_fill_tx_desc()
65 le32_encode_bits(pkt_info->ampdu_factor, RTW_TX_DESC_W3_MAX_AGG_NUM); in rtw_tx_fill_tx_desc()
67 tx_desc->w4 = le32_encode_bits(pkt_info->rate, RTW_TX_DESC_W4_DATARATE); in rtw_tx_fill_tx_desc()
69 tx_desc->w5 = le32_encode_bits(pkt_info->short_gi, RTW_TX_DESC_W5_DATA_SHORT) | in rtw_tx_fill_tx_desc()
70 le32_encode_bits(pkt_info->bw, RTW_TX_DESC_W5_DATA_BW) | in rtw_tx_fill_tx_desc()
71 le32_encode_bits(pkt_info->ldpc, RTW_TX_DESC_W5_DATA_LDPC) | in rtw_tx_fill_tx_desc()
72 le32_encode_bits(pkt_info->stbc, RTW_TX_DESC_W5_DATA_STBC); in rtw_tx_fill_tx_desc()
74 tx_desc->w6 = le32_encode_bits(pkt_info->sn, RTW_TX_DESC_W6_SW_DEFINE); in rtw_tx_fill_tx_desc()
76 tx_desc->w8 = le32_encode_bits(pkt_info->en_hwseq, RTW_TX_DESC_W8_EN_HWSEQ); in rtw_tx_fill_tx_desc()
78 tx_desc->w9 = le32_encode_bits(pkt_info->seq, RTW_TX_DESC_W9_SW_SEQ); in rtw_tx_fill_tx_desc()
80 if (pkt_info->rts) { in rtw_tx_fill_tx_desc()
81 tx_desc->w4 |= le32_encode_bits(DESC_RATE24M, RTW_TX_DESC_W4_RTSRATE); in rtw_tx_fill_tx_desc()
82 tx_desc->w5 |= le32_encode_bits(1, RTW_TX_DESC_W5_DATA_RTS_SHORT); in rtw_tx_fill_tx_desc()
85 if (pkt_info->tim_offset) in rtw_tx_fill_tx_desc()
86 tx_desc->w9 |= le32_encode_bits(1, RTW_TX_DESC_W9_TIM_EN) | in rtw_tx_fill_tx_desc()
87 le32_encode_bits(pkt_info->tim_offset, RTW_TX_DESC_W9_TIM_OFFSET); in rtw_tx_fill_tx_desc()
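Each tx_desc word above is assembled with le32_encode_bits(), which shifts a value into the bit range named by its field mask and returns the result as a little-endian word. A self-contained sketch of the same packing, using made-up field masks rather than the real RTW_TX_DESC_W0_* definitions:

#include <stdint.h>
#include <stdio.h>

/* hypothetical field masks within one 32-bit descriptor word */
#define W0_TXPKTSIZE 0x0000ffffu   /* bits 0..15  */
#define W0_OFFSET    0x00ff0000u   /* bits 16..23 */
#define W0_BMC       0x01000000u   /* bit 24      */

/* place val into the contiguous bit range described by mask */
static uint32_t encode_bits(uint32_t val, uint32_t mask)
{
        return (val << __builtin_ctz(mask)) & mask;
}

int main(void)
{
        /* pack a 1500-byte frame with a 40-byte descriptor offset, unicast */
        uint32_t w0 = encode_bits(1500, W0_TXPKTSIZE) |
                      encode_bits(40, W0_OFFSET) |
                      encode_bits(0, W0_BMC);

        printf("w0 = 0x%08x\n", (unsigned int)w0);   /* 0x002805dc */
        return 0;
}

In the kernel helper the packed word is additionally converted with cpu_to_le32(), so the descriptor layout stays the same regardless of host endianness.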
93 u8 exp = sta->deflink.ht_cap.ampdu_factor; in get_tx_ampdu_factor()
96 * max aggregation num, which represents val * 2 packets can be in get_tx_ampdu_factor()
99 return (BIT(2) << exp) - 1; in get_tx_ampdu_factor()
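The comment partially shown above explains the math: ampdu_factor is the HT exponent, the maximum A-MPDU length is 2^(13 + exp) - 1 bytes, and per the comment the descriptor's aggregation-number field uses the 8 KiB baseline divided by two, i.e. 4 = BIT(2), as its base, hence (BIT(2) << exp) - 1. A quick standalone check of both quantities:

#include <stdio.h>

int main(void)
{
        for (unsigned int exp = 0; exp <= 3; exp++) {
                unsigned long max_len = (1ul << (13 + exp)) - 1; /* max A-MPDU bytes */
                unsigned int agg_num = (4u << exp) - 1;          /* (BIT(2) << exp) - 1 */

                printf("exp=%u  max A-MPDU=%lu bytes  MAX_AGG_NUM=%u\n",
                       exp, max_len, agg_num);
        }
        return 0;
}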
104 return sta->deflink.ht_cap.ampdu_density; in get_tx_ampdu_density()
112 if (rtwdev->hal.rf_type == RF_2T2R && sta->deflink.ht_cap.mcs.rx_mask[1] != 0) in get_highest_ht_tx_rate()
123 struct rtw_efuse *efuse = &rtwdev->efuse; in get_highest_vht_tx_rate()
127 tx_mcs_map = le16_to_cpu(sta->deflink.vht_cap.vht_mcs.tx_mcs_map); in get_highest_vht_tx_rate()
128 if (efuse->hw_cap.nss == 1) { in get_highest_vht_tx_rate()
141 } else if (efuse->hw_cap.nss >= 2) { in get_highest_vht_tx_rate()
164 struct rtw_tx_report *tx_report = &rtwdev->tx_report; in rtw_tx_report_enable()
170 pkt_info->sn = (atomic_inc_return(&tx_report->sn) << 2) & 0xfc; in rtw_tx_report_enable()
171 pkt_info->report = true; in rtw_tx_report_enable()
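rtw_tx_report_enable() tags a frame for a firmware TX report. The tag comes from a per-device atomic counter, of which only six bits survive: they are shifted into bit positions 2-7 (mask 0xfc) of the SW_DEFINE word, so the tag space wraps after 64 tagged frames. The masking in isolation:

#include <stdio.h>

int main(void)
{
        unsigned int counter = 62;

        for (int i = 0; i < 4; i++) {
                unsigned int sn = (++counter << 2) & 0xfc; /* keep 6 bits at positions 2..7 */
                printf("counter=%u  sn=0x%02x\n", counter, sn);
        }
        /* counter 63 -> 0xfc, 64 -> 0x00, 65 -> 0x04: the tag wraps every 64 frames */
        return 0;
}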
177 struct rtw_tx_report *tx_report = &rtwdev->tx_report; in rtw_tx_report_purge_timer()
180 if (skb_queue_len(&tx_report->queue) == 0) in rtw_tx_report_purge_timer()
185 spin_lock_irqsave(&tx_report->q_lock, flags); in rtw_tx_report_purge_timer()
186 skb_queue_purge(&tx_report->queue); in rtw_tx_report_purge_timer()
187 spin_unlock_irqrestore(&tx_report->q_lock, flags); in rtw_tx_report_purge_timer()
192 struct rtw_tx_report *tx_report = &rtwdev->tx_report; in rtw_tx_report_enqueue()
197 drv_data = (u8 *)IEEE80211_SKB_CB(skb)->status.status_driver_data; in rtw_tx_report_enqueue()
200 spin_lock_irqsave(&tx_report->q_lock, flags); in rtw_tx_report_enqueue()
201 __skb_queue_tail(&tx_report->queue, skb); in rtw_tx_report_enqueue()
202 spin_unlock_irqrestore(&tx_report->q_lock, flags); in rtw_tx_report_enqueue()
204 mod_timer(&tx_report->purge_timer, jiffies + RTW_TX_PROBE_TIMEOUT); in rtw_tx_report_enqueue()
216 info->flags |= IEEE80211_TX_STAT_ACK; in rtw_tx_report_tx_status()
218 info->flags &= ~IEEE80211_TX_STAT_ACK; in rtw_tx_report_tx_status()
220 ieee80211_tx_status_irqsafe(rtwdev->hw, skb); in rtw_tx_report_tx_status()
225 struct rtw_tx_report *tx_report = &rtwdev->tx_report; in rtw_tx_report_handle()
235 sn = GET_CCX_REPORT_SEQNUM_V0(c2h->payload); in rtw_tx_report_handle()
236 st = GET_CCX_REPORT_STATUS_V0(c2h->payload); in rtw_tx_report_handle()
238 sn = GET_CCX_REPORT_SEQNUM_V1(c2h->payload); in rtw_tx_report_handle()
239 st = GET_CCX_REPORT_STATUS_V1(c2h->payload); in rtw_tx_report_handle()
242 spin_lock_irqsave(&tx_report->q_lock, flags); in rtw_tx_report_handle()
243 skb_queue_walk_safe(&tx_report->queue, cur, tmp) { in rtw_tx_report_handle()
244 n = (u8 *)IEEE80211_SKB_CB(cur)->status.status_driver_data; in rtw_tx_report_handle()
246 __skb_unlink(cur, &tx_report->queue); in rtw_tx_report_handle()
251 spin_unlock_irqrestore(&tx_report->q_lock, flags); in rtw_tx_report_handle()
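rtw_tx_report_handle() reads the firmware's CCX report (sequence number and status, in the V0 or V1 layout) and walks the pending queue under q_lock, unlinking the skb whose stored driver data carries the same sequence number. A simplified userspace stand-in for that lookup; the queue and entry types below are illustrative, not the kernel API:

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

struct pending_tx {
        uint8_t sn;        /* value written into the SW_DEFINE field at TX time */
        bool reported;
};

/* find the frame the firmware is reporting on and record its fate */
static bool handle_ccx_report(struct pending_tx *q, int n, unsigned int sn, bool acked)
{
        for (int i = 0; i < n; i++) {
                if (!q[i].reported && q[i].sn == sn) {
                        q[i].reported = true;
                        printf("sn 0x%02x: %s\n", sn, acked ? "ACKed" : "lost");
                        return true;
                }
        }
        return false;   /* stale or already purged entry: report is ignored */
}

int main(void)
{
        struct pending_tx q[] = { { 0x04, false }, { 0x08, false } };

        handle_ccx_report(q, 2, 0x08, true);
        return 0;
}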
258 struct ieee80211_vif *vif = tx_info->control.vif; in rtw_get_mgmt_rate()
259 bool force_lowest = test_bit(RTW_FLAG_FORCE_LOWEST_RATE, rtwdev->flags); in rtw_get_mgmt_rate()
261 if (!vif || !vif->bss_conf.basic_rates || ignore_rate || force_lowest) in rtw_get_mgmt_rate()
264 return __ffs(vif->bss_conf.basic_rates) + lowest_rate; in rtw_get_mgmt_rate()
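rtw_get_mgmt_rate() picks the lowest rate the BSS advertises as basic: __ffs() returns the index of the lowest set bit in bss_conf.basic_rates, and adding lowest_rate (DESC_RATE1M on 2.4 GHz, DESC_RATE6M otherwise, per the caller below) turns that band-relative index into a descriptor rate. A standalone illustration; the rate table is the usual 2.4 GHz bitrate ordering and is used here only for the example:

#include <stdio.h>
#include <strings.h>   /* ffs() */

int main(void)
{
        /* typical 2.4 GHz bitrate table order: 1, 2, 5.5, 11, 6, 9, 12 ... Mbit/s */
        const char *rates[] = { "1M", "2M", "5.5M", "11M", "6M", "9M", "12M" };

        unsigned int basic_rates = 0x15;            /* bits 0, 2, 4 -> 1, 5.5, 6 Mbit/s */
        unsigned int lowest = ffs(basic_rates) - 1; /* userspace ffs() is 1-based, __ffs() is 0-based */

        printf("lowest basic rate: %s\n", rates[lowest]);   /* "1M" */
        return 0;
}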
272 if (rtwdev->hal.current_band_type == RTW_BAND_2G) { in rtw_tx_pkt_info_update_rate()
273 pkt_info->rate_id = RTW_RATEID_B_20M; in rtw_tx_pkt_info_update_rate()
274 pkt_info->rate = rtw_get_mgmt_rate(rtwdev, skb, DESC_RATE1M, in rtw_tx_pkt_info_update_rate()
277 pkt_info->rate_id = RTW_RATEID_G; in rtw_tx_pkt_info_update_rate()
278 pkt_info->rate = rtw_get_mgmt_rate(rtwdev, skb, DESC_RATE6M, in rtw_tx_pkt_info_update_rate()
282 pkt_info->use_rate = true; in rtw_tx_pkt_info_update_rate()
283 pkt_info->dis_rate_fallback = true; in rtw_tx_pkt_info_update_rate()
293 if (info && info->control.hw_key) { in rtw_tx_pkt_info_update_sec()
294 struct ieee80211_key_conf *key = info->control.hw_key; in rtw_tx_pkt_info_update_sec()
296 switch (key->cipher) { in rtw_tx_pkt_info_update_sec()
310 pkt_info->sec_type = sec_type; in rtw_tx_pkt_info_update_sec()
319 pkt_info->dis_qselseq = true; in rtw_tx_mgmt_pkt_info_update()
320 pkt_info->en_hwseq = true; in rtw_tx_mgmt_pkt_info_update()
321 pkt_info->hw_ssn_sel = 0; in rtw_tx_mgmt_pkt_info_update()
330 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in rtw_tx_data_pkt_info_update()
332 struct ieee80211_hw *hw = rtwdev->hw; in rtw_tx_data_pkt_info_update()
333 struct rtw_dm_info *dm_info = &rtwdev->dm_info; in rtw_tx_data_pkt_info_update()
346 seq = (le16_to_cpu(hdr->seq_ctrl) & IEEE80211_SCTL_SEQ) >> 4; in rtw_tx_data_pkt_info_update()
352 if (info->flags & IEEE80211_TX_CTL_AMPDU) { in rtw_tx_data_pkt_info_update()
358 if (info->control.use_rts || skb->len > hw->wiphy->rts_threshold) in rtw_tx_data_pkt_info_update()
359 pkt_info->rts = true; in rtw_tx_data_pkt_info_update()
361 if (sta->deflink.vht_cap.vht_supported) in rtw_tx_data_pkt_info_update()
363 else if (sta->deflink.ht_cap.ht_supported) in rtw_tx_data_pkt_info_update()
365 else if (sta->deflink.supp_rates[0] <= 0xf) in rtw_tx_data_pkt_info_update()
370 si = (struct rtw_sta_info *)sta->drv_priv; in rtw_tx_data_pkt_info_update()
372 bw = si->bw_mode; in rtw_tx_data_pkt_info_update()
373 rate_id = si->rate_id; in rtw_tx_data_pkt_info_update()
374 stbc = rtwdev->hal.txrx_1ss ? false : si->stbc_en; in rtw_tx_data_pkt_info_update()
375 ldpc = si->ldpc_en; in rtw_tx_data_pkt_info_update()
378 pkt_info->seq = seq; in rtw_tx_data_pkt_info_update()
379 pkt_info->ampdu_factor = ampdu_factor; in rtw_tx_data_pkt_info_update()
380 pkt_info->ampdu_density = ampdu_density; in rtw_tx_data_pkt_info_update()
381 pkt_info->ampdu_en = ampdu_en; in rtw_tx_data_pkt_info_update()
382 pkt_info->rate = rate; in rtw_tx_data_pkt_info_update()
383 pkt_info->rate_id = rate_id; in rtw_tx_data_pkt_info_update()
384 pkt_info->bw = bw; in rtw_tx_data_pkt_info_update()
385 pkt_info->stbc = stbc; in rtw_tx_data_pkt_info_update()
386 pkt_info->ldpc = ldpc; in rtw_tx_data_pkt_info_update()
388 fix_rate = dm_info->fix_rate; in rtw_tx_data_pkt_info_update()
390 pkt_info->rate = fix_rate; in rtw_tx_data_pkt_info_update()
391 pkt_info->dis_rate_fallback = true; in rtw_tx_data_pkt_info_update()
392 pkt_info->use_rate = true; in rtw_tx_data_pkt_info_update()
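In rtw_tx_data_pkt_info_update() the descriptor's sequence number is lifted from the 802.11 Sequence Control field: the fragment number occupies the low four bits, so the masked value is shifted right by four. A standalone check of that extraction:

#include <stdio.h>

#define SCTL_SEQ 0xfff0u    /* sequence-number bits of the Sequence Control field */

int main(void)
{
        unsigned int seq_ctrl = 0x07b3;                /* sequence 123, fragment 3 */
        unsigned int seq = (seq_ctrl & SCTL_SEQ) >> 4;

        printf("seq=%u frag=%u\n", seq, seq_ctrl & 0x000f);  /* seq=123 frag=3 */
        return 0;
}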
401 const struct rtw_chip_info *chip = rtwdev->chip; in rtw_tx_pkt_info_update()
403 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in rtw_tx_pkt_info_update()
406 __le16 fc = hdr->frame_control; in rtw_tx_pkt_info_update()
410 si = (struct rtw_sta_info *)sta->drv_priv; in rtw_tx_pkt_info_update()
411 vif = si->vif; in rtw_tx_pkt_info_update()
419 bmc = is_broadcast_ether_addr(hdr->addr1) || in rtw_tx_pkt_info_update()
420 is_multicast_ether_addr(hdr->addr1); in rtw_tx_pkt_info_update()
422 if (info->flags & IEEE80211_TX_CTL_REQ_TX_STATUS) in rtw_tx_pkt_info_update()
425 pkt_info->bmc = bmc; in rtw_tx_pkt_info_update()
427 pkt_info->tx_pkt_size = skb->len; in rtw_tx_pkt_info_update()
428 pkt_info->offset = chip->tx_pkt_desc_sz; in rtw_tx_pkt_info_update()
429 pkt_info->qsel = skb->priority; in rtw_tx_pkt_info_update()
430 pkt_info->ls = true; in rtw_tx_pkt_info_update()
441 const struct rtw_chip_info *chip = rtwdev->chip; in rtw_tx_rsvd_page_pkt_info_update()
442 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in rtw_tx_rsvd_page_pkt_info_update()
449 pkt_info->qsel = TX_DESC_QSEL_MGMT; in rtw_tx_rsvd_page_pkt_info_update()
453 bmc = is_broadcast_ether_addr(hdr->addr1) || in rtw_tx_rsvd_page_pkt_info_update()
454 is_multicast_ether_addr(hdr->addr1); in rtw_tx_rsvd_page_pkt_info_update()
455 pkt_info->bmc = bmc; in rtw_tx_rsvd_page_pkt_info_update()
456 pkt_info->tx_pkt_size = skb->len; in rtw_tx_rsvd_page_pkt_info_update()
457 pkt_info->offset = chip->tx_pkt_desc_sz; in rtw_tx_rsvd_page_pkt_info_update()
458 pkt_info->ls = true; in rtw_tx_rsvd_page_pkt_info_update()
460 pkt_info->nav_use_hdr = true; in rtw_tx_rsvd_page_pkt_info_update()
462 pkt_info->dis_qselseq = true; in rtw_tx_rsvd_page_pkt_info_update()
463 pkt_info->en_hwseq = true; in rtw_tx_rsvd_page_pkt_info_update()
464 pkt_info->hw_ssn_sel = 0; in rtw_tx_rsvd_page_pkt_info_update()
467 pkt_info->bt_null = true; in rtw_tx_rsvd_page_pkt_info_update()
473 rsvd_pkt = list_first_entry_or_null(&rtwdev->rsvd_page_list, in rtw_tx_rsvd_page_pkt_info_update()
476 if (rsvd_pkt && rsvd_pkt->tim_offset != 0) { in rtw_tx_rsvd_page_pkt_info_update()
478 pkt_info->tim_offset = rsvd_pkt->tim_offset - hdr_len; in rtw_tx_rsvd_page_pkt_info_update()
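For a beacon downloaded as a reserved page, the MAC header length is subtracted from the TIM offset recorded for the page before the result is written to the descriptor. A trivial worked example with hypothetical sizes:

#include <stdio.h>

int main(void)
{
        unsigned int hdr_len = 24;          /* management-frame MAC header */
        unsigned int tim_in_frame = 36;     /* TIM element offset from the frame start */

        /* what would land in the descriptor's TIM_OFFSET field in this sketch */
        printf("tim_offset = %u\n", tim_in_frame - hdr_len);   /* 12 */
        return 0;
}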
492 const struct rtw_chip_info *chip = rtwdev->chip; in rtw_tx_write_data_rsvd_page_get()
497 tx_pkt_desc_sz = chip->tx_pkt_desc_sz; in rtw_tx_write_data_rsvd_page_get()
518 const struct rtw_chip_info *chip = rtwdev->chip; in rtw_tx_write_data_h2c_get()
523 tx_pkt_desc_sz = chip->tx_pkt_desc_sz; in rtw_tx_write_data_h2c_get()
533 pkt_info->tx_pkt_size = size; in rtw_tx_write_data_h2c_get()
546 rtw_tx_pkt_info_update(rtwdev, &pkt_info, control->sta, skb); in rtw_tx()
558 ieee80211_free_txskb(rtwdev->hw, skb); in rtw_tx()
565 struct ieee80211_txq *txq = rtwtxq_to_txq(rtwtxq); in rtw_txq_check_agg() local
569 if (test_bit(RTW_TXQ_AMPDU, &rtwtxq->flags)) { in rtw_txq_check_agg()
571 info->flags |= IEEE80211_TX_CTL_AMPDU; in rtw_txq_check_agg()
578 if (test_bit(RTW_TXQ_BLOCK_BA, &rtwtxq->flags)) in rtw_txq_check_agg()
581 if (unlikely(skb->protocol == cpu_to_be16(ETH_P_PAE))) in rtw_txq_check_agg()
584 if (!txq->sta) in rtw_txq_check_agg()
587 si = (struct rtw_sta_info *)txq->sta->drv_priv; in rtw_txq_check_agg()
588 set_bit(txq->tid, si->tid_ba); in rtw_txq_check_agg()
590 ieee80211_queue_work(rtwdev->hw, &rtwdev->ba_work); in rtw_txq_check_agg()
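rtw_txq_check_agg() does not negotiate block-ack in the hot TX path; it records the TID in the station's tid_ba bitmap and schedules ba_work, which can then start the A-MPDU sessions from process context. A minimal sketch of that defer-and-drain pattern (the names are illustrative):

#include <stdint.h>
#include <stdio.h>

/* one bit per TID that still needs a block-ack session */
static uint16_t tid_ba;

/* TX path: too hot to negotiate BA here, just remember the TID */
static void mark_tid_for_ba(unsigned int tid)
{
        tid_ba |= 1u << tid;
}

/* deferred work: start a session for every recorded TID, then clear the bit */
static void ba_work(void)
{
        for (unsigned int tid = 0; tid < 16; tid++) {
                if (tid_ba & (1u << tid)) {
                        printf("start BA session for TID %u\n", tid);
                        tid_ba &= ~(1u << tid);
                }
        }
}

int main(void)
{
        mark_tid_for_ba(0);
        mark_tid_for_ba(5);
        ba_work();
        return 0;
}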
597 struct ieee80211_txq *txq = rtwtxq_to_txq(rtwtxq); in rtw_txq_push_skb() local
603 rtw_tx_pkt_info_update(rtwdev, &pkt_info, txq->sta, skb); in rtw_txq_push_skb()
615 struct ieee80211_txq *txq = rtwtxq_to_txq(rtwtxq); in rtw_txq_dequeue() local
618 skb = ieee80211_tx_dequeue(rtwdev->hw, txq); in rtw_txq_dequeue()
654 spin_lock_bh(&rtwdev->txq_lock); in __rtw_tx_work()
656 list_for_each_entry_safe(rtwtxq, tmp, &rtwdev->txqs, list) { in __rtw_tx_work()
657 struct ieee80211_txq *txq = rtwtxq_to_txq(rtwtxq); in __rtw_tx_work() local
661 ieee80211_txq_get_depth(txq, &frame_cnt, &byte_cnt); in __rtw_tx_work()
664 list_del_init(&rtwtxq->list); in __rtw_tx_work()
669 spin_unlock_bh(&rtwdev->txq_lock); in __rtw_tx_work()
679 void rtw_txq_init(struct rtw_dev *rtwdev, struct ieee80211_txq *txq) in rtw_txq_init() argument
683 if (!txq) in rtw_txq_init()
686 rtwtxq = (struct rtw_txq *)txq->drv_priv; in rtw_txq_init()
687 INIT_LIST_HEAD(&rtwtxq->list); in rtw_txq_init()
690 void rtw_txq_cleanup(struct rtw_dev *rtwdev, struct ieee80211_txq *txq) in rtw_txq_cleanup() argument
694 if (!txq) in rtw_txq_cleanup()
697 rtwtxq = (struct rtw_txq *)txq->drv_priv; in rtw_txq_cleanup()
698 spin_lock_bh(&rtwdev->txq_lock); in rtw_txq_cleanup()
699 if (!list_empty(&rtwtxq->list)) in rtw_txq_cleanup()
700 list_del_init(&rtwtxq->list); in rtw_txq_cleanup()
701 spin_unlock_bh(&rtwdev->txq_lock); in rtw_txq_cleanup()
724 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in rtw_tx_queue_mapping()
725 __le16 fc = hdr->frame_control; in rtw_tx_queue_mapping()
733 else if (is_broadcast_ether_addr(hdr->addr1) || in rtw_tx_queue_mapping()
734 is_multicast_ether_addr(hdr->addr1)) in rtw_tx_queue_mapping()
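These last matches are from rtw_tx_queue_mapping(): broadcast and multicast data are split off to their own hardware queue (the else-if shown above), after beacons and other management frames have already been given dedicated queues, and only ordinary unicast data follows the skb's per-AC queue mapping. A condensed illustration of that decision order, with placeholder queue names rather than the driver's enum:

#include <stdbool.h>
#include <stdio.h>

enum txq { TXQ_BCN, TXQ_MGMT, TXQ_HI, TXQ_AC };

static enum txq map_queue(bool is_beacon, bool is_mgmt_or_ctl, bool is_bmc)
{
        if (is_beacon)
                return TXQ_BCN;          /* beacons: beacon queue */
        if (is_mgmt_or_ctl)
                return TXQ_MGMT;         /* management/control frames */
        if (is_bmc)
                return TXQ_HI;           /* broadcast/multicast data */
        return TXQ_AC;                   /* unicast data: per-AC queue */
}

int main(void)
{
        printf("queue = %d\n", map_queue(false, false, true));   /* TXQ_HI (2) */
        return 0;
}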