Lines matching "tx", "ts", "mask" in drivers/net/wireless/ath/ath9k/xmit.c
2 * Copyright (c) 2008-2011 Atheros Communications Inc.
17 #include <linux/dma-mapping.h>
33 #define TIME_SYMBOLS_HALFGI(t) (((t) * 5 - 4) / 18)
35 #define NUM_SYMBOLS_PER_USEC_HALFGI(_usec) ((((_usec) * 5) - 4) / 18)
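With a half (short) guard interval an OFDM symbol lasts 3.6 us, so t microseconds span roughly t / 3.6 = 5t / 18 symbols; the "- 4" bias keeps the integer division from over-counting right at a symbol boundary. A minimal standalone check of that arithmetic (editor's sketch, plain C):

#include <assert.h>

#define NUM_SYMBOLS_PER_USEC_HALFGI(_usec) ((((_usec) * 5) - 4) / 18)

int main(void)
{
	assert(NUM_SYMBOLS_PER_USEC_HALFGI(8) == 2);   /* 8 / 3.6 = 2.2 -> 2 */
	assert(NUM_SYMBOLS_PER_USEC_HALFGI(16) == 4);  /* 16 / 3.6 = 4.4 -> 4 */
	return 0;
}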
49 { 104, 216 }, /* 3: 16-QAM 1/2 */
50 { 156, 324 }, /* 4: 16-QAM 3/4 */
51 { 208, 432 }, /* 5: 64-QAM 2/3 */
52 { 234, 486 }, /* 6: 64-QAM 3/4 */
53 { 260, 540 }, /* 7: 64-QAM 5/6 */
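Each table entry reads as { bits per symbol at HT20, bits per symbol at HT40 } for one MCS; multiplied by the symbol rate this reproduces the nominal PHY rates. A quick check (editor's arithmetic, not part of the source):

#include <stdio.h>

int main(void)
{
	printf("%d kbps\n", 260 * 1000 / 4);   /* MCS 7 HT20, 4 us symbol: 65000  */
	printf("%d kbps\n", 540 * 1000 / 4);   /* MCS 7 HT40, 4 us symbol: 135000 */
	printf("%d kbps\n", 540 * 10000 / 36); /* MCS 7 HT40, 3.6 us GI:   150000 */
	return 0;
}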
64 struct ath_tx_status *ts, int txok);
68 struct ath_tx_status *ts, int nframes, int nbad,
93 struct ieee80211_sta *sta = info->status.status_driver_data[0]; in ath_tx_status()
95 if (info->flags & (IEEE80211_TX_CTL_REQ_TX_STATUS | in ath_tx_status()
108 __releases(&txq->axq_lock) in ath_txq_unlock_complete()
110 struct ieee80211_hw *hw = sc->hw; in ath_txq_unlock_complete()
115 skb_queue_splice_init(&txq->complete_q, &q); in ath_txq_unlock_complete()
116 spin_unlock_bh(&txq->axq_lock); in ath_txq_unlock_complete()
127 ieee80211_schedule_txq(sc->hw, queue); in ath_tx_queue_tid()
132 struct ath_softc *sc = hw->priv; in ath9k_wake_tx_queue()
133 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath9k_wake_tx_queue()
134 struct ath_atx_tid *tid = (struct ath_atx_tid *) queue->drv_priv; in ath9k_wake_tx_queue()
135 struct ath_txq *txq = tid->txq; in ath9k_wake_tx_queue()
137 ath_dbg(common, QUEUE, "Waking TX queue: %pM (%d)\n", in ath9k_wake_tx_queue()
138 queue->sta ? queue->sta->addr : queue->vif->addr, in ath9k_wake_tx_queue()
139 tid->tidno); in ath9k_wake_tx_queue()
150 sizeof(tx_info->status.status_driver_data)); in get_frame_info()
151 return (struct ath_frame_info *) &tx_info->status.status_driver_data[0]; in get_frame_info()
156 if (!tid->an->sta) in ath_send_bar()
159 ieee80211_send_bar(tid->an->vif, tid->an->sta->addr, tid->tidno, in ath_send_bar()
172 ratetbl = rcu_dereference(sta->rates); in ath_merge_ratetbl()
176 if (tx_info->control.rates[0].idx < 0 || in ath_merge_ratetbl()
177 tx_info->control.rates[0].count == 0) in ath_merge_ratetbl()
181 bf->rates[0] = tx_info->control.rates[0]; in ath_merge_ratetbl()
186 bf->rates[i].idx = ratetbl->rate[i].idx; in ath_merge_ratetbl()
187 bf->rates[i].flags = ratetbl->rate[i].flags; in ath_merge_ratetbl()
188 if (tx_info->control.use_rts) in ath_merge_ratetbl()
189 bf->rates[i].count = ratetbl->rate[i].count_rts; in ath_merge_ratetbl()
190 else if (tx_info->control.use_cts_prot) in ath_merge_ratetbl()
191 bf->rates[i].count = ratetbl->rate[i].count_cts; in ath_merge_ratetbl()
193 bf->rates[i].count = ratetbl->rate[i].count; in ath_merge_ratetbl()
204 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_set_rates()
207 ieee80211_get_tx_rates(vif, sta, bf->bf_mpdu, bf->rates, in ath_set_rates()
208 ARRAY_SIZE(bf->rates)); in ath_set_rates()
215 int q = fi->txq; in ath_txq_skb_done()
220 txq = sc->tx.txq_map[q]; in ath_txq_skb_done()
221 if (WARN_ON(--txq->pending_frames < 0)) in ath_txq_skb_done()
222 txq->pending_frames = 0; in ath_txq_skb_done()
229 u8 tidno = skb->priority & IEEE80211_QOS_CTL_TID_MASK; in ath_get_skb_tid()
237 struct ath_softc *sc = tid->an->sc; in ath_tid_pull()
238 struct ieee80211_hw *hw = sc->hw; in ath_tid_pull()
240 .txq = tid->txq, in ath_tid_pull()
241 .sta = tid->an->sta, in ath_tid_pull()
249 return -ENOENT; in ath_tid_pull()
258 if (tid->txq == sc->tx.txq_map[q]) { in ath_tid_pull()
260 fi->txq = q; in ath_tid_pull()
261 ++tid->txq->pending_frames; in ath_tid_pull()
272 *skb = __skb_dequeue(&tid->retry_q); in ath_tid_dequeue()
281 struct ath_txq *txq = tid->txq; in ath_tx_flush_tid()
285 struct ath_tx_status ts; in ath_tx_flush_tid() local
291 memset(&ts, 0, sizeof(ts)); in ath_tx_flush_tid()
293 while ((skb = __skb_dequeue(&tid->retry_q))) { in ath_tx_flush_tid()
295 bf = fi->bf; in ath_tx_flush_tid()
298 ieee80211_free_txskb(sc->hw, skb); in ath_tx_flush_tid()
302 if (fi->baw_tracked) { in ath_tx_flush_tid()
307 list_add_tail(&bf->list, &bf_head); in ath_tx_flush_tid()
308 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0); in ath_tx_flush_tid()
313 ath_send_bar(tid, tid->seq_start); in ath_tx_flush_tid()
321 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_tx_update_baw()
322 u16 seqno = bf->bf_state.seqno; in ath_tx_update_baw()
325 if (!fi->baw_tracked) in ath_tx_update_baw()
328 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_update_baw()
329 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_update_baw()
331 __clear_bit(cindex, tid->tx_buf); in ath_tx_update_baw()
333 while (tid->baw_head != tid->baw_tail && !test_bit(tid->baw_head, tid->tx_buf)) { in ath_tx_update_baw()
334 INCR(tid->seq_start, IEEE80211_SEQ_MAX); in ath_tx_update_baw()
335 INCR(tid->baw_head, ATH_TID_MAX_BUFS); in ath_tx_update_baw()
336 if (tid->bar_index >= 0) in ath_tx_update_baw()
337 tid->bar_index--; in ath_tx_update_baw()
344 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_tx_addto_baw()
345 u16 seqno = bf->bf_state.seqno; in ath_tx_addto_baw()
348 if (fi->baw_tracked) in ath_tx_addto_baw()
351 index = ATH_BA_INDEX(tid->seq_start, seqno); in ath_tx_addto_baw()
352 cindex = (tid->baw_head + index) & (ATH_TID_MAX_BUFS - 1); in ath_tx_addto_baw()
353 __set_bit(cindex, tid->tx_buf); in ath_tx_addto_baw()
354 fi->baw_tracked = 1; in ath_tx_addto_baw()
356 if (index >= ((tid->baw_tail - tid->baw_head) & in ath_tx_addto_baw()
357 (ATH_TID_MAX_BUFS - 1))) { in ath_tx_addto_baw()
358 tid->baw_tail = cindex; in ath_tx_addto_baw()
359 INCR(tid->baw_tail, ATH_TID_MAX_BUFS); in ath_tx_addto_baw()
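ath_tx_update_baw() and ath_tx_addto_baw() keep the block-ack window as a circular bitmap: ATH_BA_INDEX() maps a sequence number to an offset from seq_start, the ATH_TID_MAX_BUFS mask turns that offset into a slot, and seq_start only advances once the slot at the head has completed. A stripped-down sketch of the mechanism (editor's code, not driver code; 4096 stands in for IEEE80211_SEQ_MAX and 64 for ATH_TID_MAX_BUFS):

#include <stdbool.h>

#define SEQ_MAX  4096
#define NBUFS    64

struct baw {
	bool tx_buf[NBUFS];  /* the driver uses a bitmap plus head/tail */
	int head, tail;
	int seq_start;
};

int ba_index(int seq_start, int seqno)
{
	return (seqno - seq_start) & (SEQ_MAX - 1);
}

void baw_add(struct baw *w, int seqno)
{
	w->tx_buf[(w->head + ba_index(w->seq_start, seqno)) & (NBUFS - 1)] = true;
}

void baw_complete(struct baw *w, int seqno)
{
	w->tx_buf[(w->head + ba_index(w->seq_start, seqno)) & (NBUFS - 1)] = false;
	/* slide the window past every already-completed slot at the head */
	while (w->head != w->tail && !w->tx_buf[w->head]) {
		w->seq_start = (w->seq_start + 1) & (SEQ_MAX - 1);
		w->head = (w->head + 1) & (NBUFS - 1);
	}
}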
370 struct ath_tx_status ts; in ath_tid_drain() local
374 memset(&ts, 0, sizeof(ts)); in ath_tid_drain()
379 bf = fi->bf; in ath_tid_drain()
386 list_add_tail(&bf->list, &bf_head); in ath_tid_drain()
387 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0); in ath_tid_drain()
395 struct ath_buf *bf = fi->bf; in ath_tx_set_retry()
397 int prev = fi->retries; in ath_tx_set_retry()
399 TX_STAT_INC(sc, txq->axq_qnum, a_retries); in ath_tx_set_retry()
400 fi->retries += count; in ath_tx_set_retry()
405 hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_set_retry()
406 hdr->frame_control |= cpu_to_le16(IEEE80211_FCTL_RETRY); in ath_tx_set_retry()
407 dma_sync_single_for_device(sc->dev, bf->bf_buf_addr, in ath_tx_set_retry()
415 spin_lock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
417 if (unlikely(list_empty(&sc->tx.txbuf))) { in ath_tx_get_buffer()
418 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
422 bf = list_first_entry(&sc->tx.txbuf, struct ath_buf, list); in ath_tx_get_buffer()
423 list_del(&bf->list); in ath_tx_get_buffer()
425 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_get_buffer()
432 spin_lock_bh(&sc->tx.txbuflock); in ath_tx_return_buffer()
433 list_add_tail(&bf->list, &sc->tx.txbuf); in ath_tx_return_buffer()
434 spin_unlock_bh(&sc->tx.txbuflock); in ath_tx_return_buffer()
447 tbf->bf_mpdu = bf->bf_mpdu; in ath_clone_txbuf()
448 tbf->bf_buf_addr = bf->bf_buf_addr; in ath_clone_txbuf()
449 memcpy(tbf->bf_desc, bf->bf_desc, sc->sc_ah->caps.tx_desc_len); in ath_clone_txbuf()
450 tbf->bf_state = bf->bf_state; in ath_clone_txbuf()
451 tbf->bf_state.stale = false; in ath_clone_txbuf()
457 struct ath_tx_status *ts, int txok, in ath_tx_count_frames() argument
472 seq_st = ts->ts_seqnum; in ath_tx_count_frames()
473 memcpy(ba, &ts->ba, WME_BA_BMP_SIZE >> 3); in ath_tx_count_frames()
477 ba_index = ATH_BA_INDEX(seq_st, bf->bf_state.seqno); in ath_tx_count_frames()
483 bf = bf->bf_next; in ath_tx_count_frames()
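On completion the same index math runs against the hardware's block-ack bitmap: each subframe's sequence number becomes a bit position relative to ts_seqnum, and a clear bit marks the subframe as failed. Roughly (editor's sketch; the 32-bit-word bit layout is assumed from the driver's ATH_BA_ISSET-style macros):

void count_frames(const unsigned int *ba, int seq_st,
		  const int *seqnos, int n, int *nframes, int *nbad)
{
	int i, idx;

	*nframes = *nbad = 0;
	for (i = 0; i < n; i++) {
		(*nframes)++;
		idx = (seqnos[i] - seq_st) & (4096 - 1);   /* ATH_BA_INDEX */
		if (!(ba[idx >> 5] & (1u << (idx & 31))))  /* ~ATH_BA_ISSET */
			(*nbad)++;
	}
}

int main(void)
{
	unsigned int ba[128] = { 0 };
	int seqnos[3] = { 50, 51, 52 };
	int nframes, nbad;

	ba[50 >> 5] |= 1u << (50 & 31);  /* only seq 50 acked (seq_st = 0) */
	count_frames(ba, 0, seqnos, 3, &nframes, &nbad);
	/* nframes == 3, nbad == 2 */
	return 0;
}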
492 struct ath_tx_status *ts, int txok) in ath_tx_complete_aggr() argument
497 struct ath_buf *bf_next, *bf_last = bf->bf_lastbf; in ath_tx_complete_aggr()
507 bool flush = !!(ts->ts_status & ATH9K_TX_FLUSH); in ath_tx_complete_aggr()
509 int bar_index = -1; in ath_tx_complete_aggr()
511 skb = bf->bf_mpdu; in ath_tx_complete_aggr()
514 memcpy(rates, bf->rates, sizeof(rates)); in ath_tx_complete_aggr()
516 retries = ts->ts_longretry + 1; in ath_tx_complete_aggr()
517 for (i = 0; i < ts->ts_rateindex; i++) in ath_tx_complete_aggr()
523 bf_next = bf->bf_next; in ath_tx_complete_aggr()
525 if (!bf->bf_state.stale || bf_next != NULL) in ath_tx_complete_aggr()
526 list_move_tail(&bf->list, &bf_head); in ath_tx_complete_aggr()
528 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, ts, 0); in ath_tx_complete_aggr()
535 an = (struct ath_node *)sta->drv_priv; in ath_tx_complete_aggr()
536 seq_first = tid->seq_start; in ath_tx_complete_aggr()
537 isba = ts->ts_flags & ATH9K_TX_BA; in ath_tx_complete_aggr()
540 * The hardware occasionally sends a tx status for the wrong TID. in ath_tx_complete_aggr()
547 if (isba && tid->tidno != ts->tid) in ath_tx_complete_aggr()
554 if (ts->ts_flags & ATH9K_TX_BA) { in ath_tx_complete_aggr()
555 seq_st = ts->ts_seqnum; in ath_tx_complete_aggr()
556 memcpy(ba, &ts->ba, WME_BA_BMP_SIZE >> 3); in ath_tx_complete_aggr()
565 if (sc->sc_ah->opmode == NL80211_IFTYPE_STATION) in ath_tx_complete_aggr()
572 ath_tx_count_frames(sc, bf, ts, txok, &nframes, &nbad); in ath_tx_complete_aggr()
574 u16 seqno = bf->bf_state.seqno; in ath_tx_complete_aggr()
577 bf_next = bf->bf_next; in ath_tx_complete_aggr()
579 skb = bf->bf_mpdu; in ath_tx_complete_aggr()
583 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno) || in ath_tx_complete_aggr()
584 !tid->active) { in ath_tx_complete_aggr()
599 } else if (fi->retries < ATH_MAX_SW_RETRIES) { in ath_tx_complete_aggr()
600 if (txok || !an->sleeping) in ath_tx_complete_aggr()
601 ath_tx_set_retry(sc, txq, bf->bf_mpdu, in ath_tx_complete_aggr()
617 if (bf_next != NULL || !bf_last->bf_state.stale) in ath_tx_complete_aggr()
618 list_move_tail(&bf->list, &bf_head); in ath_tx_complete_aggr()
622 * complete the acked and xretried frames; update the in ath_tx_complete_aggr()
623 * block-ack window in ath_tx_complete_aggr()
628 memcpy(tx_info->control.rates, rates, sizeof(rates)); in ath_tx_complete_aggr()
629 ath_tx_rc_status(sc, bf, ts, nframes, nbad, txok); in ath_tx_complete_aggr()
631 if (bf == bf->bf_lastbf) in ath_tx_complete_aggr()
632 ath_dynack_sample_tx_ts(sc->sc_ah, in ath_tx_complete_aggr()
633 bf->bf_mpdu, in ath_tx_complete_aggr()
634 ts, sta); in ath_tx_complete_aggr()
637 ath_tx_complete_buf(sc, bf, txq, &bf_head, sta, ts, in ath_tx_complete_aggr()
640 if (tx_info->flags & IEEE80211_TX_STATUS_EOSP) { in ath_tx_complete_aggr()
641 tx_info->flags &= ~IEEE80211_TX_STATUS_EOSP; in ath_tx_complete_aggr()
644 /* retry the un-acked ones */ in ath_tx_complete_aggr()
645 if (bf->bf_next == NULL && bf_last->bf_state.stale) { in ath_tx_complete_aggr()
650 * Update tx baw and complete the in ath_tx_complete_aggr()
651 * frame with failed status if we in ath_tx_complete_aggr()
652 * run out of tx buf. in ath_tx_complete_aggr()
658 &bf_head, NULL, ts, in ath_tx_complete_aggr()
665 fi->bf = tbf; in ath_tx_complete_aggr()
678 /* prepend un-acked frames to the beginning of the pending frame queue */ in ath_tx_complete_aggr()
680 if (an->sleeping) in ath_tx_complete_aggr()
681 ieee80211_sta_set_buffered(sta, tid->tidno, true); in ath_tx_complete_aggr()
683 skb_queue_splice_tail(&bf_pending, &tid->retry_q); in ath_tx_complete_aggr()
684 if (!an->sleeping) { in ath_tx_complete_aggr()
686 if (ts->ts_status & (ATH9K_TXERR_FILT | ATH9K_TXERR_XRETRY)) in ath_tx_complete_aggr()
687 tid->clear_ps_filter = true; in ath_tx_complete_aggr()
694 if (BAW_WITHIN(tid->seq_start, tid->baw_size, bar_seq)) in ath_tx_complete_aggr()
695 tid->bar_index = ATH_BA_INDEX(tid->seq_start, bar_seq); in ath_tx_complete_aggr()
708 struct ieee80211_tx_info *info = IEEE80211_SKB_CB(bf->bf_mpdu); in bf_is_ampdu_not_probing()
709 return bf_isampdu(bf) && !(info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE); in bf_is_ampdu_not_probing()
715 struct ath_tx_status *ts, in ath_tx_count_airtime() argument
721 airtime += ts->duration * (ts->ts_longretry + 1); in ath_tx_count_airtime()
722 for (i = 0; i < ts->ts_rateindex; i++) { in ath_tx_count_airtime()
723 int rate_dur = ath9k_hw_get_duration(sc->sc_ah, bf->bf_desc, i); in ath_tx_count_airtime()
724 airtime += rate_dur * bf->rates[i].count; in ath_tx_count_airtime()
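The airtime charged to a station is the final rate's duration times its tries, plus a full burst of configured tries at every earlier rate in the series. With made-up numbers (editor's example, mirroring the loop above):

#include <assert.h>

int main(void)
{
	int dur[2]   = { 300, 400 };  /* us per attempt at rate 0 and rate 1 */
	int count[2] = { 2, 0 };      /* count[i] only used for i < rateindex */
	int rateindex = 1;            /* ts->ts_rateindex: the rate that finished */
	int longretry = 1;            /* ts->ts_longretry: extra tries at it */
	int airtime = 0, i;

	airtime += dur[rateindex] * (longretry + 1);  /* 800 us */
	for (i = 0; i < rateindex; i++)
		airtime += dur[i] * count[i];         /* + 600 us */
	assert(airtime == 1400);
	return 0;
}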
731 struct ath_tx_status *ts, struct ath_buf *bf, in ath_tx_process_buffer() argument
734 struct ieee80211_hw *hw = sc->hw; in ath_tx_process_buffer()
741 txok = !(ts->ts_status & ATH9K_TXERR_MASK); in ath_tx_process_buffer()
742 flush = !!(ts->ts_status & ATH9K_TX_FLUSH); in ath_tx_process_buffer()
743 txq->axq_tx_inprogress = false; in ath_tx_process_buffer()
745 txq->axq_depth--; in ath_tx_process_buffer()
747 txq->axq_ampdu_depth--; in ath_tx_process_buffer()
749 ts->duration = ath9k_hw_get_duration(sc->sc_ah, bf->bf_desc, in ath_tx_process_buffer()
750 ts->ts_rateindex); in ath_tx_process_buffer()
752 hdr = (struct ieee80211_hdr *) bf->bf_mpdu->data; in ath_tx_process_buffer()
753 sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr1, hdr->addr2); in ath_tx_process_buffer()
755 struct ath_node *an = (struct ath_node *)sta->drv_priv; in ath_tx_process_buffer()
756 tid = ath_get_skb_tid(sc, an, bf->bf_mpdu); in ath_tx_process_buffer()
757 ath_tx_count_airtime(sc, sta, bf, ts, tid->tidno); in ath_tx_process_buffer()
758 if (ts->ts_status & (ATH9K_TXERR_FILT | ATH9K_TXERR_XRETRY)) in ath_tx_process_buffer()
759 tid->clear_ps_filter = true; in ath_tx_process_buffer()
764 info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_process_buffer()
765 memcpy(info->control.rates, bf->rates, in ath_tx_process_buffer()
766 sizeof(info->control.rates)); in ath_tx_process_buffer()
767 ath_tx_rc_status(sc, bf, ts, 1, txok ? 0 : 1, txok); in ath_tx_process_buffer()
768 ath_dynack_sample_tx_ts(sc->sc_ah, bf->bf_mpdu, ts, in ath_tx_process_buffer()
771 ath_tx_complete_buf(sc, bf, txq, bf_head, sta, ts, txok); in ath_tx_process_buffer()
773 ath_tx_complete_aggr(sc, txq, bf, bf_head, sta, tid, ts, txok); in ath_tx_process_buffer()
786 skb = bf->bf_mpdu; in ath_lookup_legacy()
788 rates = tx_info->control.rates; in ath_lookup_legacy()
809 int q = tid->txq->mac80211_qnum; in ath_lookup_rate()
812 skb = bf->bf_mpdu; in ath_lookup_rate()
814 rates = bf->rates; in ath_lookup_rate()
841 frmlen = sc->tx.max_aggr_framelen[q][modeidx][rates[i].idx]; in ath_lookup_rate()
850 if (tx_info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE || legacy) in ath_lookup_rate()
862 if (tid->an->maxampdu) in ath_lookup_rate()
863 aggr_limit = min(aggr_limit, tid->an->maxampdu); in ath_lookup_rate()
881 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_compute_num_delims()
889 * TODO - this could be improved to depend on the rate. in ath_compute_num_delims()
892 if ((fi->keyix != ATH9K_TXKEYIX_INVALID) && in ath_compute_num_delims()
893 !(sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA)) in ath_compute_num_delims()
900 if (first_subfrm && !AR_SREV_9580_10_OR_LATER(sc->sc_ah) && in ath_compute_num_delims()
901 (sc->sc_ah->ent_mode & AR_ENT_OTP_MIN_PKT_SIZE_DISABLE)) in ath_compute_num_delims()
914 if (tid->an->mpdudensity == 0) in ath_compute_num_delims()
917 rix = bf->rates[0].idx; in ath_compute_num_delims()
918 flags = bf->rates[0].flags; in ath_compute_num_delims()
923 nsymbols = NUM_SYMBOLS_PER_USEC_HALFGI(tid->an->mpdudensity); in ath_compute_num_delims()
925 nsymbols = NUM_SYMBOLS_PER_USEC(tid->an->mpdudensity); in ath_compute_num_delims()
935 mindelim = (minlen - frmlen) / ATH_AGGR_DELIM_SZ; in ath_compute_num_delims()
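The delimiter math converts the peer's minimum MPDU spacing into a minimum subframe length at the first TX rate, then pads any shortfall with 4-byte A-MPDU delimiters. A worked pass with assumed inputs (editor's example; the function above additionally bumps ndelim for crypto and for the chips handled at lines 892-901):

#include <assert.h>

int main(void)
{
	int nsymbits = 234;                      /* bits/symbol: MCS 6, HT20 */
	int nsymbols = (8 * 5 - 4) / 18;         /* 8 us density -> 2 half-GI symbols */
	int minlen = (nsymbols * nsymbits) / 8;  /* 58 bytes of required spacing */
	int frmlen = 40;                         /* a short subframe */
	int mindelim = (minlen - frmlen) / 4;    /* ATH_AGGR_DELIM_SZ == 4 */

	assert(mindelim == 4);
	return 0;
}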
959 bf = fi->bf; in ath_tx_get_tid_subframe()
960 if (!fi->bf) in ath_tx_get_tid_subframe()
963 bf->bf_state.stale = false; in ath_tx_get_tid_subframe()
967 ieee80211_free_txskb(sc->hw, skb); in ath_tx_get_tid_subframe()
971 bf->bf_next = NULL; in ath_tx_get_tid_subframe()
972 bf->bf_lastbf = bf; in ath_tx_get_tid_subframe()
975 tx_info->flags &= ~(IEEE80211_TX_CTL_CLEAR_PS_FILT | in ath_tx_get_tid_subframe()
983 if (!tid->active) in ath_tx_get_tid_subframe()
984 tx_info->flags &= ~IEEE80211_TX_CTL_AMPDU; in ath_tx_get_tid_subframe()
986 if (!(tx_info->flags & IEEE80211_TX_CTL_AMPDU)) { in ath_tx_get_tid_subframe()
987 bf->bf_state.bf_type = 0; in ath_tx_get_tid_subframe()
991 bf->bf_state.bf_type = BUF_AMPDU | BUF_AGGR; in ath_tx_get_tid_subframe()
992 seqno = bf->bf_state.seqno; in ath_tx_get_tid_subframe()
994 /* do not step over block-ack window */ in ath_tx_get_tid_subframe()
995 if (!BAW_WITHIN(tid->seq_start, tid->baw_size, seqno)) { in ath_tx_get_tid_subframe()
996 __skb_queue_tail(&tid->retry_q, skb); in ath_tx_get_tid_subframe()
1001 if (!skb_queue_is_first(&tid->retry_q, skb) && in ath_tx_get_tid_subframe()
1007 return -EINPROGRESS; in ath_tx_get_tid_subframe()
1010 if (tid->bar_index > ATH_BA_INDEX(tid->seq_start, seqno)) { in ath_tx_get_tid_subframe()
1011 struct ath_tx_status ts = {}; in ath_tx_get_tid_subframe() local
1015 list_add(&bf->list, &bf_head); in ath_tx_get_tid_subframe()
1017 ath_tx_complete_buf(sc, bf, txq, &bf_head, NULL, &ts, 0); in ath_tx_get_tid_subframe()
1036 #define PADBYTES(_len) ((4 - ((_len) % 4)) % 4) in ath_tx_form_aggr()
1040 al_delta, h_baw = tid->baw_size / 2; in ath_tx_form_aggr()
1051 skb = bf->bf_mpdu; in ath_tx_form_aggr()
1055 al_delta = ATH_AGGR_DELIM_SZ + fi->framelen; in ath_tx_form_aggr()
1061 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_form_aggr()
1062 if ((tx_info->flags & IEEE80211_TX_CTL_RATE_CTRL_PROBE) || in ath_tx_form_aggr()
1063 !(tx_info->flags & IEEE80211_TX_CTL_AMPDU)) in ath_tx_form_aggr()
1074 ndelim = ath_compute_num_delims(sc, tid, bf_first, fi->framelen, in ath_tx_form_aggr()
1079 bf->bf_next = NULL; in ath_tx_form_aggr()
1082 bf->bf_state.ndelim = ndelim; in ath_tx_form_aggr()
1084 list_add_tail(&bf->list, bf_q); in ath_tx_form_aggr()
1086 bf_prev->bf_next = bf; in ath_tx_form_aggr()
1096 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_form_aggr()
1099 bf->bf_lastbf = bf_prev; in ath_tx_form_aggr()
1102 al = get_frame_info(bf->bf_mpdu)->framelen; in ath_tx_form_aggr()
1103 bf->bf_state.bf_type = BUF_AMPDU; in ath_tx_form_aggr()
1105 TX_STAT_INC(sc, txq->axq_qnum, a_aggr); in ath_tx_form_aggr()
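Each subframe's contribution to the aggregate length is its frame length plus one delimiter plus padding to the next 4-byte boundary, which is what PADBYTES() at line 1036 computes. A standalone check (editor's sketch):

#include <assert.h>

#define PADBYTES(_len) ((4 - ((_len) % 4)) % 4)

int main(void)
{
	assert(PADBYTES(1534) == 2);  /* 1534 % 4 == 2 -> pad 2 */
	assert(PADBYTES(1536) == 0);  /* already aligned -> pad 0 */
	assert(PADBYTES(41) == 3);
	return 0;
}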
1113 * rix - rate index
1114 * pktlen - total bytes (delims + data + fcs + pads + pad delims)
1115 * width - 0 for 20 MHz, 1 for 40 MHz
1116 * half_gi - whether to use the 3.6 us half-GI symbol time instead of 4 us
1128 nsymbols = (nbits + nsymbits - 1) / nsymbits; in ath_pkt_duration()
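Line 1128 is the usual integer ceiling division: PLCP-padded bits divided by bits-per-symbol, rounded up, then scaled by the 4 us (or 3.6 us half-GI) symbol time. A worked example (editor's arithmetic; OFDM_PLCP_BITS assumed to be 22):

#include <assert.h>

int main(void)
{
	int pktlen = 1500;                                 /* bytes on air */
	int nbits = (pktlen << 3) + 22;                    /* 12022 incl. PLCP */
	int nsymbits = 540;                                /* MCS 7, HT40 */
	int nsymbols = (nbits + nsymbits - 1) / nsymbits;  /* ceil() -> 23 */

	assert(nsymbols == 23);
	assert(nsymbols * 4 == 92);             /* long GI: 4 us per symbol */
	assert((nsymbols * 18 + 4) / 5 == 83);  /* half GI: 3.6 us per symbol */
	return 0;
}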
1147 usec -= L_STF + L_LTF + L_SIG + HT_SIG + HT_STF + HT_LTF(streams); in ath_max_framelen()
1150 bits -= OFDM_PLCP_BITS; in ath_max_framelen()
1167 cur_ht20 = sc->tx.max_aggr_framelen[queue][MCS_HT20]; in ath_update_max_aggr_framelen()
1168 cur_ht20_sgi = sc->tx.max_aggr_framelen[queue][MCS_HT20_SGI]; in ath_update_max_aggr_framelen()
1169 cur_ht40 = sc->tx.max_aggr_framelen[queue][MCS_HT40]; in ath_update_max_aggr_framelen()
1170 cur_ht40_sgi = sc->tx.max_aggr_framelen[queue][MCS_HT40_SGI]; in ath_update_max_aggr_framelen()
1186 struct ath_hw *ah = sc->sc_ah; in ath_get_rate_txpower()
1189 if (sc->tx99_state || !ah->tpc_enabled) in ath_get_rate_txpower()
1192 skb = bf->bf_mpdu; in ath_get_rate_txpower()
1196 is_2ghz = info->band == NL80211_BAND_2GHZ; in ath_get_rate_txpower()
1197 is_5ghz = info->band == NL80211_BAND_5GHZ; in ath_get_rate_txpower()
1198 use_stbc = is_mcs && rateidx < 8 && (info->flags & in ath_get_rate_txpower()
1210 int txpower = fi->tx_power; in ath_get_rate_txpower()
1214 struct ar5416_eeprom_def *eep = &ah->eeprom.def; in ath_get_rate_txpower()
1215 u16 eeprom_rev = ah->eep_ops->get_eeprom_rev(ah); in ath_get_rate_txpower()
1220 pmodal = &eep->modalHeader[is_2ghz]; in ath_get_rate_txpower()
1221 power_ht40delta = pmodal->ht40PowerIncForPdadc; in ath_get_rate_txpower()
1230 txpower -= 2 * AR9287_PWR_TABLE_OFFSET_DB; in ath_get_rate_txpower()
1234 power_offset = ah->eep_ops->get_eeprom(ah, in ath_get_rate_txpower()
1236 txpower -= 2 * power_offset; in ath_get_rate_txpower()
1240 txpower -= 2; in ath_get_rate_txpower()
1243 max_power = min_t(u8, ah->tx_power[rateidx], txpower); in ath_get_rate_txpower()
1245 /* XXX: clamp minimum TX power at 1 for AR9160 since if in ath_get_rate_txpower()
1246 * max_power is set to 0, frames are transmitted at max in ath_get_rate_txpower()
1247 * TX power in ath_get_rate_txpower()
1251 } else if (!bf->bf_state.bfs_paprd) { in ath_get_rate_txpower()
1253 max_power = min_t(u8, ah->tx_power_stbc[rateidx], in ath_get_rate_txpower()
1254 fi->tx_power); in ath_get_rate_txpower()
1256 max_power = min_t(u8, ah->tx_power[rateidx], in ath_get_rate_txpower()
1257 fi->tx_power); in ath_get_rate_txpower()
1259 max_power = ah->paprd_training_power; in ath_get_rate_txpower()
1268 struct ath_hw *ah = sc->sc_ah; in ath_buf_set_rate()
1275 struct ath_frame_info *fi = get_frame_info(bf->bf_mpdu); in ath_buf_set_rate()
1276 u32 rts_thresh = sc->hw->wiphy->rts_threshold; in ath_buf_set_rate()
1280 skb = bf->bf_mpdu; in ath_buf_set_rate()
1282 rates = bf->rates; in ath_buf_set_rate()
1283 hdr = (struct ieee80211_hdr *)skb->data; in ath_buf_set_rate()
1285 /* set dur_update_en for l-sig computation except for PS-Poll frames */ in ath_buf_set_rate()
1286 info->dur_update = !ieee80211_is_pspoll(hdr->frame_control); in ath_buf_set_rate()
1287 info->rtscts_rate = fi->rtscts_rate; in ath_buf_set_rate()
1289 for (i = 0; i < ARRAY_SIZE(bf->rates); i++) { in ath_buf_set_rate()
1297 info->rates[i].Tries = rates[i].count; in ath_buf_set_rate()
1304 unlikely(rts_thresh != (u32) -1)) { in ath_buf_set_rate()
1310 info->rates[i].RateFlags |= ATH9K_RATESERIES_RTS_CTS; in ath_buf_set_rate()
1311 info->flags |= ATH9K_TXDESC_RTSENA; in ath_buf_set_rate()
1313 info->rates[i].RateFlags |= ATH9K_RATESERIES_RTS_CTS; in ath_buf_set_rate()
1314 info->flags |= ATH9K_TXDESC_CTSENA; in ath_buf_set_rate()
1318 info->rates[i].RateFlags |= ATH9K_RATESERIES_2040; in ath_buf_set_rate()
1320 info->rates[i].RateFlags |= ATH9K_RATESERIES_HALFGI; in ath_buf_set_rate()
1328 info->rates[i].Rate = rix | 0x80; in ath_buf_set_rate()
1329 info->rates[i].ChSel = ath_txchainmask_reduction(sc, in ath_buf_set_rate()
1330 ah->txchainmask, info->rates[i].Rate); in ath_buf_set_rate()
1331 info->rates[i].PktDuration = ath_pkt_duration(sc, rix, len, in ath_buf_set_rate()
1333 if (rix < 8 && (tx_info->flags & IEEE80211_TX_CTL_STBC)) in ath_buf_set_rate()
1334 info->rates[i].RateFlags |= ATH9K_RATESERIES_STBC; in ath_buf_set_rate()
1335 if (rix >= 8 && fi->dyn_smps) { in ath_buf_set_rate()
1336 info->rates[i].RateFlags |= in ath_buf_set_rate()
1338 info->flags |= ATH9K_TXDESC_CTSENA; in ath_buf_set_rate()
1341 info->txpower[i] = ath_get_rate_txpower(sc, bf, rix, in ath_buf_set_rate()
1347 rate = &common->sbands[tx_info->band].bitrates[rates[i].idx]; in ath_buf_set_rate()
1348 if ((tx_info->band == NL80211_BAND_2GHZ) && in ath_buf_set_rate()
1349 !(rate->flags & IEEE80211_RATE_ERP_G)) in ath_buf_set_rate()
1354 info->rates[i].Rate = rate->hw_value; in ath_buf_set_rate()
1355 if (rate->hw_value_short) { in ath_buf_set_rate()
1357 info->rates[i].Rate |= rate->hw_value_short; in ath_buf_set_rate()
1362 if (bf->bf_state.bfs_paprd) in ath_buf_set_rate()
1363 info->rates[i].ChSel = ah->txchainmask; in ath_buf_set_rate()
1365 info->rates[i].ChSel = ath_txchainmask_reduction(sc, in ath_buf_set_rate()
1366 ah->txchainmask, info->rates[i].Rate); in ath_buf_set_rate()
1368 info->rates[i].PktDuration = ath9k_hw_computetxtime(sc->sc_ah, in ath_buf_set_rate()
1369 phy, rate->bitrate * 100, len, rix, is_sp); in ath_buf_set_rate()
1371 is_cck = IS_CCK_RATE(info->rates[i].Rate); in ath_buf_set_rate()
1372 info->txpower[i] = ath_get_rate_txpower(sc, bf, rix, false, in ath_buf_set_rate()
1376 /* For AR5416 - RTS cannot be followed by a frame larger than 8K */ in ath_buf_set_rate()
1377 if (bf_isaggr(bf) && (len > sc->sc_ah->caps.rts_aggr_limit)) in ath_buf_set_rate()
1378 info->flags &= ~ATH9K_TXDESC_RTSENA; in ath_buf_set_rate()
1381 if (info->flags & ATH9K_TXDESC_RTSENA) in ath_buf_set_rate()
1382 info->flags &= ~ATH9K_TXDESC_CTSENA; in ath_buf_set_rate()
1391 hdr = (struct ieee80211_hdr *)skb->data; in get_hw_packet_type()
1392 fc = hdr->frame_control; in get_hw_packet_type()
1411 struct ath_hw *ah = sc->sc_ah; in ath_tx_fill_desc()
1414 u32 rts_thresh = sc->hw->wiphy->rts_threshold; in ath_tx_fill_desc()
1420 info.qcu = txq->axq_qnum; in ath_tx_fill_desc()
1423 struct sk_buff *skb = bf->bf_mpdu; in ath_tx_fill_desc()
1426 bool aggr = !!(bf->bf_state.bf_type & BUF_AGGR); in ath_tx_fill_desc()
1429 if (bf->bf_next) in ath_tx_fill_desc()
1430 info.link = bf->bf_next->bf_daddr; in ath_tx_fill_desc()
1432 info.link = (sc->tx99_state) ? bf->bf_daddr : 0; in ath_tx_fill_desc()
1437 if (!sc->tx99_state) in ath_tx_fill_desc()
1439 if ((tx_info->flags & IEEE80211_TX_CTL_CLEAR_PS_FILT) || in ath_tx_fill_desc()
1440 txq == sc->tx.uapsdq) in ath_tx_fill_desc()
1443 if (tx_info->flags & IEEE80211_TX_CTL_NO_ACK) in ath_tx_fill_desc()
1445 if (tx_info->flags & IEEE80211_TX_CTL_LDPC) in ath_tx_fill_desc()
1448 if (bf->bf_state.bfs_paprd) in ath_tx_fill_desc()
1449 info.flags |= (u32) bf->bf_state.bfs_paprd << in ath_tx_fill_desc()
1460 unlikely(rts_thresh != (u32) -1)) { in ath_tx_fill_desc()
1469 len = fi->framelen; in ath_tx_fill_desc()
1474 info.buf_addr[0] = bf->bf_buf_addr; in ath_tx_fill_desc()
1475 info.buf_len[0] = skb->len; in ath_tx_fill_desc()
1476 info.pkt_len = fi->framelen; in ath_tx_fill_desc()
1477 info.keyix = fi->keyix; in ath_tx_fill_desc()
1478 info.keytype = fi->keytype; in ath_tx_fill_desc()
1483 else if (bf == bf_first->bf_lastbf) in ath_tx_fill_desc()
1488 info.ndelim = bf->bf_state.ndelim; in ath_tx_fill_desc()
1492 if (bf == bf_first->bf_lastbf) in ath_tx_fill_desc()
1495 ath9k_hw_set_txdesc(ah, bf->bf_desc, &info); in ath_tx_fill_desc()
1496 bf = bf->bf_next; in ath_tx_fill_desc()
1512 list_add_tail(&bf->list, bf_q); in ath_tx_form_burst()
1514 bf_prev->bf_next = bf; in ath_tx_form_burst()
1524 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_form_burst()
1525 if (tx_info->flags & IEEE80211_TX_CTL_AMPDU) { in ath_tx_form_burst()
1526 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_form_burst()
1530 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_form_burst()
1549 tx_info = IEEE80211_SKB_CB(bf->bf_mpdu); in ath_tx_sched_aggr()
1550 aggr = !!(tx_info->flags & IEEE80211_TX_CTL_AMPDU); in ath_tx_sched_aggr()
1551 if ((aggr && txq->axq_ampdu_depth >= ATH_AGGR_MIN_QDEPTH) || in ath_tx_sched_aggr()
1552 (!aggr && txq->axq_depth >= ATH_NON_AGGR_MIN_QDEPTH)) { in ath_tx_sched_aggr()
1553 __skb_queue_tail(&tid->retry_q, bf->bf_mpdu); in ath_tx_sched_aggr()
1554 return -EBUSY; in ath_tx_sched_aggr()
1557 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath_tx_sched_aggr()
1564 return -EAGAIN; in ath_tx_sched_aggr()
1566 if (tid->clear_ps_filter || tid->an->no_ps_filter) { in ath_tx_sched_aggr()
1567 tid->clear_ps_filter = false; in ath_tx_sched_aggr()
1568 tx_info->flags |= IEEE80211_TX_CTL_CLEAR_PS_FILT; in ath_tx_sched_aggr()
1579 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_start()
1587 an = (struct ath_node *)sta->drv_priv; in ath_tx_aggr_start()
1589 txq = txtid->txq; in ath_tx_aggr_start()
1594 * in HT IBSS when a beacon with HT-info is received after the station in ath_tx_aggr_start()
1597 if (sta->deflink.ht_cap.ht_supported) { in ath_tx_aggr_start()
1598 an->maxampdu = (1 << (IEEE80211_HT_MAX_AMPDU_FACTOR + in ath_tx_aggr_start()
1599 sta->deflink.ht_cap.ampdu_factor)) - 1; in ath_tx_aggr_start()
1600 density = ath9k_parse_mpdudensity(sta->deflink.ht_cap.ampdu_density); in ath_tx_aggr_start()
1601 an->mpdudensity = density; in ath_tx_aggr_start()
1604 txtid->active = true; in ath_tx_aggr_start()
1605 *ssn = txtid->seq_start = txtid->seq_next; in ath_tx_aggr_start()
1606 txtid->bar_index = -1; in ath_tx_aggr_start()
1608 memset(txtid->tx_buf, 0, sizeof(txtid->tx_buf)); in ath_tx_aggr_start()
1609 txtid->baw_head = txtid->baw_tail = 0; in ath_tx_aggr_start()
1618 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_stop()
1619 struct ath_node *an = (struct ath_node *)sta->drv_priv; in ath_tx_aggr_stop()
1621 struct ath_txq *txq = txtid->txq; in ath_tx_aggr_stop()
1626 txtid->active = false; in ath_tx_aggr_stop()
1634 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_sleep()
1643 if (!skb_queue_empty(&tid->retry_q)) in ath_tx_aggr_sleep()
1644 ieee80211_sta_set_buffered(sta, tid->tidno, true); in ath_tx_aggr_sleep()
1651 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_aggr_wakeup()
1660 txq = tid->txq; in ath_tx_aggr_wakeup()
1663 tid->clear_ps_filter = true; in ath_tx_aggr_wakeup()
1664 if (!skb_queue_empty(&tid->retry_q)) { in ath_tx_aggr_wakeup()
1678 __le16 mask = cpu_to_le16(IEEE80211_FCTL_MOREDATA); in ath9k_set_moredata() local
1679 __le16 mask_val = mask * val; in ath9k_set_moredata()
1681 hdr = (struct ieee80211_hdr *) bf->bf_mpdu->data; in ath9k_set_moredata()
1682 if ((hdr->frame_control & mask) != mask_val) { in ath9k_set_moredata()
1683 hdr->frame_control = (hdr->frame_control & ~mask) | mask_val; in ath9k_set_moredata()
1684 dma_sync_single_for_device(sc->dev, bf->bf_buf_addr, in ath9k_set_moredata()
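ath9k_set_moredata() flips IEEE80211_FCTL_MOREDATA without branching on the flag value: mask * val is mask when val is true and 0 when false, so one masked assignment handles both set and clear (the DMA sync afterwards is needed because the header lives in an already-mapped buffer). The trick in isolation (editor's demo; 0x2000 is the MOREDATA bit):

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>

uint16_t set_flag(uint16_t fc, uint16_t mask, bool val)
{
	uint16_t mask_val = mask * val;  /* mask if val, else 0 */

	if ((fc & mask) != mask_val)
		fc = (fc & ~mask) | mask_val;
	return fc;
}

int main(void)
{
	assert(set_flag(0x0000, 0x2000, true)  == 0x2000);
	assert(set_flag(0x2000, 0x2000, false) == 0x0000);
	assert(set_flag(0x2000, 0x2000, true)  == 0x2000);
	return 0;
}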
1695 struct ath_softc *sc = hw->priv; in ath9k_release_buffered_frames()
1696 struct ath_node *an = (struct ath_node *)sta->drv_priv; in ath9k_release_buffered_frames()
1697 struct ath_txq *txq = sc->tx.uapsdq; in ath9k_release_buffered_frames()
1712 ath_txq_lock(sc, tid->txq); in ath9k_release_buffered_frames()
1714 ret = ath_tx_get_tid_subframe(sc, sc->tx.uapsdq, in ath9k_release_buffered_frames()
1720 list_add_tail(&bf->list, &bf_q); in ath9k_release_buffered_frames()
1721 ath_set_rates(tid->an->vif, tid->an->sta, bf); in ath9k_release_buffered_frames()
1723 bf->bf_state.bf_type &= ~BUF_AGGR; in ath9k_release_buffered_frames()
1725 bf_tail->bf_next = bf; in ath9k_release_buffered_frames()
1728 nframes--; in ath9k_release_buffered_frames()
1729 TX_STAT_INC(sc, txq->axq_qnum, a_queued_hw); in ath9k_release_buffered_frames()
1731 if (an->sta && skb_queue_empty(&tid->retry_q)) in ath9k_release_buffered_frames()
1732 ieee80211_sta_set_buffered(an->sta, i, false); in ath9k_release_buffered_frames()
1734 ath_txq_unlock_complete(sc, tid->txq); in ath9k_release_buffered_frames()
1743 info = IEEE80211_SKB_CB(bf_tail->bf_mpdu); in ath9k_release_buffered_frames()
1744 info->flags |= IEEE80211_TX_STATUS_EOSP; in ath9k_release_buffered_frames()
1759 struct ath_hw *ah = sc->sc_ah; in ath_txq_setup()
1778 * We mark tx descriptors to receive a DESC interrupt in ath_txq_setup()
1779 * when a tx queue gets deep; otherwise waiting for the in ath_txq_setup()
1784 * The only potential downside is if the tx queue backs in ath_txq_setup()
1786 * due to a lack of tx descriptors. in ath_txq_setup()
1788 * The UAPSD queue is an exception, since we take a desc- in ath_txq_setup()
1789 * based intr on the EOSP frames. in ath_txq_setup()
1791 if (ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) { in ath_txq_setup()
1801 if (axq_qnum == -1) { in ath_txq_setup()
1804 * normally on parts with too few tx queues in ath_txq_setup()
1809 struct ath_txq *txq = &sc->tx.txq[axq_qnum]; in ath_txq_setup()
1811 txq->axq_qnum = axq_qnum; in ath_txq_setup()
1812 txq->mac80211_qnum = -1; in ath_txq_setup()
1813 txq->axq_link = NULL; in ath_txq_setup()
1814 __skb_queue_head_init(&txq->complete_q); in ath_txq_setup()
1815 INIT_LIST_HEAD(&txq->axq_q); in ath_txq_setup()
1816 spin_lock_init(&txq->axq_lock); in ath_txq_setup()
1817 txq->axq_depth = 0; in ath_txq_setup()
1818 txq->axq_ampdu_depth = 0; in ath_txq_setup()
1819 txq->axq_tx_inprogress = false; in ath_txq_setup()
1820 sc->tx.txqsetup |= 1<<axq_qnum; in ath_txq_setup()
1822 txq->txq_headidx = txq->txq_tailidx = 0; in ath_txq_setup()
1824 INIT_LIST_HEAD(&txq->txq_fifo[i]); in ath_txq_setup()
1826 return &sc->tx.txq[axq_qnum]; in ath_txq_setup()
1832 struct ath_hw *ah = sc->sc_ah; in ath_txq_update()
1836 BUG_ON(sc->tx.txq[qnum].axq_qnum != qnum); in ath_txq_update()
1839 qi.tqi_aifs = qinfo->tqi_aifs; in ath_txq_update()
1840 qi.tqi_cwmin = qinfo->tqi_cwmin; in ath_txq_update()
1841 qi.tqi_cwmax = qinfo->tqi_cwmax; in ath_txq_update()
1842 qi.tqi_burstTime = qinfo->tqi_burstTime; in ath_txq_update()
1843 qi.tqi_readyTime = qinfo->tqi_readyTime; in ath_txq_update()
1846 ath_err(ath9k_hw_common(sc->sc_ah), in ath_txq_update()
1848 error = -EIO; in ath_txq_update()
1859 struct ath_beacon_config *cur_conf = &sc->cur_chan->beacon; in ath_cabq_update()
1860 int qnum = sc->beacon.cabq->axq_qnum; in ath_cabq_update()
1862 ath9k_hw_get_txq_props(sc->sc_ah, qnum, &qi); in ath_cabq_update()
1864 qi.tqi_readyTime = (TU_TO_USEC(cur_conf->beacon_interval) * in ath_cabq_update()
1876 struct ath_tx_status ts; in ath_drain_txq_list() local
1878 memset(&ts, 0, sizeof(ts)); in ath_drain_txq_list()
1879 ts.ts_status = ATH9K_TX_FLUSH; in ath_drain_txq_list()
1885 if (bf->bf_state.stale) { in ath_drain_txq_list()
1886 list_del(&bf->list); in ath_drain_txq_list()
1892 lastbf = bf->bf_lastbf; in ath_drain_txq_list()
1893 list_cut_position(&bf_head, list, &lastbf->list); in ath_drain_txq_list()
1894 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_drain_txq_list()
1899 * Drain a given TX queue (could be Beacon or Data)
1909 if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) { in ath_draintxq()
1910 int idx = txq->txq_tailidx; in ath_draintxq()
1912 while (!list_empty(&txq->txq_fifo[idx])) { in ath_draintxq()
1913 ath_drain_txq_list(sc, txq, &txq->txq_fifo[idx]); in ath_draintxq()
1917 txq->txq_tailidx = idx; in ath_draintxq()
1920 txq->axq_link = NULL; in ath_draintxq()
1921 txq->axq_tx_inprogress = false; in ath_draintxq()
1922 ath_drain_txq_list(sc, txq, &txq->axq_q); in ath_draintxq()
1930 struct ath_hw *ah = sc->sc_ah; in ath_drain_all_txq()
1931 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_drain_all_txq()
1936 if (test_bit(ATH_OP_INVALID, &common->op_flags)) in ath_drain_all_txq()
1946 if (!sc->tx.txq[i].axq_depth) in ath_drain_all_txq()
1949 if (ath9k_hw_numtxpending(ah, sc->tx.txq[i].axq_qnum)) in ath_drain_all_txq()
1956 "Failed to stop TX DMA, queues=0x%03x!\n", npend); in ath_drain_all_txq()
1963 txq = &sc->tx.txq[i]; in ath_drain_all_txq()
1972 ath9k_hw_releasetxqueue(sc->sc_ah, txq->axq_qnum); in ath_tx_cleanupq()
1973 sc->tx.txqsetup &= ~(1<<txq->axq_qnum); in ath_tx_cleanupq()
1981 struct ieee80211_hw *hw = sc->hw; in ath_txq_schedule()
1982 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_txq_schedule()
1987 if (txq->mac80211_qnum < 0) in ath_txq_schedule()
1990 if (test_bit(ATH_OP_HW_RESET, &common->op_flags)) in ath_txq_schedule()
1993 ieee80211_txq_schedule_start(hw, txq->mac80211_qnum); in ath_txq_schedule()
1994 spin_lock_bh(&sc->chan_lock); in ath_txq_schedule()
1997 if (sc->cur_chan->stopped) in ath_txq_schedule()
2000 while ((queue = ieee80211_next_txq(hw, txq->mac80211_qnum))) { in ath_txq_schedule()
2003 tid = (struct ath_atx_tid *)queue->drv_priv; in ath_txq_schedule()
2008 force = !skb_queue_empty(&tid->retry_q); in ath_txq_schedule()
2014 spin_unlock_bh(&sc->chan_lock); in ath_txq_schedule()
2015 ieee80211_txq_schedule_end(hw, txq->mac80211_qnum); in ath_txq_schedule()
2024 txq = sc->tx.txq_map[i]; in ath_txq_schedule_all()
2026 spin_lock_bh(&txq->axq_lock); in ath_txq_schedule_all()
2028 spin_unlock_bh(&txq->axq_lock); in ath_txq_schedule_all()
2033 /* TX, DMA */
2043 struct ath_hw *ah = sc->sc_ah; in ath_tx_txqaddbuf()
2057 edma = !!(ah->caps.hw_caps & ATH9K_HW_CAP_EDMA); in ath_tx_txqaddbuf()
2059 bf_last = list_entry(head->prev, struct ath_buf, list); in ath_tx_txqaddbuf()
2062 txq->axq_qnum, txq->axq_depth); in ath_tx_txqaddbuf()
2064 if (edma && list_empty(&txq->txq_fifo[txq->txq_headidx])) { in ath_tx_txqaddbuf()
2065 list_splice_tail_init(head, &txq->txq_fifo[txq->txq_headidx]); in ath_tx_txqaddbuf()
2066 INCR(txq->txq_headidx, ATH_TXFIFO_DEPTH); in ath_tx_txqaddbuf()
2069 list_splice_tail_init(head, &txq->axq_q); in ath_tx_txqaddbuf()
2071 if (txq->axq_link) { in ath_tx_txqaddbuf()
2072 ath9k_hw_set_desc_link(ah, txq->axq_link, bf->bf_daddr); in ath_tx_txqaddbuf()
2074 txq->axq_qnum, txq->axq_link, in ath_tx_txqaddbuf()
2075 ito64(bf->bf_daddr), bf->bf_desc); in ath_tx_txqaddbuf()
2079 txq->axq_link = bf_last->bf_desc; in ath_tx_txqaddbuf()
2083 TX_STAT_INC(sc, txq->axq_qnum, puttxbuf); in ath_tx_txqaddbuf()
2084 ath9k_hw_puttxbuf(ah, txq->axq_qnum, bf->bf_daddr); in ath_tx_txqaddbuf()
2086 txq->axq_qnum, ito64(bf->bf_daddr), bf->bf_desc); in ath_tx_txqaddbuf()
2089 if (!edma || sc->tx99_state) { in ath_tx_txqaddbuf()
2090 TX_STAT_INC(sc, txq->axq_qnum, txstart); in ath_tx_txqaddbuf()
2091 ath9k_hw_txstart(ah, txq->axq_qnum); in ath_tx_txqaddbuf()
2096 txq->axq_depth++; in ath_tx_txqaddbuf()
2098 txq->axq_ampdu_depth++; in ath_tx_txqaddbuf()
2100 bf_last = bf->bf_lastbf; in ath_tx_txqaddbuf()
2101 bf = bf_last->bf_next; in ath_tx_txqaddbuf()
2102 bf_last->bf_next = NULL; in ath_tx_txqaddbuf()
2113 struct ath_buf *bf = fi->bf; in ath_tx_send_normal()
2116 list_add_tail(&bf->list, &bf_head); in ath_tx_send_normal()
2117 bf->bf_state.bf_type = 0; in ath_tx_send_normal()
2118 if (tid && (tx_info->flags & IEEE80211_TX_CTL_AMPDU)) { in ath_tx_send_normal()
2119 bf->bf_state.bf_type = BUF_AMPDU; in ath_tx_send_normal()
2123 bf->bf_next = NULL; in ath_tx_send_normal()
2124 bf->bf_lastbf = bf; in ath_tx_send_normal()
2125 ath_tx_fill_desc(sc, bf, txq, fi->framelen); in ath_tx_send_normal()
2127 TX_STAT_INC(sc, txq->axq_qnum, queued); in ath_tx_send_normal()
2136 struct ieee80211_key_conf *hw_key = tx_info->control.hw_key; in setup_frame_info()
2137 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in setup_frame_info()
2150 if (tx_info->control.vif && in setup_frame_info()
2151 tx_info->control.vif->bss_conf.use_short_preamble) in setup_frame_info()
2158 an = (struct ath_node *) sta->drv_priv; in setup_frame_info()
2160 if (tx_info->control.vif) { in setup_frame_info()
2161 struct ieee80211_vif *vif = tx_info->control.vif; in setup_frame_info()
2162 if (vif->bss_conf.txpower == INT_MIN) in setup_frame_info()
2164 txpower = 2 * vif->bss_conf.txpower; in setup_frame_info()
2168 sc = hw->priv; in setup_frame_info()
2170 txpower = sc->cur_chan->cur_txpower; in setup_frame_info()
2174 fi->txq = -1; in setup_frame_info()
2176 fi->keyix = hw_key->hw_key_idx; in setup_frame_info()
2177 else if (an && ieee80211_is_data(hdr->frame_control) && an->ps_key > 0) in setup_frame_info()
2178 fi->keyix = an->ps_key; in setup_frame_info()
2180 fi->keyix = ATH9K_TXKEYIX_INVALID; in setup_frame_info()
2181 fi->dyn_smps = sta && sta->deflink.smps_mode == IEEE80211_SMPS_DYNAMIC; in setup_frame_info()
2182 fi->keytype = keytype; in setup_frame_info()
2183 fi->framelen = framelen; in setup_frame_info()
2184 fi->tx_power = txpower; in setup_frame_info()
2188 fi->rtscts_rate = rate->hw_value; in setup_frame_info()
2190 fi->rtscts_rate |= rate->hw_value_short; in setup_frame_info()
2195 struct ath_hw *ah = sc->sc_ah; in ath_txchainmask_reduction()
2196 struct ath9k_channel *curchan = ah->curchan; in ath_txchainmask_reduction()
2198 if ((ah->caps.hw_caps & ATH9K_HW_CAP_APM) && IS_CHAN_5GHZ(curchan) && in ath_txchainmask_reduction()
2217 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_setup_buffer()
2219 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_setup_buffer()
2226 ath_dbg(common, XMIT, "TX buffers are full\n"); in ath_tx_setup_buffer()
2232 if (tid && ieee80211_is_data_present(hdr->frame_control)) { in ath_tx_setup_buffer()
2233 fragno = le16_to_cpu(hdr->seq_ctrl) & IEEE80211_SCTL_FRAG; in ath_tx_setup_buffer()
2234 seqno = tid->seq_next; in ath_tx_setup_buffer()
2235 hdr->seq_ctrl = cpu_to_le16(tid->seq_next << IEEE80211_SEQ_SEQ_SHIFT); in ath_tx_setup_buffer()
2238 hdr->seq_ctrl |= cpu_to_le16(fragno); in ath_tx_setup_buffer()
2240 if (!ieee80211_has_morefrags(hdr->frame_control)) in ath_tx_setup_buffer()
2241 INCR(tid->seq_next, IEEE80211_SEQ_MAX); in ath_tx_setup_buffer()
2243 bf->bf_state.seqno = seqno; in ath_tx_setup_buffer()
2246 bf->bf_mpdu = skb; in ath_tx_setup_buffer()
2248 bf->bf_buf_addr = dma_map_single(sc->dev, skb->data, in ath_tx_setup_buffer()
2249 skb->len, DMA_TO_DEVICE); in ath_tx_setup_buffer()
2250 if (unlikely(dma_mapping_error(sc->dev, bf->bf_buf_addr))) { in ath_tx_setup_buffer()
2251 bf->bf_mpdu = NULL; in ath_tx_setup_buffer()
2252 bf->bf_buf_addr = 0; in ath_tx_setup_buffer()
2253 ath_err(ath9k_hw_common(sc->sc_ah), in ath_tx_setup_buffer()
2254 "dma_mapping_error() on TX\n"); in ath_tx_setup_buffer()
2259 fi->bf = bf; in ath_tx_setup_buffer()
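The seq_ctrl assembly above packs the TID's 12-bit sequence number above the 4-bit fragment number (IEEE80211_SEQ_SEQ_SHIFT is 4), and seq_next wraps modulo IEEE80211_SEQ_MAX once the last fragment is queued. The packing in isolation (editor's demo, host byte order):

#include <assert.h>
#include <stdint.h>

int main(void)
{
	uint16_t seqno = 100, fragno = 2;
	uint16_t seq_ctrl = (uint16_t)(seqno << 4) | fragno;

	assert((seq_ctrl >> 4) == 100);  /* sequence number */
	assert((seq_ctrl & 0xf) == 2);   /* fragment number */
	return 0;
}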
2266 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ath_assign_seq()
2268 struct ieee80211_vif *vif = info->control.vif; in ath_assign_seq()
2271 if (!(info->flags & IEEE80211_TX_CTL_ASSIGN_SEQ)) in ath_assign_seq()
2277 avp = (struct ath_vif *)vif->drv_priv; in ath_assign_seq()
2279 if (info->flags & IEEE80211_TX_CTL_FIRST_FRAGMENT) in ath_assign_seq()
2280 avp->seq_no += 0x10; in ath_assign_seq()
2282 hdr->seq_ctrl &= cpu_to_le16(IEEE80211_SCTL_FRAG); in ath_assign_seq()
2283 hdr->seq_ctrl |= cpu_to_le16(avp->seq_no); in ath_assign_seq()
2289 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ath_tx_prepare()
2291 struct ieee80211_sta *sta = txctl->sta; in ath_tx_prepare()
2292 struct ieee80211_vif *vif = info->control.vif; in ath_tx_prepare()
2294 struct ath_softc *sc = hw->priv; in ath_tx_prepare()
2295 int frmlen = skb->len + FCS_LEN; in ath_tx_prepare()
2300 txctl->an = (struct ath_node *)sta->drv_priv; in ath_tx_prepare()
2301 else if (vif && ieee80211_is_data(hdr->frame_control)) { in ath_tx_prepare()
2302 avp = (void *)vif->drv_priv; in ath_tx_prepare()
2303 txctl->an = &avp->mcast_node; in ath_tx_prepare()
2306 if (info->control.hw_key) in ath_tx_prepare()
2307 frmlen += info->control.hw_key->icv_len; in ath_tx_prepare()
2309 ath_assign_seq(ath9k_hw_common(sc->sc_ah), skb); in ath_tx_prepare()
2311 if ((vif && vif->type != NL80211_IFTYPE_AP && in ath_tx_prepare()
2312 vif->type != NL80211_IFTYPE_AP_VLAN) || in ath_tx_prepare()
2313 !ieee80211_is_data(hdr->frame_control)) in ath_tx_prepare()
2314 info->flags |= IEEE80211_TX_CTL_CLEAR_PS_FILT; in ath_tx_prepare()
2317 padpos = ieee80211_hdrlen(hdr->frame_control); in ath_tx_prepare()
2319 if (padsize && skb->len > padpos) { in ath_tx_prepare()
2321 return -ENOMEM; in ath_tx_prepare()
2324 memmove(skb->data, skb->data + padsize, padpos); in ath_tx_prepare()
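Many 802.11 headers are not a multiple of 4 bytes, and the hardware expects the payload 4-byte aligned, so ath_tx_prepare() grows the skb head by padsize and slides the header forward, leaving the pad bytes between header and payload; ath_tx_complete() at line 2495 performs the inverse move before the skb goes back to mac80211. The buffer-level effect (editor's sketch, plain memory instead of an skb):

#include <string.h>

/* Before: buf points padsize bytes below the old start, so the layout is
 * [junk(padsize)][hdr(padpos)][payload]. After the move it is
 * [hdr(padpos)][pad(padsize)][payload], and the payload never moved. */
void pad_tx_header(unsigned char *buf, int padpos, int padsize)
{
	memmove(buf, buf + padsize, padpos);
}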
2337 struct ieee80211_sta *sta = txctl->sta; in ath_tx_start()
2338 struct ieee80211_vif *vif = info->control.vif; in ath_tx_start()
2340 struct ath_softc *sc = hw->priv; in ath_tx_start()
2341 struct ath_txq *txq = txctl->txq; in ath_tx_start()
2348 ps_resp = !!(info->control.flags & IEEE80211_TX_CTRL_PS_RESPONSE); in ath_tx_start()
2355 * At this point, the vif, hw_key and sta pointers in the tx control in ath_tx_start()
2362 txq = sc->tx.uapsdq; in ath_tx_start()
2364 if (txctl->sta) { in ath_tx_start()
2365 an = (struct ath_node *) sta->drv_priv; in ath_tx_start()
2370 if (txq == sc->tx.txq_map[q]) { in ath_tx_start()
2371 fi->txq = q; in ath_tx_start()
2372 ++txq->pending_frames; in ath_tx_start()
2378 if (txctl->paprd) in ath_tx_start()
2381 ieee80211_free_txskb(sc->hw, skb); in ath_tx_start()
2385 bf->bf_state.bfs_paprd = txctl->paprd; in ath_tx_start()
2387 if (txctl->paprd) in ath_tx_start()
2388 bf->bf_state.bfs_paprd_timestamp = jiffies; in ath_tx_start()
2402 struct ath_softc *sc = hw->priv; in ath_tx_cabq()
2404 .txq = sc->beacon.cabq in ath_tx_cabq()
2414 sc->cur_chan->beacon.beacon_interval * 1000 * in ath_tx_cabq()
2415 sc->cur_chan->beacon.dtim_period / ATH_BCBUF; in ath_tx_cabq()
2427 bf->bf_lastbf = bf; in ath_tx_cabq()
2429 ath_buf_set_rate(sc, bf, &info, fi->framelen, false); in ath_tx_cabq()
2432 bf_tail->bf_next = bf; in ath_tx_cabq()
2434 list_add_tail(&bf->list, &bf_q); in ath_tx_cabq()
2457 TX_STAT_INC(sc, txctl.txq->axq_qnum, queued); in ath_tx_cabq()
2462 /* TX Completion */
2470 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_complete()
2471 struct ieee80211_hdr * hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_complete()
2475 ath_dbg(common, XMIT, "TX complete: skb: %p\n", skb); in ath_tx_complete()
2477 if (sc->sc_ah->caldata) in ath_tx_complete()
2478 set_bit(PAPRD_PACKET_SENT, &sc->sc_ah->caldata->cal_flags); in ath_tx_complete()
2481 if (tx_info->flags & IEEE80211_TX_CTL_NO_ACK) in ath_tx_complete()
2482 tx_info->flags |= IEEE80211_TX_STAT_NOACK_TRANSMITTED; in ath_tx_complete()
2484 tx_info->flags |= IEEE80211_TX_STAT_ACK; in ath_tx_complete()
2487 if (tx_info->flags & IEEE80211_TX_CTL_REQ_TX_STATUS) { in ath_tx_complete()
2488 padpos = ieee80211_hdrlen(hdr->frame_control); in ath_tx_complete()
2490 if (padsize && skb->len > padpos + padsize) { in ath_tx_complete()
2495 memmove(skb->data + padsize, skb->data, padpos); in ath_tx_complete()
2500 spin_lock_irqsave(&sc->sc_pm_lock, flags); in ath_tx_complete()
2501 if ((sc->ps_flags & PS_WAIT_FOR_TX_ACK) && !txq->axq_depth) { in ath_tx_complete()
2502 sc->ps_flags &= ~PS_WAIT_FOR_TX_ACK; in ath_tx_complete()
2504 "Going back to sleep after having received TX status (0x%lx)\n", in ath_tx_complete()
2505 sc->ps_flags & (PS_WAIT_FOR_BEACON | in ath_tx_complete()
2510 spin_unlock_irqrestore(&sc->sc_pm_lock, flags); in ath_tx_complete()
2513 tx_info->status.status_driver_data[0] = sta; in ath_tx_complete()
2514 __skb_queue_tail(&txq->complete_q, skb); in ath_tx_complete()
2520 struct ath_tx_status *ts, int txok) in ath_tx_complete_buf() argument
2522 struct sk_buff *skb = bf->bf_mpdu; in ath_tx_complete_buf()
2530 if (ts->ts_status & ATH9K_TXERR_FILT) in ath_tx_complete_buf()
2531 tx_info->flags |= IEEE80211_TX_STAT_TX_FILTERED; in ath_tx_complete_buf()
2533 dma_unmap_single(sc->dev, bf->bf_buf_addr, skb->len, DMA_TO_DEVICE); in ath_tx_complete_buf()
2534 bf->bf_buf_addr = 0; in ath_tx_complete_buf()
2535 if (sc->tx99_state) in ath_tx_complete_buf()
2538 if (bf->bf_state.bfs_paprd) { in ath_tx_complete_buf()
2540 bf->bf_state.bfs_paprd_timestamp + in ath_tx_complete_buf()
2544 complete(&sc->paprd_complete); in ath_tx_complete_buf()
2546 ath_debug_stat_tx(sc, bf, ts, txq, tx_flags); in ath_tx_complete_buf()
2550 /* At this point, skb (bf->bf_mpdu) is consumed...make sure we don't in ath_tx_complete_buf()
2553 bf->bf_mpdu = NULL; in ath_tx_complete_buf()
2558 spin_lock_irqsave(&sc->tx.txbuflock, flags); in ath_tx_complete_buf()
2559 list_splice_tail_init(bf_q, &sc->tx.txbuf); in ath_tx_complete_buf()
2560 spin_unlock_irqrestore(&sc->tx.txbuflock, flags); in ath_tx_complete_buf()
2565 void *ptr = &tx_info->status; in ath_clear_tx_status()
2567 memset(ptr + sizeof(tx_info->status.rates), 0, in ath_clear_tx_status()
2568 sizeof(tx_info->status) - in ath_clear_tx_status()
2569 sizeof(tx_info->status.rates) - in ath_clear_tx_status()
2570 sizeof(tx_info->status.status_driver_data)); in ath_clear_tx_status()
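ath_clear_tx_status() zeroes only the middle of the status block: the sizeof arithmetic skips the rates array at the front and the status_driver_data scratch area at the back, where line 2513 parked the station pointer. The idiom generically (editor's demo; it relies on the kept fields being the first and last members of the struct):

#include <string.h>

struct status {
	int rates[4];          /* kept: first member */
	int ack_signal;        /* cleared */
	int ampdu_len;         /* cleared */
	void *driver_data[2];  /* kept: must be the last member */
};

void clear_middle(struct status *st)
{
	unsigned char *p = (unsigned char *)st;

	memset(p + sizeof(st->rates), 0,
	       sizeof(*st) - sizeof(st->rates) - sizeof(st->driver_data));
}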
2574 struct ath_tx_status *ts, int nframes, int nbad, in ath_tx_rc_status() argument
2577 struct sk_buff *skb = bf->bf_mpdu; in ath_tx_rc_status()
2578 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *)skb->data; in ath_tx_rc_status()
2580 struct ieee80211_hw *hw = sc->hw; in ath_tx_rc_status()
2581 struct ath_hw *ah = sc->sc_ah; in ath_tx_rc_status()
2587 tx_info->status.ack_signal = ts->ts_rssi; in ath_tx_rc_status()
2589 tx_rateindex = ts->ts_rateindex; in ath_tx_rc_status()
2590 WARN_ON(tx_rateindex >= hw->max_rates); in ath_tx_rc_status()
2592 if (tx_info->flags & IEEE80211_TX_CTL_AMPDU) { in ath_tx_rc_status()
2593 tx_info->flags |= IEEE80211_TX_STAT_AMPDU; in ath_tx_rc_status()
2597 tx_info->status.ampdu_len = nframes; in ath_tx_rc_status()
2598 tx_info->status.ampdu_ack_len = nframes - nbad; in ath_tx_rc_status()
2600 tx_info->status.rates[tx_rateindex].count = ts->ts_longretry + 1; in ath_tx_rc_status()
2602 for (i = tx_rateindex + 1; i < hw->max_rates; i++) { in ath_tx_rc_status()
2603 tx_info->status.rates[i].count = 0; in ath_tx_rc_status()
2604 tx_info->status.rates[i].idx = -1; in ath_tx_rc_status()
2607 if ((ts->ts_status & ATH9K_TXERR_FILT) == 0 && in ath_tx_rc_status()
2608 (tx_info->flags & IEEE80211_TX_CTL_NO_ACK) == 0) { in ath_tx_rc_status()
2614 * hw->max_rate_tries times to affect how rate control updates in ath_tx_rc_status()
2621 if (unlikely(ts->ts_flags & (ATH9K_TX_DATA_UNDERRUN | in ath_tx_rc_status()
2623 ieee80211_is_data(hdr->frame_control) && in ath_tx_rc_status()
2624 ah->tx_trig_level >= sc->sc_ah->config.max_txtrig_level) in ath_tx_rc_status()
2625 tx_info->status.rates[tx_rateindex].count = in ath_tx_rc_status()
2626 hw->max_rate_tries; in ath_tx_rc_status()
2632 struct ath_hw *ah = sc->sc_ah; in ath_tx_processq()
2637 struct ath_tx_status ts; in ath_tx_processq() local
2640 ath_dbg(common, QUEUE, "tx queue %d (%x), link %p\n", in ath_tx_processq()
2641 txq->axq_qnum, ath9k_hw_gettxbuf(sc->sc_ah, txq->axq_qnum), in ath_tx_processq()
2642 txq->axq_link); in ath_tx_processq()
2646 if (test_bit(ATH_OP_HW_RESET, &common->op_flags)) in ath_tx_processq()
2649 if (list_empty(&txq->axq_q)) { in ath_tx_processq()
2650 txq->axq_link = NULL; in ath_tx_processq()
2654 bf = list_first_entry(&txq->axq_q, struct ath_buf, list); in ath_tx_processq()
2658 * after sw writes TxE and before hw re-loads the last in ath_tx_processq()
2661 * holding descriptor - software does so by marking in ath_tx_processq()
2665 if (bf->bf_state.stale) { in ath_tx_processq()
2667 if (list_is_last(&bf_held->list, &txq->axq_q)) in ath_tx_processq()
2670 bf = list_entry(bf_held->list.next, struct ath_buf, in ath_tx_processq()
2674 lastbf = bf->bf_lastbf; in ath_tx_processq()
2675 ds = lastbf->bf_desc; in ath_tx_processq()
2677 memset(&ts, 0, sizeof(ts)); in ath_tx_processq()
2678 status = ath9k_hw_txprocdesc(ah, ds, &ts); in ath_tx_processq()
2679 if (status == -EINPROGRESS) in ath_tx_processq()
2682 TX_STAT_INC(sc, txq->axq_qnum, txprocdesc); in ath_tx_processq()
2689 lastbf->bf_state.stale = true; in ath_tx_processq()
2691 if (!list_is_singular(&lastbf->list)) in ath_tx_processq()
2693 &txq->axq_q, lastbf->list.prev); in ath_tx_processq()
2696 list_del(&bf_held->list); in ath_tx_processq()
2700 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_tx_processq()
2707 struct ath_hw *ah = sc->sc_ah; in ath_tx_tasklet()
2708 u32 qcumask = ((1 << ATH9K_NUM_TX_QUEUES) - 1) & ah->intr_txqs; in ath_tx_tasklet()
2714 ath_tx_processq(sc, &sc->tx.txq[i]); in ath_tx_tasklet()
2721 struct ath_tx_status ts; in ath_tx_edma_tasklet() local
2722 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_edma_tasklet()
2723 struct ath_hw *ah = sc->sc_ah; in ath_tx_edma_tasklet()
2732 if (test_bit(ATH_OP_HW_RESET, &common->op_flags)) in ath_tx_edma_tasklet()
2735 status = ath9k_hw_txprocdesc(ah, NULL, (void *)&ts); in ath_tx_edma_tasklet()
2736 if (status == -EINPROGRESS) in ath_tx_edma_tasklet()
2738 if (status == -EIO) { in ath_tx_edma_tasklet()
2739 ath_dbg(common, XMIT, "Error processing tx status\n"); in ath_tx_edma_tasklet()
2744 if (ts.qid == sc->beacon.beaconq) { in ath_tx_edma_tasklet()
2745 sc->beacon.tx_processed = true; in ath_tx_edma_tasklet()
2746 sc->beacon.tx_last = !(ts.ts_status & ATH9K_TXERR_MASK); in ath_tx_edma_tasklet()
2757 txq = &sc->tx.txq[ts.qid]; in ath_tx_edma_tasklet()
2761 TX_STAT_INC(sc, txq->axq_qnum, txprocdesc); in ath_tx_edma_tasklet()
2763 fifo_list = &txq->txq_fifo[txq->txq_tailidx]; in ath_tx_edma_tasklet()
2770 if (bf->bf_state.stale) { in ath_tx_edma_tasklet()
2771 list_del(&bf->list); in ath_tx_edma_tasklet()
2776 lastbf = bf->bf_lastbf; in ath_tx_edma_tasklet()
2779 if (list_is_last(&lastbf->list, fifo_list)) { in ath_tx_edma_tasklet()
2781 INCR(txq->txq_tailidx, ATH_TXFIFO_DEPTH); in ath_tx_edma_tasklet()
2783 if (!list_empty(&txq->axq_q)) { in ath_tx_edma_tasklet()
2787 txq->axq_link = NULL; in ath_tx_edma_tasklet()
2788 list_splice_tail_init(&txq->axq_q, &bf_q); in ath_tx_edma_tasklet()
2792 lastbf->bf_state.stale = true; in ath_tx_edma_tasklet()
2795 lastbf->list.prev); in ath_tx_edma_tasklet()
2798 ath_tx_process_buffer(sc, txq, &ts, bf, &bf_head); in ath_tx_edma_tasklet()
2810 struct ath_descdma *dd = &sc->txsdma; in ath_txstatus_setup()
2811 u8 txs_len = sc->sc_ah->caps.txs_len; in ath_txstatus_setup()
2813 dd->dd_desc_len = size * txs_len; in ath_txstatus_setup()
2814 dd->dd_desc = dmam_alloc_coherent(sc->dev, dd->dd_desc_len, in ath_txstatus_setup()
2815 &dd->dd_desc_paddr, GFP_KERNEL); in ath_txstatus_setup()
2816 if (!dd->dd_desc) in ath_txstatus_setup()
2817 return -ENOMEM; in ath_txstatus_setup()
2828 ath9k_hw_setup_statusring(sc->sc_ah, sc->txsdma.dd_desc, in ath_tx_edma_init()
2829 sc->txsdma.dd_desc_paddr, in ath_tx_edma_init()
2837 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath_tx_init()
2840 spin_lock_init(&sc->tx.txbuflock); in ath_tx_init()
2842 error = ath_descdma_setup(sc, &sc->tx.txdma, &sc->tx.txbuf, in ath_tx_init()
2843 "tx", nbufs, 1, 1); in ath_tx_init()
2846 "Failed to allocate tx descriptors: %d\n", error); in ath_tx_init()
2850 error = ath_descdma_setup(sc, &sc->beacon.bdma, &sc->beacon.bbuf, in ath_tx_init()
2858 if (sc->sc_ah->caps.hw_caps & ATH9K_HW_CAP_EDMA) in ath_tx_init()
2871 tid->an = an; in ath_tx_node_init()
2872 tid->tidno = tidno; in ath_tx_node_init()
2873 tid->seq_start = tid->seq_next = 0; in ath_tx_node_init()
2874 tid->baw_size = WME_MAX_BA; in ath_tx_node_init()
2875 tid->baw_head = tid->baw_tail = 0; in ath_tx_node_init()
2876 tid->active = false; in ath_tx_node_init()
2877 tid->clear_ps_filter = true; in ath_tx_node_init()
2878 __skb_queue_head_init(&tid->retry_q); in ath_tx_node_init()
2879 INIT_LIST_HEAD(&tid->list); in ath_tx_node_init()
2881 tid->txq = sc->tx.txq_map[acno]; in ath_tx_node_init()
2883 if (!an->sta) in ath_tx_node_init()
2898 txq = tid->txq; in ath_tx_node_cleanup()
2902 if (!list_empty(&tid->list)) in ath_tx_node_cleanup()
2903 list_del_init(&tid->list); in ath_tx_node_cleanup()
2906 tid->active = false; in ath_tx_node_cleanup()
2910 if (!an->sta) in ath_tx_node_cleanup()
2922 struct ieee80211_hdr *hdr = (struct ieee80211_hdr *) skb->data; in ath9k_tx99_send()
2924 struct ath_common *common = ath9k_hw_common(sc->sc_ah); in ath9k_tx99_send()
2928 padpos = ieee80211_hdrlen(hdr->frame_control); in ath9k_tx99_send()
2931 if (padsize && skb->len > padpos) { in ath9k_tx99_send()
2935 return -EINVAL; in ath9k_tx99_send()
2939 memmove(skb->data, skb->data + padsize, padpos); in ath9k_tx99_send()
2942 fi->keyix = ATH9K_TXKEYIX_INVALID; in ath9k_tx99_send()
2943 fi->framelen = skb->len + FCS_LEN; in ath9k_tx99_send()
2944 fi->keytype = ATH9K_KEY_TYPE_CLEAR; in ath9k_tx99_send()
2946 bf = ath_tx_setup_buffer(sc, txctl->txq, NULL, skb); in ath9k_tx99_send()
2949 return -EINVAL; in ath9k_tx99_send()
2952 ath_set_rates(sc->tx99_vif, NULL, bf); in ath9k_tx99_send()
2954 ath9k_hw_set_desc_link(sc->sc_ah, bf->bf_desc, bf->bf_daddr); in ath9k_tx99_send()
2955 ath9k_hw_tx99_start(sc->sc_ah, txctl->txq->axq_qnum); in ath9k_tx99_send()
2957 ath_tx_send_normal(sc, txctl->txq, NULL, skb); in ath9k_tx99_send()