Lines matching references to the identifier dev in the mt76 USB helpers (drivers/net/wireless/mediatek/mt76/usb.c). Each entry shows the source line number, the matching code, and the enclosing function; a trailing "argument" or "local" notes whether dev is bound as a function parameter or a local variable at that reference.

18 int __mt76u_vendor_request(struct mt76_dev *dev, u8 req, u8 req_type,  in __mt76u_vendor_request()  argument
21 struct usb_interface *uintf = to_usb_interface(dev->dev); in __mt76u_vendor_request()
26 lockdep_assert_held(&dev->usb.usb_ctrl_mtx); in __mt76u_vendor_request()
31 if (test_bit(MT76_REMOVED, &dev->phy.state)) in __mt76u_vendor_request()
37 set_bit(MT76_REMOVED, &dev->phy.state); in __mt76u_vendor_request()
43 dev_err(dev->dev, "vendor request req:%02x off:%04x failed:%d\n", in __mt76u_vendor_request()
49 int mt76u_vendor_request(struct mt76_dev *dev, u8 req, in mt76u_vendor_request() argument
55 mutex_lock(&dev->usb.usb_ctrl_mtx); in mt76u_vendor_request()
56 ret = __mt76u_vendor_request(dev, req, req_type, in mt76u_vendor_request()
58 trace_usb_reg_wr(dev, offset, val); in mt76u_vendor_request()
59 mutex_unlock(&dev->usb.usb_ctrl_mtx); in mt76u_vendor_request()
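
Lines 49-59 show the locking wrapper pattern used throughout this file: mt76u_vendor_request() takes usb_ctrl_mtx, delegates to the lock-free __mt76u_vendor_request() (which asserts the mutex at line 26), traces the access, and drops the lock. A minimal sketch of that shape follows; the tail of the parameter list (value/offset plus the data buffer) is truncated in the listing above, so the full signature used here is an assumption, and the control-transfer body itself is elided.

/* Sketch only, assuming the usual mt76.h types; not the verbatim driver
 * function.  The val/offset/buf/len parameters are inferred from the
 * truncated signatures at lines 18 and 49.
 */
static int example_vendor_request(struct mt76_dev *dev, u8 req, u8 req_type,
				  u16 val, u16 offset, void *buf, size_t len)
{
	int ret;

	mutex_lock(&dev->usb.usb_ctrl_mtx);	/* serialize EP0 control traffic */
	ret = __mt76u_vendor_request(dev, req, req_type, val, offset,
				     buf, len);	/* requires usb_ctrl_mtx held */
	trace_usb_reg_wr(dev, offset, val);
	mutex_unlock(&dev->usb.usb_ctrl_mtx);

	return ret;
}
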
65 u32 ___mt76u_rr(struct mt76_dev *dev, u8 req, u8 req_type, u32 addr) in ___mt76u_rr() argument
67 struct mt76_usb *usb = &dev->usb; in ___mt76u_rr()
71 ret = __mt76u_vendor_request(dev, req, req_type, addr >> 16, in ___mt76u_rr()
75 trace_usb_reg_rr(dev, addr, data); in ___mt76u_rr()
81 static u32 __mt76u_rr(struct mt76_dev *dev, u32 addr) in __mt76u_rr() argument
97 return ___mt76u_rr(dev, req, USB_DIR_IN | USB_TYPE_VENDOR, in __mt76u_rr()
101 static u32 mt76u_rr(struct mt76_dev *dev, u32 addr) in mt76u_rr() argument
105 mutex_lock(&dev->usb.usb_ctrl_mtx); in mt76u_rr()
106 ret = __mt76u_rr(dev, addr); in mt76u_rr()
107 mutex_unlock(&dev->usb.usb_ctrl_mtx); in mt76u_rr()
112 void ___mt76u_wr(struct mt76_dev *dev, u8 req, u8 req_type, in ___mt76u_wr() argument
115 struct mt76_usb *usb = &dev->usb; in ___mt76u_wr()
118 __mt76u_vendor_request(dev, req, req_type, addr >> 16, in ___mt76u_wr()
120 trace_usb_reg_wr(dev, addr, val); in ___mt76u_wr()
124 static void __mt76u_wr(struct mt76_dev *dev, u32 addr, u32 val) in __mt76u_wr() argument
136 ___mt76u_wr(dev, req, USB_DIR_OUT | USB_TYPE_VENDOR, in __mt76u_wr()
140 static void mt76u_wr(struct mt76_dev *dev, u32 addr, u32 val) in mt76u_wr() argument
142 mutex_lock(&dev->usb.usb_ctrl_mtx); in mt76u_wr()
143 __mt76u_wr(dev, addr, val); in mt76u_wr()
144 mutex_unlock(&dev->usb.usb_ctrl_mtx); in mt76u_wr()
147 static u32 mt76u_rmw(struct mt76_dev *dev, u32 addr, in mt76u_rmw() argument
150 mutex_lock(&dev->usb.usb_ctrl_mtx); in mt76u_rmw()
151 val |= __mt76u_rr(dev, addr) & ~mask; in mt76u_rmw()
152 __mt76u_wr(dev, addr, val); in mt76u_rmw()
153 mutex_unlock(&dev->usb.usb_ctrl_mtx); in mt76u_rmw()
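
The register accessors layer on the same mutex: mt76u_rr() and mt76u_wr() each take usb_ctrl_mtx around the lock-free __mt76u_rr()/__mt76u_wr(), and mt76u_rmw() at lines 147-153 merges a read and a write inside one critical section so no other register access can slip in between. A sketch of that composition, assuming the mt76.h types; the return of the merged value is added here and is not part of the lines listed above.

/* Sketch of the read-modify-write composition at lines 147-153.  Both
 * __mt76u_rr() and __mt76u_wr() expect usb_ctrl_mtx to be held, so the
 * read, the merge and the write-back form one register update that is
 * atomic with respect to the other accessors in this file.
 */
static u32 example_rmw(struct mt76_dev *dev, u32 addr, u32 mask, u32 val)
{
	mutex_lock(&dev->usb.usb_ctrl_mtx);
	val |= __mt76u_rr(dev, addr) & ~mask;	/* keep bits outside mask */
	__mt76u_wr(dev, addr, val);		/* write the merged value */
	mutex_unlock(&dev->usb.usb_ctrl_mtx);

	return val;				/* assumed return, see above */
}
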
158 static void mt76u_copy(struct mt76_dev *dev, u32 offset, in mt76u_copy() argument
161 struct mt76_usb *usb = &dev->usb; in mt76u_copy()
178 ret = __mt76u_vendor_request(dev, MT_VEND_MULTI_WRITE, in mt76u_copy()
190 void mt76u_read_copy(struct mt76_dev *dev, u32 offset, in mt76u_read_copy() argument
193 struct mt76_usb *usb = &dev->usb; in mt76u_read_copy()
201 ret = __mt76u_vendor_request(dev, MT_VEND_READ_EXT, in mt76u_read_copy()
215 void mt76u_single_wr(struct mt76_dev *dev, const u8 req, in mt76u_single_wr() argument
218 mutex_lock(&dev->usb.usb_ctrl_mtx); in mt76u_single_wr()
219 __mt76u_vendor_request(dev, req, in mt76u_single_wr()
222 __mt76u_vendor_request(dev, req, in mt76u_single_wr()
225 mutex_unlock(&dev->usb.usb_ctrl_mtx); in mt76u_single_wr()
230 mt76u_req_wr_rp(struct mt76_dev *dev, u32 base, in mt76u_req_wr_rp() argument
233 struct mt76_usb *usb = &dev->usb; in mt76u_req_wr_rp()
237 __mt76u_wr(dev, base + data->reg, data->value); in mt76u_req_wr_rp()
247 mt76u_wr_rp(struct mt76_dev *dev, u32 base, in mt76u_wr_rp() argument
250 if (test_bit(MT76_STATE_MCU_RUNNING, &dev->phy.state)) in mt76u_wr_rp()
251 return dev->mcu_ops->mcu_wr_rp(dev, base, data, n); in mt76u_wr_rp()
253 return mt76u_req_wr_rp(dev, base, data, n); in mt76u_wr_rp()
257 mt76u_req_rd_rp(struct mt76_dev *dev, u32 base, struct mt76_reg_pair *data, in mt76u_req_rd_rp() argument
260 struct mt76_usb *usb = &dev->usb; in mt76u_req_rd_rp()
264 data->value = __mt76u_rr(dev, base + data->reg); in mt76u_req_rd_rp()
274 mt76u_rd_rp(struct mt76_dev *dev, u32 base, in mt76u_rd_rp() argument
277 if (test_bit(MT76_STATE_MCU_RUNNING, &dev->phy.state)) in mt76u_rd_rp()
278 return dev->mcu_ops->mcu_rd_rp(dev, base, data, n); in mt76u_rd_rp()
280 return mt76u_req_rd_rp(dev, base, data, n); in mt76u_rd_rp()
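
Lines 247-253 and 274-280 show how register-pair batches are routed: once the MCU firmware is running (MT76_STATE_MCU_RUNNING), reads and writes go through dev->mcu_ops, otherwise they fall back to per-register accesses via mt76u_req_wr_rp()/mt76u_req_rd_rp(). A sketch of the write-side dispatch; the data/n parameter names are taken from the calls above and the const qualifier is an assumption.

/* Sketch of the dispatch at lines 247-253: prefer the MCU path once the
 * firmware is up, otherwise issue one locked register write per pair.
 */
static int example_wr_rp(struct mt76_dev *dev, u32 base,
			 const struct mt76_reg_pair *data, int n)
{
	if (test_bit(MT76_STATE_MCU_RUNNING, &dev->phy.state))
		return dev->mcu_ops->mcu_wr_rp(dev, base, data, n);

	return mt76u_req_wr_rp(dev, base, data, n);	/* plain USB path */
}
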
283 static bool mt76u_check_sg(struct mt76_dev *dev) in mt76u_check_sg() argument
285 struct usb_interface *uintf = to_usb_interface(dev->dev); in mt76u_check_sg()
320 mt76u_fill_rx_sg(struct mt76_dev *dev, struct mt76_queue *q, struct urb *urb, in mt76u_fill_rx_sg() argument
353 mt76u_refill_rx(struct mt76_dev *dev, struct mt76_queue *q, in mt76u_refill_rx() argument
356 enum mt76_rxq_id qid = q - &dev->q_rx[MT_RXQ_MAIN]; in mt76u_refill_rx()
359 if (qid == MT_RXQ_MAIN && dev->usb.sg_en) in mt76u_refill_rx()
360 return mt76u_fill_rx_sg(dev, q, urb, nsgs); in mt76u_refill_rx()
369 mt76u_urb_alloc(struct mt76_dev *dev, struct mt76_queue_entry *e, in mt76u_urb_alloc() argument
374 if (dev->usb.sg_en) in mt76u_urb_alloc()
383 if (dev->usb.sg_en && sg_max_size > 0) in mt76u_urb_alloc()
390 mt76u_rx_urb_alloc(struct mt76_dev *dev, struct mt76_queue *q, in mt76u_rx_urb_alloc() argument
393 enum mt76_rxq_id qid = q - &dev->q_rx[MT_RXQ_MAIN]; in mt76u_rx_urb_alloc()
397 err = mt76u_urb_alloc(dev, e, sg_size); in mt76u_rx_urb_alloc()
401 return mt76u_refill_rx(dev, q, e->urb, sg_size); in mt76u_rx_urb_alloc()
418 mt76u_fill_bulk_urb(struct mt76_dev *dev, int dir, int index, in mt76u_fill_bulk_urb() argument
422 struct usb_interface *uintf = to_usb_interface(dev->dev); in mt76u_fill_bulk_urb()
427 pipe = usb_rcvbulkpipe(udev, dev->usb.in_ep[index]); in mt76u_fill_bulk_urb()
429 pipe = usb_sndbulkpipe(udev, dev->usb.out_ep[index]); in mt76u_fill_bulk_urb()
431 urb->dev = udev; in mt76u_fill_bulk_urb()
455 mt76u_get_rx_entry_len(struct mt76_dev *dev, u8 *data, in mt76u_get_rx_entry_len() argument
461 if (dev->drv->drv_flags & MT_DRV_RX_DMA_HDR) in mt76u_get_rx_entry_len()
473 mt76u_build_rx_skb(struct mt76_dev *dev, void *data, in mt76u_build_rx_skb() argument
476 int head_room, drv_flags = dev->drv->drv_flags; in mt76u_build_rx_skb()
512 mt76u_process_rx_entry(struct mt76_dev *dev, struct urb *urb, in mt76u_process_rx_entry() argument
517 int len, nsgs = 1, head_room, drv_flags = dev->drv->drv_flags; in mt76u_process_rx_entry()
520 if (!test_bit(MT76_STATE_INITIALIZED, &dev->phy.state)) in mt76u_process_rx_entry()
523 len = mt76u_get_rx_entry_len(dev, data, urb->actual_length); in mt76u_process_rx_entry()
531 dev->drv->rx_check && !dev->drv->rx_check(dev, data, data_len)) in mt76u_process_rx_entry()
534 skb = mt76u_build_rx_skb(dev, data, data_len, buf_size); in mt76u_process_rx_entry()
550 dev->drv->rx_skb(dev, MT_RXQ_MAIN, skb, NULL); in mt76u_process_rx_entry()
557 struct mt76_dev *dev = dev_get_drvdata(&urb->dev->dev); in mt76u_complete_rx() local
561 trace_rx_urb(dev, urb); in mt76u_complete_rx()
570 dev_err_ratelimited(dev->dev, "rx urb failed: %d\n", in mt76u_complete_rx()
583 mt76_worker_schedule(&dev->usb.rx_worker); in mt76u_complete_rx()
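
Lines 557-583 outline the RX URB completion handler: it recovers the mt76_dev from the USB device's drvdata (set up later in __mt76u_init(), line 1088), traces the URB, reports failures rate-limited, and defers all real work to the RX worker. A simplified sketch of that shape; the status-code filtering (shutdown/disconnect errors are ignored silently in the driver) and the per-queue bookkeeping done under the queue lock are left out.

/* Sketch of the completion shape at lines 557-583: do as little as
 * possible in URB-completion context and let dev->usb.rx_worker do the
 * processing.  Queue bookkeeping and benign-error filtering elided.
 */
static void example_complete_rx(struct urb *urb)
{
	struct mt76_dev *dev = dev_get_drvdata(&urb->dev->dev);

	trace_rx_urb(dev, urb);

	if (urb->status) {
		dev_err_ratelimited(dev->dev, "rx urb failed: %d\n",
				    urb->status);
		return;
	}

	mt76_worker_schedule(&dev->usb.rx_worker);
}
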
589 mt76u_submit_rx_buf(struct mt76_dev *dev, enum mt76_rxq_id qid, in mt76u_submit_rx_buf() argument
594 mt76u_fill_bulk_urb(dev, USB_DIR_IN, ep, urb, in mt76u_submit_rx_buf()
595 mt76u_complete_rx, &dev->q_rx[qid]); in mt76u_submit_rx_buf()
596 trace_submit_urb(dev, urb); in mt76u_submit_rx_buf()
602 mt76u_process_rx_queue(struct mt76_dev *dev, struct mt76_queue *q) in mt76u_process_rx_queue() argument
604 int qid = q - &dev->q_rx[MT_RXQ_MAIN]; in mt76u_process_rx_queue()
613 count = mt76u_process_rx_entry(dev, urb, q->buf_size); in mt76u_process_rx_queue()
615 err = mt76u_refill_rx(dev, q, urb, count); in mt76u_process_rx_queue()
619 mt76u_submit_rx_buf(dev, qid, urb); in mt76u_process_rx_queue()
623 mt76_rx_poll_complete(dev, MT_RXQ_MAIN, NULL); in mt76u_process_rx_queue()
631 struct mt76_dev *dev = container_of(usb, struct mt76_dev, usb); in mt76u_rx_worker() local
635 mt76_for_each_q_rx(dev, i) in mt76u_rx_worker()
636 mt76u_process_rx_queue(dev, &dev->q_rx[i]); in mt76u_rx_worker()
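
The RX worker (lines 631-636) walks every RX queue and runs the loop at lines 602-623: for each completed URB it hands the data to the driver, refills the buffers, and resubmits. The per-URB step could be sketched as below; the dequeueing of completed URBs and the closing mt76_rx_poll_complete(dev, MT_RXQ_MAIN, NULL) call at line 623 are not shown.

/* Sketch of one iteration of the RX loop at lines 602-619: process,
 * refill, resubmit.  A refill failure (no buffers) stops the recycle so
 * the URB is not resubmitted empty.
 */
static int example_recycle_rx_urb(struct mt76_dev *dev,
				  struct mt76_queue *q, struct urb *urb)
{
	int qid = q - &dev->q_rx[MT_RXQ_MAIN];
	int count, err;

	count = mt76u_process_rx_entry(dev, urb, q->buf_size);
	if (count > 0) {
		err = mt76u_refill_rx(dev, q, urb, count);
		if (err < 0)
			return err;
	}

	return mt76u_submit_rx_buf(dev, qid, urb);
}
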
641 mt76u_submit_rx_buffers(struct mt76_dev *dev, enum mt76_rxq_id qid) in mt76u_submit_rx_buffers() argument
643 struct mt76_queue *q = &dev->q_rx[qid]; in mt76u_submit_rx_buffers()
649 err = mt76u_submit_rx_buf(dev, qid, q->entry[i].urb); in mt76u_submit_rx_buffers()
661 mt76u_alloc_rx_queue(struct mt76_dev *dev, enum mt76_rxq_id qid) in mt76u_alloc_rx_queue() argument
663 struct mt76_queue *q = &dev->q_rx[qid]; in mt76u_alloc_rx_queue()
666 err = mt76_create_page_pool(dev, q); in mt76u_alloc_rx_queue()
671 q->entry = devm_kcalloc(dev->dev, in mt76u_alloc_rx_queue()
681 err = mt76u_rx_urb_alloc(dev, q, &q->entry[i]); in mt76u_alloc_rx_queue()
686 return mt76u_submit_rx_buffers(dev, qid); in mt76u_alloc_rx_queue()
689 int mt76u_alloc_mcu_queue(struct mt76_dev *dev) in mt76u_alloc_mcu_queue() argument
691 return mt76u_alloc_rx_queue(dev, MT_RXQ_MCU); in mt76u_alloc_mcu_queue()
696 mt76u_free_rx_queue(struct mt76_dev *dev, struct mt76_queue *q) in mt76u_free_rx_queue() argument
711 static void mt76u_free_rx(struct mt76_dev *dev) in mt76u_free_rx() argument
715 mt76_worker_teardown(&dev->usb.rx_worker); in mt76u_free_rx()
717 mt76_for_each_q_rx(dev, i) in mt76u_free_rx()
718 mt76u_free_rx_queue(dev, &dev->q_rx[i]); in mt76u_free_rx()
721 void mt76u_stop_rx(struct mt76_dev *dev) in mt76u_stop_rx() argument
725 mt76_worker_disable(&dev->usb.rx_worker); in mt76u_stop_rx()
727 mt76_for_each_q_rx(dev, i) { in mt76u_stop_rx()
728 struct mt76_queue *q = &dev->q_rx[i]; in mt76u_stop_rx()
737 int mt76u_resume_rx(struct mt76_dev *dev) in mt76u_resume_rx() argument
741 mt76_for_each_q_rx(dev, i) { in mt76u_resume_rx()
742 struct mt76_queue *q = &dev->q_rx[i]; in mt76u_resume_rx()
748 err = mt76u_submit_rx_buffers(dev, i); in mt76u_resume_rx()
753 mt76_worker_enable(&dev->usb.rx_worker); in mt76u_resume_rx()
762 struct mt76_dev *dev = container_of(usb, struct mt76_dev, usb); in mt76u_status_worker() local
767 if (!test_bit(MT76_STATE_RUNNING, &dev->phy.state)) in mt76u_status_worker()
771 q = dev->phy.q_tx[i]; in mt76u_status_worker()
782 mt76_queue_tx_complete(dev, q, &entry); in mt76u_status_worker()
786 wake_up(&dev->tx_wait); in mt76u_status_worker()
788 mt76_worker_schedule(&dev->tx_worker); in mt76u_status_worker()
791 if (dev->drv->tx_status_data && in mt76u_status_worker()
792 !test_and_set_bit(MT76_READING_STATS, &dev->phy.state)) in mt76u_status_worker()
793 queue_work(dev->wq, &dev->usb.stat_work); in mt76u_status_worker()
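
At the end of the status worker (lines 791-793), TX status collection is kicked off at most once at a time: MT76_READING_STATS acts as a guard bit that mt76u_tx_status_data() clears again at line 818 when it is done. A small sketch of that guard:

/* Sketch of the single-shot kick at lines 791-793: only queue stat_work
 * if the driver provides tx_status_data and no collection run is
 * already in flight.
 */
static void example_kick_status_work(struct mt76_dev *dev)
{
	if (dev->drv->tx_status_data &&
	    !test_and_set_bit(MT76_READING_STATS, &dev->phy.state))
		queue_work(dev->wq, &dev->usb.stat_work);
}
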
799 struct mt76_dev *dev; in mt76u_tx_status_data() local
804 dev = container_of(usb, struct mt76_dev, usb); in mt76u_tx_status_data()
807 if (test_bit(MT76_REMOVED, &dev->phy.state)) in mt76u_tx_status_data()
810 if (!dev->drv->tx_status_data(dev, &update)) in mt76u_tx_status_data()
815 if (count && test_bit(MT76_STATE_RUNNING, &dev->phy.state)) in mt76u_tx_status_data()
816 queue_work(dev->wq, &usb->stat_work); in mt76u_tx_status_data()
818 clear_bit(MT76_READING_STATS, &dev->phy.state); in mt76u_tx_status_data()
823 struct mt76_dev *dev = dev_get_drvdata(&urb->dev->dev); in mt76u_complete_tx() local
827 dev_err(dev->dev, "tx urb failed: %d\n", urb->status); in mt76u_complete_tx()
830 mt76_worker_schedule(&dev->usb.status_worker); in mt76u_complete_tx()
834 mt76u_tx_setup_buffers(struct mt76_dev *dev, struct sk_buff *skb, in mt76u_tx_setup_buffers() argument
839 if (!dev->usb.sg_en) { in mt76u_tx_setup_buffers()
853 mt76u_tx_queue_skb(struct mt76_dev *dev, struct mt76_queue *q, in mt76u_tx_queue_skb() argument
867 err = dev->drv->tx_prepare_skb(dev, NULL, qid, wcid, sta, &tx_info); in mt76u_tx_queue_skb()
871 err = mt76u_tx_setup_buffers(dev, tx_info.skb, q->entry[idx].urb); in mt76u_tx_queue_skb()
875 mt76u_fill_bulk_urb(dev, USB_DIR_OUT, q2ep(q->hw_idx), in mt76u_tx_queue_skb()
887 static void mt76u_tx_kick(struct mt76_dev *dev, struct mt76_queue *q) in mt76u_tx_kick() argument
895 trace_submit_urb(dev, urb); in mt76u_tx_kick()
899 set_bit(MT76_REMOVED, &dev->phy.state); in mt76u_tx_kick()
901 dev_err(dev->dev, "tx urb submit failed:%d\n", in mt76u_tx_kick()
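
Lines 887-901 show the TX kick error handling: each queued URB is submitted, and a failing submit either latches MT76_REMOVED (device gone) or logs the error. A sketch of the per-URB submit; the walk over the queue's pending entries is omitted and the GFP_ATOMIC flag is an assumption.

/* Sketch of the submit/error pattern at lines 887-901. */
static void example_tx_kick_one(struct mt76_dev *dev, struct urb *urb)
{
	int err;

	trace_submit_urb(dev, urb);
	err = usb_submit_urb(urb, GFP_ATOMIC);	/* flag is an assumption */
	if (err < 0) {
		if (err == -ENODEV)
			set_bit(MT76_REMOVED, &dev->phy.state);
		else
			dev_err(dev->dev, "tx urb submit failed:%d\n", err);
	}
}
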
909 static u8 mt76u_ac_to_hwq(struct mt76_dev *dev, u8 ac) in mt76u_ac_to_hwq() argument
911 if (mt76_chip(dev) == 0x7663) { in mt76u_ac_to_hwq()
929 static int mt76u_alloc_tx(struct mt76_dev *dev) in mt76u_alloc_tx() argument
936 dev->phy.q_tx[i] = dev->phy.q_tx[0]; in mt76u_alloc_tx()
940 q = devm_kzalloc(dev->dev, sizeof(*q), GFP_KERNEL); in mt76u_alloc_tx()
945 q->hw_idx = mt76u_ac_to_hwq(dev, i); in mt76u_alloc_tx()
947 dev->phy.q_tx[i] = q; in mt76u_alloc_tx()
949 q->entry = devm_kcalloc(dev->dev, in mt76u_alloc_tx()
957 err = mt76u_urb_alloc(dev, &q->entry[j], in mt76u_alloc_tx()
966 static void mt76u_free_tx(struct mt76_dev *dev) in mt76u_free_tx() argument
970 mt76_worker_teardown(&dev->usb.status_worker); in mt76u_free_tx()
976 q = dev->phy.q_tx[i]; in mt76u_free_tx()
987 void mt76u_stop_tx(struct mt76_dev *dev) in mt76u_stop_tx() argument
991 mt76_worker_disable(&dev->usb.status_worker); in mt76u_stop_tx()
993 ret = wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(&dev->phy), in mt76u_stop_tx()
1000 dev_err(dev->dev, "timed out waiting for pending tx\n"); in mt76u_stop_tx()
1003 q = dev->phy.q_tx[i]; in mt76u_stop_tx()
1011 mt76_worker_disable(&dev->tx_worker); in mt76u_stop_tx()
1017 q = dev->phy.q_tx[i]; in mt76u_stop_tx()
1024 mt76_queue_tx_complete(dev, q, &entry); in mt76u_stop_tx()
1028 mt76_worker_enable(&dev->tx_worker); in mt76u_stop_tx()
1031 cancel_work_sync(&dev->usb.stat_work); in mt76u_stop_tx()
1032 clear_bit(MT76_READING_STATS, &dev->phy.state); in mt76u_stop_tx()
1034 mt76_worker_enable(&dev->usb.status_worker); in mt76u_stop_tx()
1036 mt76_tx_status_check(dev, true); in mt76u_stop_tx()
1040 void mt76u_queues_deinit(struct mt76_dev *dev) in mt76u_queues_deinit() argument
1042 mt76u_stop_rx(dev); in mt76u_queues_deinit()
1043 mt76u_stop_tx(dev); in mt76u_queues_deinit()
1045 mt76u_free_rx(dev); in mt76u_queues_deinit()
1046 mt76u_free_tx(dev); in mt76u_queues_deinit()
1050 int mt76u_alloc_queues(struct mt76_dev *dev) in mt76u_alloc_queues() argument
1054 err = mt76u_alloc_rx_queue(dev, MT_RXQ_MAIN); in mt76u_alloc_queues()
1058 return mt76u_alloc_tx(dev); in mt76u_alloc_queues()
1067 int __mt76u_init(struct mt76_dev *dev, struct usb_interface *intf, in __mt76u_init() argument
1071 struct mt76_usb *usb = &dev->usb; in __mt76u_init()
1080 usb->data = devm_kmalloc(dev->dev, usb->data_len, GFP_KERNEL); in __mt76u_init()
1085 dev->bus = ops; in __mt76u_init()
1086 dev->queue_ops = &usb_queue_ops; in __mt76u_init()
1088 dev_set_drvdata(&udev->dev, dev); in __mt76u_init()
1090 usb->sg_en = mt76u_check_sg(dev); in __mt76u_init()
1096 err = mt76_worker_setup(dev->hw, &usb->rx_worker, mt76u_rx_worker, in __mt76u_init()
1101 err = mt76_worker_setup(dev->hw, &usb->status_worker, in __mt76u_init()
1113 int mt76u_init(struct mt76_dev *dev, struct usb_interface *intf) in mt76u_init() argument
1126 return __mt76u_init(dev, intf, &bus_ops); in mt76u_init()
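
Finally, lines 1067-1126 give the initialization order: allocate the bounce buffer used by the vendor-request helpers, install the bus and queue ops, store the mt76_dev in the USB device's drvdata (which is what the completion handlers at lines 557 and 823 read back), detect scatter-gather support, and start the RX and status workers, with mt76u_init() wrapping it all around the default bus_ops. A condensed sketch follows; the parameter list, the worker thread names and the error unwinding are assumptions, and endpoint parsing plus the data_len sizing are elided.

/* Condensed sketch of the setup order in __mt76u_init() (lines
 * 1067-1101).  Not the verbatim function: the real one takes the
 * usb_interface (line 1067), parses endpoints and sizes usb->data_len
 * before the allocation below.
 */
static int example_usb_init(struct mt76_dev *dev, struct usb_device *udev,
			    struct mt76_bus_ops *ops)
{
	struct mt76_usb *usb = &dev->usb;
	int err;

	usb->data = devm_kmalloc(dev->dev, usb->data_len, GFP_KERNEL);
	if (!usb->data)			/* bounce buffer for EP0 transfers */
		return -ENOMEM;

	dev->bus = ops;				/* register accessors above */
	dev->queue_ops = &usb_queue_ops;	/* TX/RX queue ops */

	dev_set_drvdata(&udev->dev, dev);	/* read back in completions */
	usb->sg_en = mt76u_check_sg(dev);

	err = mt76_worker_setup(dev->hw, &usb->rx_worker, mt76u_rx_worker,
				"usb-rx");	/* name is an assumption */
	if (err)
		return err;

	return mt76_worker_setup(dev->hw, &usb->status_worker,
				 mt76u_status_worker, "usb-status");
}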