Lines matching refs: urb (uses of struct urb in drivers/usb/core/hcd.c)

457 static int rh_call_control (struct usb_hcd *hcd, struct urb *urb)  in rh_call_control()  argument
461 u8 *ubuf = urb->transfer_buffer; in rh_call_control()
473 status = usb_hcd_link_urb_to_ep(hcd, urb); in rh_call_control()
477 urb->hcpriv = hcd; /* Indicate it's queued */ in rh_call_control()
479 cmd = (struct usb_ctrlrequest *) urb->setup_packet; in rh_call_control()
485 if (wLength > urb->transfer_buffer_length) in rh_call_control()
502 urb->actual_length = 0; in rh_call_control()
597 urb->actual_length = rh_string(wValue & 0xff, in rh_call_control()
686 if (urb->transfer_buffer_length < len) in rh_call_control()
687 len = urb->transfer_buffer_length; in rh_call_control()
688 urb->actual_length = len; in rh_call_control()
712 usb_hcd_unlink_urb_from_ep(hcd, urb); in rh_call_control()
713 usb_hcd_giveback_urb(hcd, urb, status); in rh_call_control()
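The rh_call_control() hits above are the software root-hub path: a control URB whose target is the root hub never reaches the host controller hardware. The core links it to ep0, interprets the setup packet itself, copies the reply into the transfer buffer, then unlinks and gives the URB back. A minimal, hypothetical sketch of a request that ends up here (the example_ name and buffer handling are assumptions, not code from hcd.c):

#include <linux/usb.h>
#include <linux/slab.h>

/* Read the device descriptor of a root hub.  usb_control_msg() builds a
 * control URB internally; because urb->dev is a root hub, the submit path
 * routes it to rh_call_control() instead of the hardware. */
static int example_read_roothub_descriptor(struct usb_device *rhdev)
{
	struct usb_device_descriptor *desc;
	int ret;

	desc = kmalloc(sizeof(*desc), GFP_KERNEL);	/* DMA-able buffer */
	if (!desc)
		return -ENOMEM;

	ret = usb_control_msg(rhdev, usb_rcvctrlpipe(rhdev, 0),
			      USB_REQ_GET_DESCRIPTOR, USB_DIR_IN,
			      USB_DT_DEVICE << 8, 0,
			      desc, sizeof(*desc), USB_CTRL_GET_TIMEOUT);

	kfree(desc);
	return ret < 0 ? ret : 0;
}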
729 struct urb *urb; in usb_hcd_poll_rh_status() local
745 urb = hcd->status_urb; in usb_hcd_poll_rh_status()
746 if (urb) { in usb_hcd_poll_rh_status()
749 if (urb->transfer_buffer_length >= length) { in usb_hcd_poll_rh_status()
753 length = urb->transfer_buffer_length; in usb_hcd_poll_rh_status()
755 urb->actual_length = length; in usb_hcd_poll_rh_status()
756 memcpy(urb->transfer_buffer, buffer, length); in usb_hcd_poll_rh_status()
758 usb_hcd_unlink_urb_from_ep(hcd, urb); in usb_hcd_poll_rh_status()
759 usb_hcd_giveback_urb(hcd, urb, status); in usb_hcd_poll_rh_status()
787 static int rh_queue_status (struct usb_hcd *hcd, struct urb *urb) in rh_queue_status() argument
791 unsigned len = 1 + (urb->dev->maxchild / 8); in rh_queue_status()
794 if (hcd->status_urb || urb->transfer_buffer_length < len) { in rh_queue_status()
800 retval = usb_hcd_link_urb_to_ep(hcd, urb); in rh_queue_status()
804 hcd->status_urb = urb; in rh_queue_status()
805 urb->hcpriv = hcd; /* indicate it's queued */ in rh_queue_status()
818 static int rh_urb_enqueue (struct usb_hcd *hcd, struct urb *urb) in rh_urb_enqueue() argument
820 if (usb_endpoint_xfer_int(&urb->ep->desc)) in rh_urb_enqueue()
821 return rh_queue_status (hcd, urb); in rh_urb_enqueue()
822 if (usb_endpoint_xfer_control(&urb->ep->desc)) in rh_urb_enqueue()
823 return rh_call_control (hcd, urb); in rh_urb_enqueue()
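rh_urb_enqueue() is the dispatcher for root-hub URBs: interrupt URBs become the status-change poll handled by rh_queue_status() and usb_hcd_poll_rh_status(), while control URBs go to rh_call_control(). A rough, hypothetical sketch of the kind of interrupt URB the hub driver submits that lands in rh_queue_status() (the names and the interval value are illustrative only):

#include <linux/usb.h>

static void example_status_irq(struct urb *urb)
{
	/* change bitmap is in urb->transfer_buffer; a real handler would
	 * process it and resubmit the URB */
}

static struct urb *example_start_status_poll(struct usb_device *hdev,
					     void *buf, int len)
{
	struct urb *urb = usb_alloc_urb(0, GFP_KERNEL);

	if (!urb)
		return NULL;

	/* endpoint 1 IN is the (virtual) status-change endpoint of a hub */
	usb_fill_int_urb(urb, hdev, usb_rcvintpipe(hdev, 1),
			 buf, len, example_status_irq, NULL, 12);
	if (usb_submit_urb(urb, GFP_KERNEL)) {
		usb_free_urb(urb);
		return NULL;
	}
	return urb;	/* caller later does usb_kill_urb() + usb_free_urb() */
}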
832 static int usb_rh_urb_dequeue(struct usb_hcd *hcd, struct urb *urb, int status) in usb_rh_urb_dequeue() argument
838 rc = usb_hcd_check_unlink_urb(hcd, urb, status); in usb_rh_urb_dequeue()
842 if (usb_endpoint_num(&urb->ep->desc) == 0) { /* Control URB */ in usb_rh_urb_dequeue()
848 if (urb == hcd->status_urb) { in usb_rh_urb_dequeue()
850 usb_hcd_unlink_urb_from_ep(hcd, urb); in usb_rh_urb_dequeue()
851 usb_hcd_giveback_urb(hcd, urb, status); in usb_rh_urb_dequeue()
1132 int usb_hcd_link_urb_to_ep(struct usb_hcd *hcd, struct urb *urb) in usb_hcd_link_urb_to_ep() argument
1139 if (unlikely(atomic_read(&urb->reject))) { in usb_hcd_link_urb_to_ep()
1144 if (unlikely(!urb->ep->enabled)) { in usb_hcd_link_urb_to_ep()
1149 if (unlikely(!urb->dev->can_submit)) { in usb_hcd_link_urb_to_ep()
1159 urb->unlinked = 0; in usb_hcd_link_urb_to_ep()
1160 list_add_tail(&urb->urb_list, &urb->ep->urb_list); in usb_hcd_link_urb_to_ep()
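usb_hcd_link_urb_to_ep() is the helper host controller drivers call from their ->urb_enqueue() method: it rejects URBs aimed at disabled endpoints or dying devices and puts the URB on the endpoint's list. A minimal sketch of that pattern, assuming a fictional "foo" HCD (foo_hcd, hcd_to_foo() and foo_hw_queue() are made-up placeholders):

#include <linux/usb.h>
#include <linux/usb/hcd.h>
#include <linux/spinlock.h>

static int foo_urb_enqueue(struct usb_hcd *hcd, struct urb *urb,
			   gfp_t mem_flags)
{
	struct foo_hcd *foo = hcd_to_foo(hcd);		/* hypothetical */
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&foo->lock, flags);
	ret = usb_hcd_link_urb_to_ep(hcd, urb);		/* book-keeping first */
	if (ret)
		goto unlock;

	ret = foo_hw_queue(foo, urb, mem_flags);	/* hypothetical hw hand-off */
	if (ret)
		usb_hcd_unlink_urb_from_ep(hcd, urb);	/* undo on failure */
unlock:
	spin_unlock_irqrestore(&foo->lock, flags);
	return ret;
}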
1190 int usb_hcd_check_unlink_urb(struct usb_hcd *hcd, struct urb *urb, in usb_hcd_check_unlink_urb() argument
1196 list_for_each(tmp, &urb->ep->urb_list) { in usb_hcd_check_unlink_urb()
1197 if (tmp == &urb->urb_list) in usb_hcd_check_unlink_urb()
1200 if (tmp != &urb->urb_list) in usb_hcd_check_unlink_urb()
1206 if (urb->unlinked) in usb_hcd_check_unlink_urb()
1208 urb->unlinked = status; in usb_hcd_check_unlink_urb()
1223 void usb_hcd_unlink_urb_from_ep(struct usb_hcd *hcd, struct urb *urb) in usb_hcd_unlink_urb_from_ep() argument
1227 list_del_init(&urb->urb_list); in usb_hcd_unlink_urb_from_ep()
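usb_hcd_check_unlink_urb() and usb_hcd_unlink_urb_from_ep() are the matching helpers for the cancel and completion side: ->urb_dequeue() validates the unlink request, and whichever path finally retires the URB takes it off the endpoint list before handing it back. Continuing the hypothetical "foo" HCD sketch:

static int foo_urb_dequeue(struct usb_hcd *hcd, struct urb *urb, int status)
{
	struct foo_hcd *foo = hcd_to_foo(hcd);		/* hypothetical */
	unsigned long flags;
	int ret;

	spin_lock_irqsave(&foo->lock, flags);
	ret = usb_hcd_check_unlink_urb(hcd, urb, status);
	if (!ret)
		foo_hw_cancel(foo, urb);		/* hypothetical */
	spin_unlock_irqrestore(&foo->lock, flags);
	return ret;
}

/* Called with foo->lock held once the hardware is done with the URB. */
static void foo_finish_urb(struct foo_hcd *foo, struct urb *urb, int status)
{
	struct usb_hcd *hcd = foo_to_hcd(foo);		/* hypothetical */

	usb_hcd_unlink_urb_from_ep(hcd, urb);
	spin_unlock(&foo->lock);
	usb_hcd_giveback_urb(hcd, urb, status);
	spin_lock(&foo->lock);
}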
1311 void usb_hcd_unmap_urb_setup_for_dma(struct usb_hcd *hcd, struct urb *urb) in usb_hcd_unmap_urb_setup_for_dma() argument
1314 (urb->transfer_flags & URB_SETUP_MAP_SINGLE)) in usb_hcd_unmap_urb_setup_for_dma()
1316 urb->setup_dma, in usb_hcd_unmap_urb_setup_for_dma()
1319 else if (urb->transfer_flags & URB_SETUP_MAP_LOCAL) in usb_hcd_unmap_urb_setup_for_dma()
1320 hcd_free_coherent(urb->dev->bus, in usb_hcd_unmap_urb_setup_for_dma()
1321 &urb->setup_dma, in usb_hcd_unmap_urb_setup_for_dma()
1322 (void **) &urb->setup_packet, in usb_hcd_unmap_urb_setup_for_dma()
1327 urb->transfer_flags &= ~(URB_SETUP_MAP_SINGLE | URB_SETUP_MAP_LOCAL); in usb_hcd_unmap_urb_setup_for_dma()
1331 static void unmap_urb_for_dma(struct usb_hcd *hcd, struct urb *urb) in unmap_urb_for_dma() argument
1334 hcd->driver->unmap_urb_for_dma(hcd, urb); in unmap_urb_for_dma()
1336 usb_hcd_unmap_urb_for_dma(hcd, urb); in unmap_urb_for_dma()
1339 void usb_hcd_unmap_urb_for_dma(struct usb_hcd *hcd, struct urb *urb) in usb_hcd_unmap_urb_for_dma() argument
1343 usb_hcd_unmap_urb_setup_for_dma(hcd, urb); in usb_hcd_unmap_urb_for_dma()
1345 dir = usb_urb_dir_in(urb) ? DMA_FROM_DEVICE : DMA_TO_DEVICE; in usb_hcd_unmap_urb_for_dma()
1347 (urb->transfer_flags & URB_DMA_MAP_SG)) in usb_hcd_unmap_urb_for_dma()
1349 urb->sg, in usb_hcd_unmap_urb_for_dma()
1350 urb->num_sgs, in usb_hcd_unmap_urb_for_dma()
1353 (urb->transfer_flags & URB_DMA_MAP_PAGE)) in usb_hcd_unmap_urb_for_dma()
1355 urb->transfer_dma, in usb_hcd_unmap_urb_for_dma()
1356 urb->transfer_buffer_length, in usb_hcd_unmap_urb_for_dma()
1359 (urb->transfer_flags & URB_DMA_MAP_SINGLE)) in usb_hcd_unmap_urb_for_dma()
1361 urb->transfer_dma, in usb_hcd_unmap_urb_for_dma()
1362 urb->transfer_buffer_length, in usb_hcd_unmap_urb_for_dma()
1364 else if (urb->transfer_flags & URB_MAP_LOCAL) in usb_hcd_unmap_urb_for_dma()
1365 hcd_free_coherent(urb->dev->bus, in usb_hcd_unmap_urb_for_dma()
1366 &urb->transfer_dma, in usb_hcd_unmap_urb_for_dma()
1367 &urb->transfer_buffer, in usb_hcd_unmap_urb_for_dma()
1368 urb->transfer_buffer_length, in usb_hcd_unmap_urb_for_dma()
1372 urb->transfer_flags &= ~(URB_DMA_MAP_SG | URB_DMA_MAP_PAGE | in usb_hcd_unmap_urb_for_dma()
1377 static int map_urb_for_dma(struct usb_hcd *hcd, struct urb *urb, in map_urb_for_dma() argument
1381 return hcd->driver->map_urb_for_dma(hcd, urb, mem_flags); in map_urb_for_dma()
1383 return usb_hcd_map_urb_for_dma(hcd, urb, mem_flags); in map_urb_for_dma()
1386 int usb_hcd_map_urb_for_dma(struct usb_hcd *hcd, struct urb *urb, in usb_hcd_map_urb_for_dma() argument
1398 if (usb_endpoint_xfer_control(&urb->ep->desc)) { in usb_hcd_map_urb_for_dma()
1403 urb->dev->bus, mem_flags, in usb_hcd_map_urb_for_dma()
1404 &urb->setup_dma, in usb_hcd_map_urb_for_dma()
1405 (void **)&urb->setup_packet, in usb_hcd_map_urb_for_dma()
1410 urb->transfer_flags |= URB_SETUP_MAP_LOCAL; in usb_hcd_map_urb_for_dma()
1412 if (object_is_on_stack(urb->setup_packet)) { in usb_hcd_map_urb_for_dma()
1417 urb->setup_dma = dma_map_single( in usb_hcd_map_urb_for_dma()
1419 urb->setup_packet, in usb_hcd_map_urb_for_dma()
1423 urb->setup_dma)) in usb_hcd_map_urb_for_dma()
1425 urb->transfer_flags |= URB_SETUP_MAP_SINGLE; in usb_hcd_map_urb_for_dma()
1429 dir = usb_urb_dir_in(urb) ? DMA_FROM_DEVICE : DMA_TO_DEVICE; in usb_hcd_map_urb_for_dma()
1430 if (urb->transfer_buffer_length != 0 in usb_hcd_map_urb_for_dma()
1431 && !(urb->transfer_flags & URB_NO_TRANSFER_DMA_MAP)) { in usb_hcd_map_urb_for_dma()
1434 urb->dev->bus, mem_flags, in usb_hcd_map_urb_for_dma()
1435 &urb->transfer_dma, in usb_hcd_map_urb_for_dma()
1436 &urb->transfer_buffer, in usb_hcd_map_urb_for_dma()
1437 urb->transfer_buffer_length, in usb_hcd_map_urb_for_dma()
1440 urb->transfer_flags |= URB_MAP_LOCAL; in usb_hcd_map_urb_for_dma()
1442 if (urb->num_sgs) { in usb_hcd_map_urb_for_dma()
1446 if (usb_endpoint_xfer_isoc(&urb->ep->desc)) { in usb_hcd_map_urb_for_dma()
1453 urb->sg, in usb_hcd_map_urb_for_dma()
1454 urb->num_sgs, in usb_hcd_map_urb_for_dma()
1459 urb->transfer_flags |= URB_DMA_MAP_SG; in usb_hcd_map_urb_for_dma()
1460 urb->num_mapped_sgs = n; in usb_hcd_map_urb_for_dma()
1461 if (n != urb->num_sgs) in usb_hcd_map_urb_for_dma()
1462 urb->transfer_flags |= in usb_hcd_map_urb_for_dma()
1464 } else if (urb->sg) { in usb_hcd_map_urb_for_dma()
1465 struct scatterlist *sg = urb->sg; in usb_hcd_map_urb_for_dma()
1466 urb->transfer_dma = dma_map_page( in usb_hcd_map_urb_for_dma()
1470 urb->transfer_buffer_length, in usb_hcd_map_urb_for_dma()
1473 urb->transfer_dma)) in usb_hcd_map_urb_for_dma()
1476 urb->transfer_flags |= URB_DMA_MAP_PAGE; in usb_hcd_map_urb_for_dma()
1477 } else if (object_is_on_stack(urb->transfer_buffer)) { in usb_hcd_map_urb_for_dma()
1481 urb->transfer_dma = dma_map_single( in usb_hcd_map_urb_for_dma()
1483 urb->transfer_buffer, in usb_hcd_map_urb_for_dma()
1484 urb->transfer_buffer_length, in usb_hcd_map_urb_for_dma()
1487 urb->transfer_dma)) in usb_hcd_map_urb_for_dma()
1490 urb->transfer_flags |= URB_DMA_MAP_SINGLE; in usb_hcd_map_urb_for_dma()
1493 if (ret && (urb->transfer_flags & (URB_SETUP_MAP_SINGLE | in usb_hcd_map_urb_for_dma()
1495 usb_hcd_unmap_urb_for_dma(hcd, urb); in usb_hcd_map_urb_for_dma()
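usb_hcd_map_urb_for_dma() only maps buffers the submitting driver has not already prepared: URB_NO_TRANSFER_DMA_MAP (and the setup-packet equivalent) short-circuits the logic above. A small illustrative sketch of the driver-side convention, using a coherent buffer allocated up front (the example_ names are assumptions):

#include <linux/usb.h>

/* Buffer comes from usb_alloc_coherent(), so urb->transfer_dma is already
 * valid and the HCD core must not map transfer_buffer again. */
static int example_submit_premapped(struct usb_device *udev, struct urb *urb,
				    unsigned int pipe, size_t len,
				    usb_complete_t done, void *ctx)
{
	void *buf;

	buf = usb_alloc_coherent(udev, len, GFP_KERNEL, &urb->transfer_dma);
	if (!buf)
		return -ENOMEM;

	usb_fill_bulk_urb(urb, udev, pipe, buf, len, done, ctx);
	urb->transfer_flags |= URB_NO_TRANSFER_DMA_MAP;

	return usb_submit_urb(urb, GFP_KERNEL);
	/* teardown must call usb_free_coherent(udev, len, buf,
	 * urb->transfer_dma) once the URB has completed */
}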
1508 int usb_hcd_submit_urb (struct urb *urb, gfp_t mem_flags) in usb_hcd_submit_urb() argument
1511 struct usb_hcd *hcd = bus_to_hcd(urb->dev->bus); in usb_hcd_submit_urb()
1517 usb_get_urb(urb); in usb_hcd_submit_urb()
1518 atomic_inc(&urb->use_count); in usb_hcd_submit_urb()
1519 atomic_inc(&urb->dev->urbnum); in usb_hcd_submit_urb()
1520 usbmon_urb_submit(&hcd->self, urb); in usb_hcd_submit_urb()
1530 if (is_root_hub(urb->dev)) { in usb_hcd_submit_urb()
1531 status = rh_urb_enqueue(hcd, urb); in usb_hcd_submit_urb()
1533 status = map_urb_for_dma(hcd, urb, mem_flags); in usb_hcd_submit_urb()
1535 status = hcd->driver->urb_enqueue(hcd, urb, mem_flags); in usb_hcd_submit_urb()
1537 unmap_urb_for_dma(hcd, urb); in usb_hcd_submit_urb()
1542 usbmon_urb_submit_error(&hcd->self, urb, status); in usb_hcd_submit_urb()
1543 urb->hcpriv = NULL; in usb_hcd_submit_urb()
1544 INIT_LIST_HEAD(&urb->urb_list); in usb_hcd_submit_urb()
1545 atomic_dec(&urb->use_count); in usb_hcd_submit_urb()
1553 atomic_dec(&urb->dev->urbnum); in usb_hcd_submit_urb()
1554 if (atomic_read(&urb->reject)) in usb_hcd_submit_urb()
1556 usb_put_urb(urb); in usb_hcd_submit_urb()
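usb_hcd_submit_urb() is reached through usb_submit_urb() in drivers/usb/core/urb.c, which performs the argument and endpoint checks before this function takes its own reference, bumps use_count, and hands the URB to either the root-hub code or the HCD driver. A hedged sketch of the usual asynchronous driver-side call (the example_ helper and the fire-and-forget reference handling are illustrative):

#include <linux/usb.h>

/* dr and buf must be kmalloc()'d (DMA-able), not on the stack. */
static int example_async_control(struct usb_device *udev,
				 struct usb_ctrlrequest *dr, void *buf,
				 u16 len, usb_complete_t done, void *ctx)
{
	struct urb *urb = usb_alloc_urb(0, GFP_KERNEL);
	int ret;

	if (!urb)
		return -ENOMEM;

	usb_fill_control_urb(urb, udev, usb_rcvctrlpipe(udev, 0),
			     (unsigned char *)dr, buf, len, done, ctx);
	ret = usb_submit_urb(urb, GFP_KERNEL);
	usb_free_urb(urb);	/* drop our ref; the core took its own above */
	return ret;
}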
1568 static int unlink1(struct usb_hcd *hcd, struct urb *urb, int status) in unlink1() argument
1572 if (is_root_hub(urb->dev)) in unlink1()
1573 value = usb_rh_urb_dequeue(hcd, urb, status); in unlink1()
1579 value = hcd->driver->urb_dequeue(hcd, urb, status); in unlink1()
1590 int usb_hcd_unlink_urb (struct urb *urb, int status) in usb_hcd_unlink_urb() argument
1593 struct usb_device *udev = urb->dev; in usb_hcd_unlink_urb()
1603 if (atomic_read(&urb->use_count) > 0) { in usb_hcd_unlink_urb()
1609 hcd = bus_to_hcd(urb->dev->bus); in usb_hcd_unlink_urb()
1610 retval = unlink1(hcd, urb, status); in usb_hcd_unlink_urb()
1615 urb, retval); in usb_hcd_unlink_urb()
1623 static void __usb_hcd_giveback_urb(struct urb *urb) in __usb_hcd_giveback_urb() argument
1625 struct usb_hcd *hcd = bus_to_hcd(urb->dev->bus); in __usb_hcd_giveback_urb()
1626 struct usb_anchor *anchor = urb->anchor; in __usb_hcd_giveback_urb()
1627 int status = urb->unlinked; in __usb_hcd_giveback_urb()
1629 urb->hcpriv = NULL; in __usb_hcd_giveback_urb()
1630 if (unlikely((urb->transfer_flags & URB_SHORT_NOT_OK) && in __usb_hcd_giveback_urb()
1631 urb->actual_length < urb->transfer_buffer_length && in __usb_hcd_giveback_urb()
1635 unmap_urb_for_dma(hcd, urb); in __usb_hcd_giveback_urb()
1636 usbmon_urb_complete(&hcd->self, urb, status); in __usb_hcd_giveback_urb()
1638 usb_unanchor_urb(urb); in __usb_hcd_giveback_urb()
1643 urb->status = status; in __usb_hcd_giveback_urb()
1649 kcov_remote_start_usb_softirq((u64)urb->dev->bus->busnum); in __usb_hcd_giveback_urb()
1650 urb->complete(urb); in __usb_hcd_giveback_urb()
1654 atomic_dec(&urb->use_count); in __usb_hcd_giveback_urb()
1662 if (unlikely(atomic_read(&urb->reject))) in __usb_hcd_giveback_urb()
1664 usb_put_urb(urb); in __usb_hcd_giveback_urb()
1678 struct urb *urb; in usb_giveback_urb_bh() local
1680 urb = list_entry(local_list.next, struct urb, urb_list); in usb_giveback_urb_bh()
1681 list_del_init(&urb->urb_list); in usb_giveback_urb_bh()
1682 bh->completing_ep = urb->ep; in usb_giveback_urb_bh()
1683 __usb_hcd_giveback_urb(urb); in usb_giveback_urb_bh()
1723 void usb_hcd_giveback_urb(struct usb_hcd *hcd, struct urb *urb, int status) in usb_hcd_giveback_urb() argument
1729 if (likely(!urb->unlinked)) in usb_hcd_giveback_urb()
1730 urb->unlinked = status; in usb_hcd_giveback_urb()
1732 if (!hcd_giveback_urb_in_bh(hcd) && !is_root_hub(urb->dev)) { in usb_hcd_giveback_urb()
1733 __usb_hcd_giveback_urb(urb); in usb_hcd_giveback_urb()
1737 if (usb_pipeisoc(urb->pipe) || usb_pipeint(urb->pipe)) in usb_hcd_giveback_urb()
1743 list_add_tail(&urb->urb_list, &bh->head); in usb_hcd_giveback_urb()
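usb_hcd_giveback_urb() ends with the submitting driver's completion callback, invoked either directly or from the giveback bottom half. A minimal, hypothetical completion handler showing how the status recorded above is normally consumed:

#include <linux/usb.h>

static void example_complete(struct urb *urb)
{
	switch (urb->status) {
	case 0:			/* success: urb->actual_length bytes arrived */
		break;
	case -ECONNRESET:	/* unlinked with usb_unlink_urb() */
	case -ENOENT:		/* killed with usb_kill_urb() */
	case -ESHUTDOWN:	/* device or controller is going away */
		return;		/* do not resubmit */
	default:		/* transient error: retry below */
		break;
	}

	if (usb_submit_urb(urb, GFP_ATOMIC))
		dev_err(&urb->dev->dev, "example: resubmit failed\n");
}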
1766 struct urb *urb; in usb_hcd_flush_endpoint() local
1776 list_for_each_entry_reverse(urb, &ep->urb_list, urb_list) { in usb_hcd_flush_endpoint()
1779 if (urb->unlinked) in usb_hcd_flush_endpoint()
1781 usb_get_urb (urb); in usb_hcd_flush_endpoint()
1782 is_in = usb_urb_dir_in(urb); in usb_hcd_flush_endpoint()
1786 unlink1(hcd, urb, -ESHUTDOWN); in usb_hcd_flush_endpoint()
1789 urb, usb_endpoint_num(&ep->desc), in usb_hcd_flush_endpoint()
1792 usb_put_urb (urb); in usb_hcd_flush_endpoint()
1805 urb = NULL; in usb_hcd_flush_endpoint()
1807 urb = list_entry (ep->urb_list.prev, struct urb, in usb_hcd_flush_endpoint()
1809 usb_get_urb (urb); in usb_hcd_flush_endpoint()
1813 if (urb) { in usb_hcd_flush_endpoint()
1814 usb_kill_urb (urb); in usb_hcd_flush_endpoint()
1815 usb_put_urb (urb); in usb_hcd_flush_endpoint()
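usb_hcd_flush_endpoint() is the core-side cleanup that runs when an endpoint is disabled; the driver-side trigger is usually usb_kill_urb() from a disconnect() handler. A short hypothetical sketch (struct example_dev and its fields are assumptions):

#include <linux/usb.h>
#include <linux/slab.h>

static void example_disconnect(struct usb_interface *intf)
{
	struct example_dev *dev = usb_get_intfdata(intf);

	usb_set_intfdata(intf, NULL);
	usb_kill_urb(dev->int_urb);	/* blocks until giveback has run */
	usb_free_urb(dev->int_urb);
	kfree(dev);
}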
2120 static void usb_ehset_completion(struct urb *urb) in usb_ehset_completion() argument
2122 struct completion *done = urb->context; in usb_ehset_completion()
2132 static struct urb *request_single_step_set_feature_urb( in request_single_step_set_feature_urb()
2138 struct urb *urb; in request_single_step_set_feature_urb() local
2141 urb = usb_alloc_urb(0, GFP_KERNEL); in request_single_step_set_feature_urb()
2142 if (!urb) in request_single_step_set_feature_urb()
2145 urb->pipe = usb_rcvctrlpipe(udev, 0); in request_single_step_set_feature_urb()
2147 urb->ep = &udev->ep0; in request_single_step_set_feature_urb()
2148 urb->dev = udev; in request_single_step_set_feature_urb()
2149 urb->setup_packet = (void *)dr; in request_single_step_set_feature_urb()
2150 urb->transfer_buffer = buf; in request_single_step_set_feature_urb()
2151 urb->transfer_buffer_length = USB_DT_DEVICE_SIZE; in request_single_step_set_feature_urb()
2152 urb->complete = usb_ehset_completion; in request_single_step_set_feature_urb()
2153 urb->status = -EINPROGRESS; in request_single_step_set_feature_urb()
2154 urb->actual_length = 0; in request_single_step_set_feature_urb()
2155 urb->transfer_flags = URB_DIR_IN; in request_single_step_set_feature_urb()
2156 usb_get_urb(urb); in request_single_step_set_feature_urb()
2157 atomic_inc(&urb->use_count); in request_single_step_set_feature_urb()
2158 atomic_inc(&urb->dev->urbnum); in request_single_step_set_feature_urb()
2159 if (map_urb_for_dma(hcd, urb, GFP_KERNEL)) { in request_single_step_set_feature_urb()
2160 usb_put_urb(urb); in request_single_step_set_feature_urb()
2161 usb_free_urb(urb); in request_single_step_set_feature_urb()
2165 urb->context = done; in request_single_step_set_feature_urb()
2166 return urb; in request_single_step_set_feature_urb()
2173 struct urb *urb; in ehset_single_step_set_feature() local
2200 urb = request_single_step_set_feature_urb(udev, dr, buf, &done); in ehset_single_step_set_feature()
2201 if (!urb) in ehset_single_step_set_feature()
2205 retval = hcd->driver->submit_single_step_set_feature(hcd, urb, 1); in ehset_single_step_set_feature()
2209 usb_kill_urb(urb); in ehset_single_step_set_feature()
2218 urb->status = -EINPROGRESS; in ehset_single_step_set_feature()
2219 usb_get_urb(urb); in ehset_single_step_set_feature()
2220 atomic_inc(&urb->use_count); in ehset_single_step_set_feature()
2221 atomic_inc(&urb->dev->urbnum); in ehset_single_step_set_feature()
2222 retval = hcd->driver->submit_single_step_set_feature(hcd, urb, 0); in ehset_single_step_set_feature()
2225 usb_kill_urb(urb); in ehset_single_step_set_feature()
2231 usb_free_urb(urb); in ehset_single_step_set_feature()