mctp_usb
/*
 * mctp_usb_rx_queue() - allocate an RX skb and (re)submit the bulk-IN URB.
 *
 * NOTE(review): only fragments of this function are visible here; the
 * allocation-failure check, usb_fill_bulk_urb() completion arguments,
 * error branch and return statements between these lines are not shown.
 */
static int mctp_usb_rx_queue(struct mctp_usb *mctp_usb, gfp_t gfp)
/* Allocate one receive buffer sized for a full bulk transfer; gfp is
 * caller-supplied so this can run from atomic (completion) context. */
skb = __netdev_alloc_skb(mctp_usb->netdev, MCTP_USB_XFER_SIZE, gfp);
usb_fill_bulk_urb(mctp_usb->rx_urb, mctp_usb->usbdev,
usb_rcvbulkpipe(mctp_usb->usbdev, mctp_usb->ep_in),
rc = usb_submit_urb(mctp_usb->rx_urb, gfp);
/* Submission failure is treated as non-fatal: log at debug level and
 * retry later via the delayed work item instead of losing RX forever. */
netdev_dbg(mctp_usb->netdev, "rx urb submit failure: %d\n", rc);
schedule_delayed_work(&mctp_usb->rx_retry_work, RX_RETRY_DELAY);
/* NOTE(review): fragment of the RX completion path — it re-arms the
 * receive URB. GFP_ATOMIC because URB completion handlers may run in
 * interrupt context, where sleeping allocations are forbidden. */
struct mctp_usb *mctp_usb = netdev_priv(netdev);
mctp_usb_rx_queue(mctp_usb, GFP_ATOMIC);
/* Delayed-work handler: retry RX URB submission from process context
 * after an earlier atomic-context submit/alloc failure.
 * NOTE(review): the container_of() continuation line (the member name,
 * presumably rx_retry_work.work) is not visible here. */
struct mctp_usb *mctp_usb = container_of(work, struct mctp_usb,
/* READ_ONCE pairs with the WRITE_ONCE on ->stopped in the stop path:
 * once the device is being torn down, do not resubmit the RX URB. */
if (READ_ONCE(mctp_usb->stopped))
/* Process context here, so a sleeping GFP_KERNEL allocation is fine. */
mctp_usb_rx_queue(mctp_usb, GFP_KERNEL);
/* ndo_open-style fragment: clear the stopped flag before priming the
 * first RX URB, so completions/retries are allowed to resubmit. */
struct mctp_usb *mctp_usb = netdev_priv(dev);
WRITE_ONCE(mctp_usb->stopped, false);
/* Open runs in process context, so GFP_KERNEL is safe here; propagate
 * the submit result to the caller. */
return mctp_usb_rx_queue(mctp_usb, GFP_KERNEL);
/* ndo_stop-style fragment. Ordering matters here:
 * 1) set ->stopped so the retry worker / completion path stops
 *    resubmitting, 2) kill in-flight URBs, 3) wait out the worker. */
struct mctp_usb *mctp_usb = netdev_priv(dev);
WRITE_ONCE(mctp_usb->stopped, true);
usb_kill_urb(mctp_usb->rx_urb);
usb_kill_urb(mctp_usb->tx_urb);
/* The _sync variant waits for a concurrently-running worker, which
 * could otherwise resubmit the RX URB after it was killed above. */
cancel_delayed_work_sync(&mctp_usb->rx_retry_work);
/* NOTE(review): the next two declarations appear to come from two
 * DIFFERENT functions (likely probe and disconnect respectively) —
 * confirm against the full file before editing. */
struct mctp_usb *dev;
/* Recover the driver-private state that probe stashed on the USB
 * interface (via usb_set_intfdata, not visible in this excerpt). */
struct mctp_usb *dev = usb_get_intfdata(intf);
/* Transmit-path fragment: fill the bulk-OUT URB for submission.
 * NOTE(review): usb_fill_bulk_urb()'s buffer/length/completion
 * arguments and the usb_submit_urb() call are not visible here. */
struct mctp_usb *mctp_usb = netdev_priv(dev);
/* Reuses the single preallocated TX URB (->tx_urb); presumably TX is
 * serialized so only one transfer is in flight — TODO confirm. */
urb = mctp_usb->tx_urb;
usb_fill_bulk_urb(urb, mctp_usb->usbdev,
usb_sndbulkpipe(mctp_usb->usbdev, mctp_usb->ep_out),