Searched refs:ud (Results 1 - 25 of 35) sorted by relevance


/drivers/staging/usbip/
usbip_event.c
25 static int event_handler(struct usbip_device *ud) argument
32 while (usbip_event_happened(ud)) {
33 usbip_dbg_eh("pending event %lx\n", ud->event);
39 if (ud->event & USBIP_EH_SHUTDOWN) {
40 ud->eh_ops.shutdown(ud);
41 ud->event &= ~USBIP_EH_SHUTDOWN;
45 if (ud->event & USBIP_EH_RESET) {
46 ud->eh_ops.reset(ud);
66 struct usbip_device *ud = data; local
81 usbip_start_eh(struct usbip_device *ud) argument
96 usbip_stop_eh(struct usbip_device *ud) argument
106 usbip_event_add(struct usbip_device *ud, unsigned long event) argument
115 usbip_event_happened(struct usbip_device *ud) argument
[all...]
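The event_handler() hits above show the driver's pattern: each pending bit in ud->event selects an eh_ops callback, and the bit is cleared only after the callback has run. A minimal, compilable sketch of that bit-per-event dispatch (the flag names, struct layout, and handlers below are illustrative stand-ins, not the usbip definitions):

    #include <stdio.h>

    #define EH_SHUTDOWN (1UL << 0)   /* stand-in for USBIP_EH_SHUTDOWN */
    #define EH_RESET    (1UL << 1)   /* stand-in for USBIP_EH_RESET */

    struct dev {
        unsigned long event;              /* pending bits, like ud->event */
        void (*shutdown)(struct dev *);   /* like ud->eh_ops.shutdown */
        void (*reset)(struct dev *);      /* like ud->eh_ops.reset */
    };

    static void do_shutdown(struct dev *d) { (void)d; printf("shutdown\n"); }
    static void do_reset(struct dev *d)    { (void)d; printf("reset\n"); }

    /* Handle every pending event, clearing each bit once it is serviced. */
    static void event_handler(struct dev *d)
    {
        while (d->event) {
            if (d->event & EH_SHUTDOWN) {
                d->shutdown(d);
                d->event &= ~EH_SHUTDOWN;
            }
            if (d->event & EH_RESET) {
                d->reset(d);
                d->event &= ~EH_RESET;
            }
        }
    }

    int main(void)
    {
        struct dev d = { EH_SHUTDOWN | EH_RESET, do_shutdown, do_reset };
        event_handler(&d);
        return 0;
    }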
stub_dev.c
69 spin_lock(&sdev->ud.lock);
70 status = sdev->ud.status;
71 spin_unlock(&sdev->ud.lock);
99 spin_lock(&sdev->ud.lock);
101 if (sdev->ud.status != SDEV_ST_AVAILABLE) {
103 spin_unlock(&sdev->ud.lock);
109 spin_unlock(&sdev->ud.lock);
112 sdev->ud.tcp_socket = socket;
114 spin_unlock(&sdev->ud.lock);
116 sdev->ud
173 stub_shutdown_connection(struct usbip_device *ud) argument
228 stub_device_reset(struct usbip_device *ud) argument
260 stub_device_unusable(struct usbip_device *ud) argument
[all...]
vhci_sysfs.c
55 spin_lock(&vdev->ud.lock);
56 out += sprintf(out, "%03u %03u ", i, vdev->ud.status);
58 if (vdev->ud.status == VDEV_ST_USED) {
61 out += sprintf(out, "%16p ", vdev->ud.tcp_socket);
69 spin_unlock(&vdev->ud.lock);
90 spin_lock(&vdev->ud.lock);
91 if (vdev->ud.status == VDEV_ST_NULL) {
92 pr_err("not connected %d\n", vdev->ud.status);
95 spin_unlock(&vdev->ud.lock);
102 spin_unlock(&vdev->ud.lock);
[all...]
vhci_rx.c
69 struct usbip_device *ud = &vdev->ud; local
81 usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);
89 if (usbip_recv_xbuff(ud, urb) < 0)
93 if (usbip_recv_iso(ud, urb) < 0)
97 usbip_pad_iso(ud, urb);
197 static void vhci_rx_pdu(struct usbip_device *ud) argument
201 struct vhci_device *vdev = container_of(ud, struct vhci_device, ud);
208 ret = usbip_recv(ud
258 struct usbip_device *ud = data; local
[all...]
stub_rx.c
307 struct usbip_device *ud = &sdev->ud; local
311 spin_lock(&ud->lock);
312 if (ud->status == SDEV_ST_USED) {
316 spin_unlock(&ud->lock);
326 struct usbip_device *ud = &sdev->ud; local
335 usbip_event_add(ud, SDEV_EVENT_ERROR_MALLOC);
462 struct usbip_device *ud = &sdev->ud; local
546 stub_rx_pdu(struct usbip_device *ud) argument
595 struct usbip_device *ud = data; local
[all...]
vhci_hcd.c
143 /* spin_lock(&the_controller->vdev[rhport].ud.lock);
144 * the_controller->vdev[rhport].ud.status = VDEV_CONNECT;
145 * spin_unlock(&the_controller->vdev[rhport].ud.lock); */
165 * spin_lock(&vdev->ud.lock);
166 * vdev->ud.status = VHC_ST_DISCONNECT;
167 * spin_unlock(&vdev->ud.lock); */
380 if (dum->vdev[rhport].ud.status ==
385 dum->vdev[rhport].ud.status);
487 usbip_event_add(&vdev->ud, VDEV_EVENT_ERROR_MALLOC);
531 spin_lock(&vdev->ud.lock);
813 vhci_shutdown_connection(struct usbip_device *ud) argument
866 vhci_device_reset(struct usbip_device *ud) argument
885 vhci_device_unusable(struct usbip_device *ud) argument
[all...]
stub_tx.c
46 usbip_event_add(&sdev->ud, VDEV_EVENT_ERROR_MALLOC);
185 usbip_event_add(&sdev->ud, SDEV_EVENT_ERROR_MALLOC);
238 usbip_event_add(&sdev->ud,
250 usbip_event_add(&sdev->ud,
262 ret = kernel_sendmsg(sdev->ud.tcp_socket, &msg,
270 usbip_event_add(&sdev->ud, SDEV_EVENT_ERROR_TCP);
337 ret = kernel_sendmsg(sdev->ud.tcp_socket, &msg, iov,
343 usbip_event_add(&sdev->ud, SDEV_EVENT_ERROR_TCP);
365 struct usbip_device *ud = data; local
366 struct stub_device *sdev = container_of(ud, struct stub_device, ud);
[all...]
vhci_tx.c
109 usbip_event_add(&vdev->ud,
119 ret = kernel_sendmsg(vdev->ud.tcp_socket, &msg, iov, 3, txsize);
124 usbip_event_add(&vdev->ud, VDEV_EVENT_ERROR_TCP);
189 ret = kernel_sendmsg(vdev->ud.tcp_socket, &msg, iov, 1, txsize);
193 usbip_event_add(&vdev->ud, VDEV_EVENT_ERROR_TCP);
207 struct usbip_device *ud = data; local
208 struct vhci_device *vdev = container_of(ud, struct vhci_device, ud);
usbip_common.h
308 int usbip_recv_iso(struct usbip_device *ud, struct urb *urb);
309 void usbip_pad_iso(struct usbip_device *ud, struct urb *urb);
310 int usbip_recv_xbuff(struct usbip_device *ud, struct urb *urb);
313 int usbip_start_eh(struct usbip_device *ud);
314 void usbip_stop_eh(struct usbip_device *ud);
315 void usbip_event_add(struct usbip_device *ud, unsigned long event);
316 int usbip_event_happened(struct usbip_device *ud);
usbip_common.c
665 int usbip_recv_iso(struct usbip_device *ud, struct urb *urb) argument
689 ret = usbip_recv(ud->tcp_socket, buff, size);
695 if (ud->side == USBIP_STUB)
696 usbip_event_add(ud, SDEV_EVENT_ERROR_TCP);
698 usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);
719 if (ud->side == USBIP_STUB)
720 usbip_event_add(ud, SDEV_EVENT_ERROR_TCP);
722 usbip_event_add(ud, VDEV_EVENT_ERROR_TCP);
738 void usbip_pad_iso(struct usbip_device *ud, struct urb *urb) argument
772 int usbip_recv_xbuff(struct usbip_device *ud, struct urb *urb) argument
[all...]
stub.h
39 struct usbip_device ud; member in struct:stub_device
vhci.h
38 struct usbip_device ud; member in struct:vhci_device
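Both stub.h and vhci.h embed the shared struct usbip_device as a member named ud; the rx/tx and event threads in the results above are handed only that inner pointer and recover the enclosing per-driver object with container_of() (e.g. vhci_rx.c line 201). A small stand-alone sketch of the embedding idiom, with illustrative type names in place of the driver's:

    #include <stddef.h>
    #include <stdio.h>

    /* Same pointer arithmetic as the kernel's container_of(). */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct usbip_device_like {            /* stands in for struct usbip_device */
        int status;
    };

    struct vhci_device_like {             /* stands in for struct vhci_device */
        int rhport;
        struct usbip_device_like ud;      /* embedded common state */
    };

    int main(void)
    {
        struct vhci_device_like vdev = { .rhport = 2, .ud = { .status = 1 } };
        struct usbip_device_like *ud = &vdev.ud;  /* what the threads receive */

        /* Walk back from the embedded member to the containing device. */
        struct vhci_device_like *back =
            container_of(ud, struct vhci_device_like, ud);
        printf("rhport=%d status=%d\n", back->rhport, back->ud.status);
        return 0;
    }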
/drivers/net/wireless/ath/ath5k/
desc.c
89 tx_ctl = &desc->ud.ds_tx5210.tx_ctl;
109 memset(&desc->ud.ds_tx5210, 0, sizeof(struct ath5k_hw_5210_tx_desc));
260 tx_ctl = &desc->ud.ds_tx5212.tx_ctl;
284 memset(&desc->ud.ds_tx5212.tx_stat, 0,
285 sizeof(desc->ud.ds_tx5212.tx_stat));
404 tx_ctl = &desc->ud.ds_tx5212.tx_ctl;
447 tx_ctl = &desc->ud.ds_tx5210.tx_ctl;
448 tx_status = &desc->ud.ds_tx5210.tx_stat;
502 tx_ctl = &desc->ud.ds_tx5212.tx_ctl;
503 tx_status = &desc->ud.ds_tx5212.tx_stat;
[all...]
desc.h
345 * @ud: Union containing hw_5xxx_tx_desc structs and hw_all_rx_desc
357 } ud; member in struct:ath5k_desc
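In ath5k the ud member is a union of per-generation hardware descriptor layouts, and desc.c picks the arm (ds_tx5210 vs. ds_tx5212) that matches the MAC the driver detected. A compilable sketch of that selection, with simplified stand-in descriptor structs:

    #include <stdio.h>
    #include <string.h>

    struct tx5210 { unsigned int tx_ctl; unsigned int tx_stat; };  /* stand-in */
    struct tx5212 { unsigned int tx_ctl; unsigned int tx_stat; };  /* stand-in */

    struct desc {
        union {                    /* like the ud member of struct ath5k_desc */
            struct tx5210 ds_tx5210;
            struct tx5212 ds_tx5212;
        } ud;
    };

    enum mac_version { MAC_5210, MAC_5212 };

    /* Zero and fill the union arm that matches the MAC generation. */
    static void setup_tx_ctl(struct desc *d, enum mac_version v, unsigned int ctl)
    {
        if (v == MAC_5210) {
            memset(&d->ud.ds_tx5210, 0, sizeof(d->ud.ds_tx5210));
            d->ud.ds_tx5210.tx_ctl = ctl;
        } else {
            memset(&d->ud.ds_tx5212, 0, sizeof(d->ud.ds_tx5212));
            d->ud.ds_tx5212.tx_ctl = ctl;
        }
    }

    int main(void)
    {
        struct desc d;
        setup_tx_ctl(&d, MAC_5212, 0x1);
        printf("tx_ctl=%u\n", d.ud.ds_tx5212.tx_ctl);
        return 0;
    }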
/drivers/infiniband/hw/ipath/
ipath_ud.c
68 qp = ipath_lookup_qpn(&dev->qp_table, swqe->wr.wr.ud.remote_qpn);
80 ((int) swqe->wr.wr.ud.remote_qkey < 0 ?
81 sqp->qkey : swqe->wr.wr.ud.remote_qkey) != qp->qkey)) {
178 ah_attr = &to_iah(swqe->wr.wr.ud.ah)->attr;
283 ah_attr = &to_iah(wqe->wr.wr.ud.ah)->attr;
345 ohdr->u.ud.imm_data = wqe->wr.ex.imm_data;
375 cpu_to_be32(wqe->wr.wr.ud.remote_qpn);
381 ohdr->u.ud.deth[0] = cpu_to_be32((int)wqe->wr.wr.ud.remote_qkey < 0 ?
382 qp->qkey : wqe->wr.wr.ud.remote_qkey);
[all...]
/drivers/infiniband/hw/qib/
qib_ud.c
61 qp = qib_lookup_qpn(ibp, swqe->wr.wr.ud.remote_qpn);
72 ah_attr = &to_iah(swqe->wr.wr.ud.ah)->attr;
102 qkey = (int)swqe->wr.wr.ud.remote_qkey < 0 ?
103 sqp->qkey : swqe->wr.wr.ud.remote_qkey;
210 swqe->wr.wr.ud.pkey_index : 0;
277 ah_attr = &to_iah(wqe->wr.wr.ud.ah)->attr;
340 ohdr->u.ud.imm_data = wqe->wr.ex.imm_data;
363 wqe->wr.wr.ud.pkey_index : qp->s_pkey_index);
371 cpu_to_be32(wqe->wr.wr.ud.remote_qpn);
377 ohdr->u.ud
[all...]
/drivers/infiniband/hw/ehca/
ehca_reqs.c
118 struct ib_mad_hdr *mad_hdr = send_wr->wr.ud.mad_hdr;
163 u32 remote_qkey = send_wr->wr.ud.remote_qkey;
226 if (send_wr->wr.ud.remote_qkey & 0x80000000)
229 wqe_p->destination_qp_number = send_wr->wr.ud.remote_qpn << 8;
231 if (unlikely(!send_wr->wr.ud.ah)) {
232 ehca_gen_err("wr.ud.ah is NULL. qp=%p", qp);
235 if (unlikely(send_wr->wr.ud.remote_qpn == 0)) {
239 my_av = container_of(send_wr->wr.ud.ah, struct ehca_av, ib_ah);
258 wqe_p->pkeyi = send_wr->wr.ud.pkey_index;
/drivers/infiniband/core/
agent.c
124 mad_send_wr->send_wr.wr.ud.port_num = port_num;
cma.c
1211 event.param.ud.private_data = ib_event->private_data + offset;
1212 event.param.ud.private_data_len =
2460 event.param.ud.private_data = ib_event->private_data;
2461 event.param.ud.private_data_len = IB_CM_SIDR_REP_PRIVATE_DATA_SIZE;
2480 &event.param.ud.ah_attr);
2481 event.param.ud.qp_num = rep->qpn;
2482 event.param.ud.qkey = rep->qkey;
2934 event.param.ud.private_data = mc->context;
2939 &event.param.ud.ah_attr);
2940 event.param.ud
[all...]
ucma.c
254 event->param.ud.private_data;
281 ucma_copy_ud_event(&uevent->resp.param.ud, &event->param.ud);
uverbs_cmd.c
1982 next->wr.ud.ah = idr_read_ah(user_wr->wr.ud.ah,
1984 if (!next->wr.ud.ah) {
1988 next->wr.ud.remote_qpn = user_wr->wr.ud.remote_qpn;
1989 next->wr.ud.remote_qkey = user_wr->wr.ud.remote_qkey;
2057 if (is_ud && wr->wr.ud.ah)
2058 put_ah_read(wr->wr.ud.ah);
/drivers/infiniband/hw/mlx4/
qp.c
1356 struct mlx4_ib_ah *ah = to_mah(wr->wr.ud.ah);
1452 ib_get_cached_pkey(ib_dev, sqp->qp.port, wr->wr.ud.pkey_index, &pkey);
1454 sqp->ud_header.bth.destination_qpn = cpu_to_be32(wr->wr.ud.remote_qpn);
1456 sqp->ud_header.deth.qkey = cpu_to_be32(wr->wr.ud.remote_qkey & 0x80000000 ?
1457 sqp->qkey : wr->wr.ud.remote_qkey);
1606 memcpy(dseg->av, &to_mah(wr->wr.ud.ah)->av, sizeof (struct mlx4_av));
1607 dseg->dqpn = cpu_to_be32(wr->wr.ud.remote_qpn);
1608 dseg->qkey = cpu_to_be32(wr->wr.ud.remote_qkey);
1609 dseg->vlan = to_mah(wr->wr.ud.ah)->av.eth.vlan;
1610 memcpy(dseg->mac, to_mah(wr->wr.ud
[all...]
/drivers/staging/crystalhd/
crystalhd_lnx.c
112 static inline int crystalhd_user_data(unsigned long ud, void *dr, int size, int set) argument
116 if (!ud || !dr) {
122 rc = copy_to_user((void *)ud, dr, size);
124 rc = copy_from_user(dr, (void *)ud, size);
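Here ud is just the user-space buffer address passed through an ioctl, and the set flag chooses the copy direction. A user-space model of that helper, with memcpy standing in for copy_to_user()/copy_from_user() (which only exist in the kernel):

    #include <stdio.h>
    #include <string.h>

    /* Direction-selecting copy, modeled on crystalhd_user_data(). */
    static int user_data(void *ud, void *dr, int size, int set)
    {
        if (!ud || !dr)
            return -1;
        if (set)
            memcpy(ud, dr, size);   /* driver buffer -> user (copy_to_user) */
        else
            memcpy(dr, ud, size);   /* user -> driver buffer (copy_from_user) */
        return 0;
    }

    int main(void)
    {
        char user_buf[8] = "hello";
        char drv_buf[8] = { 0 };

        user_data(user_buf, drv_buf, sizeof(drv_buf), 0);  /* fetch from "user" */
        printf("%s\n", drv_buf);
        return 0;
    }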
/drivers/infiniband/hw/mthca/
mthca_qp.c
1485 mthca_ah_grh_present(to_mah(wr->wr.ud.ah)), 0,
1488 err = mthca_read_ah(dev, to_mah(wr->wr.ud.ah), &sqp->ud_header);
1522 wr->wr.ud.pkey_index, &pkey);
1524 sqp->ud_header.bth.destination_qpn = cpu_to_be32(wr->wr.ud.remote_qpn);
1526 sqp->ud_header.deth.qkey = cpu_to_be32(wr->wr.ud.remote_qkey & 0x80000000 ?
1527 sqp->qkey : wr->wr.ud.remote_qkey);
1584 useg->lkey = cpu_to_be32(to_mah(wr->wr.ud.ah)->key);
1585 useg->av_addr = cpu_to_be64(to_mah(wr->wr.ud.ah)->avdma);
1586 useg->dqpn = cpu_to_be32(wr->wr.ud.remote_qpn);
1587 useg->qkey = cpu_to_be32(wr->wr.ud.remote_qkey);
[all...]
/drivers/infiniband/ulp/ipoib/
ipoib_ib.c
524 priv->tx_wr.wr.ud.remote_qpn = qpn;
525 priv->tx_wr.wr.ud.ah = address;
528 priv->tx_wr.wr.ud.mss = skb_shinfo(skb)->gso_size;
529 priv->tx_wr.wr.ud.header = head;
530 priv->tx_wr.wr.ud.hlen = hlen;
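The InfiniBand hits share one convention: the sender (ipoib_ib.c above) fills the wr.ud union of the send work request with the address handle, remote QPN and Q_Key, and the HCA drivers (mlx4, mthca, qib, ipath, ehca) read those fields back while building the hardware WQE. A compilable sketch of that producer/consumer split using stand-in types; the real struct ib_send_wr of this kernel generation carries more fields:

    #include <stdio.h>
    #include <stdint.h>

    struct ah { int port; };                 /* stand-in for struct ib_ah */

    struct send_wr {                         /* stand-in for struct ib_send_wr */
        union {
            struct {
                struct ah *ah;               /* address handle for the datagram */
                uint32_t remote_qpn;         /* destination QP number */
                uint32_t remote_qkey;        /* destination Q_Key */
                uint16_t pkey_index;         /* partition key index */
            } ud;
        } wr;
    };

    /* Consumer side (cf. ipoib_ib.c): set the UD addressing before posting. */
    static void fill_ud_wr(struct send_wr *wr, struct ah *address,
                           uint32_t qpn, uint32_t qkey)
    {
        wr->wr.ud.ah = address;
        wr->wr.ud.remote_qpn = qpn;
        wr->wr.ud.remote_qkey = qkey;
        wr->wr.ud.pkey_index = 0;
    }

    /* Provider side (cf. mlx4/mthca qp.c): copy the fields into the WQE. */
    static void build_wqe(const struct send_wr *wr)
    {
        printf("dqpn=0x%x qkey=0x%x port=%d\n",
               wr->wr.ud.remote_qpn, wr->wr.ud.remote_qkey, wr->wr.ud.ah->port);
    }

    int main(void)
    {
        struct ah address = { .port = 1 };
        struct send_wr wr;

        fill_ud_wr(&wr, &address, 0x1234, 0x80010000);
        build_wqe(&wr);
        return 0;
    }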

Completed in 1054 milliseconds
