Lines matching refs:iu — every occurrence of the information-unit pointer "iu", evidently from the Linux SRP initiator (drivers/infiniband/ulp/srp/ib_srp.c):

209 struct srp_iu *iu;
211 iu = kmalloc(sizeof *iu, gfp_mask);
212 if (!iu)
215 iu->buf = kzalloc(size, gfp_mask);
216 if (!iu->buf)
219 iu->dma = ib_dma_map_single(host->srp_dev->dev, iu->buf, size,
221 if (ib_dma_mapping_error(host->srp_dev->dev, iu->dma))
224 iu->size = size;
225 iu->direction = direction;
227 return iu;
230 kfree(iu->buf);
232 kfree(iu);
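
Lines 209-232 outline the information-unit (IU) allocator: a descriptor via kmalloc(), a zeroed data buffer, a one-time DMA mapping, and an unwind path. A sketch of how the matched lines plausibly assemble (the signature is inferred from the arguments used; the goto labels and NULL return are assumptions, not matched lines). The design point worth noting: the buffer is mapped once here and stays mapped for the IU's lifetime, with per-use dma_sync calls instead of per-use map/unmap.

#include <linux/slab.h>
#include <rdma/ib_verbs.h>
#include "ib_srp.h"	/* struct srp_iu, struct srp_host */

static struct srp_iu *srp_alloc_iu(struct srp_host *host, size_t size,
				   gfp_t gfp_mask,
				   enum dma_data_direction direction)
{
	struct srp_iu *iu;

	iu = kmalloc(sizeof *iu, gfp_mask);
	if (!iu)
		goto out;

	iu->buf = kzalloc(size, gfp_mask);
	if (!iu->buf)
		goto out_free_iu;

	/* Map once at allocation time; synced per use, unmapped at free. */
	iu->dma = ib_dma_map_single(host->srp_dev->dev, iu->buf, size,
				    direction);
	if (ib_dma_mapping_error(host->srp_dev->dev, iu->dma))
		goto out_free_buf;

	iu->size      = size;
	iu->direction = direction;

	return iu;

out_free_buf:
	kfree(iu->buf);
out_free_iu:
	kfree(iu);
out:
	return NULL;
}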
237 static void srp_free_iu(struct srp_host *host, struct srp_iu *iu)
239 if (!iu)
242 ib_dma_unmap_single(host->srp_dev->dev, iu->dma, iu->size,
243 iu->direction);
244 kfree(iu->buf);
245 kfree(iu);
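
srp_free_iu() (237-245) is matched almost in full; assembled, it is the teardown mirror of the allocator. The ordering matters: unmap before freeing, so the HCA no longer owns the buffer when it is returned to the allocator.

static void srp_free_iu(struct srp_host *host, struct srp_iu *iu)
{
	if (!iu)
		return;

	ib_dma_unmap_single(host->srp_dev->dev, iu->dma, iu->size,
			    iu->direction);
	kfree(iu->buf);
	kfree(iu);
}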
1521 static void srp_put_tx_iu(struct srp_target_port *target, struct srp_iu *iu,
1527 list_add(&iu->list, &target->free_tx);
1550 struct srp_iu *iu;
1567 iu = list_first_entry(&target->free_tx, struct srp_iu, list);
1568 list_del(&iu->list);
1569 return iu;
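
Lines 1521-1569 show the transmit-IU pool: srp_put_tx_iu() returns an IU to a free_tx list and __srp_get_tx_iu() takes one off it (list_first_entry + list_del). A sketch, assuming target->lock protects the list and that SRP credit accounting (req_lim) lives here as in the mainline driver; mainline additionally reserves credits for task management, which is omitted below.

static void srp_put_tx_iu(struct srp_target_port *target, struct srp_iu *iu,
			  enum srp_iu_type iu_type)
{
	unsigned long flags;

	spin_lock_irqsave(&target->lock, flags);
	list_add(&iu->list, &target->free_tx);
	if (iu_type != SRP_IU_RSP)	/* responses don't consume credits */
		++target->req_lim;
	spin_unlock_irqrestore(&target->lock, flags);
}

/* Caller is assumed to hold target->lock. */
static struct srp_iu *__srp_get_tx_iu(struct srp_target_port *target,
				      enum srp_iu_type iu_type)
{
	struct srp_iu *iu;

	if (list_empty(&target->free_tx))
		return NULL;

	if (iu_type != SRP_IU_RSP) {
		if (target->req_lim <= 0)
			return NULL;	/* out of target credits */
		--target->req_lim;
	}

	iu = list_first_entry(&target->free_tx, struct srp_iu, list);
	list_del(&iu->list);
	return iu;
}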
1573 struct srp_iu *iu, int len)
1578 list.addr = iu->dma;
1583 wr.wr_id = (uintptr_t) iu;
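
srp_post_send() (1573-1583) builds a single-SGE send work request; stashing the IU pointer in wr_id is what lets the completion handler recover it later (see line 1843). Sketch, assuming a target->lkey field for the local memory key:

static int srp_post_send(struct srp_target_port *target,
			 struct srp_iu *iu, int len)
{
	struct ib_sge list;
	struct ib_send_wr wr, *bad_wr;

	list.addr   = iu->dma;
	list.length = len;		/* only 'len' bytes are valid */
	list.lkey   = target->lkey;	/* assumed field name */

	wr.next       = NULL;
	wr.wr_id      = (uintptr_t) iu;	/* round-trips through the CQE */
	wr.sg_list    = &list;
	wr.num_sge    = 1;
	wr.opcode     = IB_WR_SEND;
	wr.send_flags = IB_SEND_SIGNALED;

	return ib_post_send(target->qp, &wr, &bad_wr);
}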
1592 static int srp_post_recv(struct srp_target_port *target, struct srp_iu *iu)
1597 list.addr = iu->dma;
1598 list.length = iu->size;
1602 wr.wr_id = (uintptr_t) iu;
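
The receive side (1592-1602) is symmetric, except the full iu->size is posted because the length of the incoming IU is not known in advance:

static int srp_post_recv(struct srp_target_port *target, struct srp_iu *iu)
{
	struct ib_sge list;
	struct ib_recv_wr wr, *bad_wr;

	list.addr   = iu->dma;
	list.length = iu->size;
	list.lkey   = target->lkey;	/* assumed field name */

	wr.next    = NULL;
	wr.wr_id   = (uintptr_t) iu;
	wr.sg_list = &list;
	wr.num_sge = 1;

	return ib_post_recv(target->qp, &wr, &bad_wr);
}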
1669 struct srp_iu *iu;
1674 iu = __srp_get_tx_iu(target, SRP_IU_RSP);
1677 if (!iu) {
1683 ib_dma_sync_single_for_cpu(dev, iu->dma, len, DMA_TO_DEVICE);
1684 memcpy(iu->buf, rsp, len);
1685 ib_dma_sync_single_for_device(dev, iu->dma, len, DMA_TO_DEVICE);
1687 err = srp_post_send(target, iu, len);
1691 srp_put_tx_iu(target, iu, SRP_IU_RSP);
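
Lines 1669-1691 show the standard pattern for filling a pre-mapped IU: sync for CPU, write the payload, sync back for the device, post, and return the IU to the pool if the post fails. A sketch of that response path; the function name is hypothetical and the return convention is not visible in the matches.

static int srp_send_response(struct srp_target_port *target,
			     void *rsp, int len)
{
	struct ib_device *dev = target->srp_host->srp_dev->dev;
	unsigned long flags;
	struct srp_iu *iu;
	int err;

	spin_lock_irqsave(&target->lock, flags);
	iu = __srp_get_tx_iu(target, SRP_IU_RSP);
	spin_unlock_irqrestore(&target->lock, flags);
	if (!iu)
		return -ENOMEM;		/* assumed: no tx IU available */

	/* Hand the buffer to the CPU, fill it, hand it back to the HCA. */
	ib_dma_sync_single_for_cpu(dev, iu->dma, len, DMA_TO_DEVICE);
	memcpy(iu->buf, rsp, len);
	ib_dma_sync_single_for_device(dev, iu->dma, len, DMA_TO_DEVICE);

	err = srp_post_send(target, iu, len);
	if (err)
		srp_put_tx_iu(target, iu, SRP_IU_RSP);	/* back to the pool */

	return err;
}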
1731 struct srp_iu *iu = (struct srp_iu *) (uintptr_t) wc->wr_id;
1735 ib_dma_sync_single_for_cpu(dev, iu->dma, target->max_ti_iu_len,
1738 opcode = *(u8 *) iu->buf;
1744 iu->buf, wc->byte_len, true);
1749 srp_process_rsp(target, iu->buf);
1753 srp_process_cred_req(target, iu->buf);
1757 srp_process_aer_req(target, iu->buf);
1772 ib_dma_sync_single_for_device(dev, iu->dma, target->max_ti_iu_len,
1775 res = srp_post_recv(target, iu);
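
The receive handler (1731-1775) recovers the IU from wc->wr_id, syncs the buffer, dispatches on its first byte (the SRP opcode), and reposts the same IU. The fragment at 1744 (iu->buf, wc->byte_len, true) looks like the tail of a print_hex_dump() debug call, omitted below. Sketch; SRP_RSP, SRP_CRED_REQ, and SRP_AER_REQ come from <scsi/srp.h>:

static void srp_handle_recv(struct srp_target_port *target, struct ib_wc *wc)
{
	struct ib_device *dev = target->srp_host->srp_dev->dev;
	struct srp_iu *iu = (struct srp_iu *) (uintptr_t) wc->wr_id;
	u8 opcode;
	int res;

	ib_dma_sync_single_for_cpu(dev, iu->dma, target->max_ti_iu_len,
				   DMA_FROM_DEVICE);

	opcode = *(u8 *) iu->buf;	/* first byte is the SRP opcode */

	switch (opcode) {
	case SRP_RSP:
		srp_process_rsp(target, iu->buf);
		break;
	case SRP_CRED_REQ:
		srp_process_cred_req(target, iu->buf);
		break;
	case SRP_AER_REQ:
		srp_process_aer_req(target, iu->buf);
		break;
	default:
		shost_printk(KERN_WARNING, target->scsi_host,
			     PFX "unhandled opcode 0x%02x\n", opcode);
		break;
	}

	ib_dma_sync_single_for_device(dev, iu->dma, target->max_ti_iu_len,
				      DMA_FROM_DEVICE);

	res = srp_post_recv(target, iu);	/* recycle the IU */
	if (res)
		shost_printk(KERN_ERR, target->scsi_host,
			     PFX "recv repost failed: %d\n", res);
}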
1811 PFX "failed %s status %d for iu %p\n",
1839 struct srp_iu *iu;
1843 iu = (struct srp_iu *) (uintptr_t) wc.wr_id;
1844 list_add(&iu->list, &target->free_tx);
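
On the send side, a successful completion simply returns the IU to free_tx (1843-1844); the print at 1811 suggests errored work requests are logged with the IU pointer instead. A sketch of such a CQ polling loop; the caller is assumed to serialize access to free_tx (mainline does this via target->lock):

static void srp_send_completion(struct ib_cq *cq, void *target_ptr)
{
	struct srp_target_port *target = target_ptr;
	struct ib_wc wc;
	struct srp_iu *iu;

	while (ib_poll_cq(cq, 1, &wc) > 0) {
		if (likely(wc.status == IB_WC_SUCCESS)) {
			iu = (struct srp_iu *) (uintptr_t) wc.wr_id;
			list_add(&iu->list, &target->free_tx);
		} else {
			shost_printk(KERN_ERR, target->scsi_host,
				     PFX "failed %s status %d for iu %p\n",
				     "send", wc.status,
				     (void *) (uintptr_t) wc.wr_id);
		}
	}
}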
1856 struct srp_iu *iu;
1877 iu = __srp_get_tx_iu(target, SRP_IU_CMD);
1878 if (!iu)
1886 ib_dma_sync_single_for_cpu(dev, iu->dma, target->max_iu_len,
1891 cmd = iu->buf;
1900 req->cmd = iu;
1917 ib_dma_sync_single_for_device(dev, iu->dma, target->max_iu_len,
1920 if (srp_post_send(target, iu, len)) {
1937 srp_put_tx_iu(target, iu, SRP_IU_CMD);
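
Lines 1856-1937 are the SCSI fast path: take a CMD IU, sync for the CPU, build the SRP_CMD in iu->buf, remember the IU in the request (req->cmd = iu, so it can be reclaimed on error or abort), sync for the device, and post. A heavily trimmed sketch assuming a free_reqs request list as in the mainline driver; LUN/tag setup and data unmapping on error are elided.

static int srp_queuecommand(struct Scsi_Host *shost, struct scsi_cmnd *scmnd)
{
	struct srp_target_port *target = host_to_target(shost);
	struct ib_device *dev = target->srp_host->srp_dev->dev;
	struct srp_request *req;
	struct srp_iu *iu;
	struct srp_cmd *cmd;
	unsigned long flags;
	int len;

	spin_lock_irqsave(&target->lock, flags);
	iu = __srp_get_tx_iu(target, SRP_IU_CMD);
	if (!iu) {
		spin_unlock_irqrestore(&target->lock, flags);
		return SCSI_MLQUEUE_HOST_BUSY;
	}
	req = list_first_entry(&target->free_reqs, struct srp_request, list);
	list_del(&req->list);
	spin_unlock_irqrestore(&target->lock, flags);

	ib_dma_sync_single_for_cpu(dev, iu->dma, target->max_iu_len,
				   DMA_TO_DEVICE);

	cmd = iu->buf;
	memset(cmd, 0, sizeof *cmd);
	cmd->opcode = SRP_CMD;
	memcpy(cmd->cdb, scmnd->cmnd, scmnd->cmd_len);

	req->scmnd = scmnd;
	req->cmd   = iu;	/* so the IU can be reclaimed later */

	len = srp_map_data(scmnd, target, req);	/* build data descriptors */
	if (len < 0)
		goto err_iu;

	ib_dma_sync_single_for_device(dev, iu->dma, target->max_iu_len,
				      DMA_TO_DEVICE);

	if (srp_post_send(target, iu, len))
		goto err_iu;

	return 0;

err_iu:
	srp_put_tx_iu(target, iu, SRP_IU_CMD);
	/* the real driver also unmaps data and returns req to free_reqs */
	return SCSI_MLQUEUE_HOST_BUSY;
}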
2093 struct srp_iu *iu = target->rx_ring[i];
2094 ret = srp_post_recv(target, iu);
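
During (re)connect, every IU in the receive ring is reposted (2093-2094). A sketch of such a loop as a helper (the helper name is hypothetical; an SRP_RQ_SIZE-entry rx_ring is assumed):

static int srp_repost_rx_ring(struct srp_target_port *target)
{
	int i, ret = 0;

	for (i = 0; i < SRP_RQ_SIZE; ++i) {
		struct srp_iu *iu = target->rx_ring[i];

		ret = srp_post_recv(target, iu);
		if (ret)
			break;	/* caller is assumed to tear down */
	}
	return ret;
}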
2317 struct srp_iu *iu;
2331 iu = __srp_get_tx_iu(target, SRP_IU_TSK_MGMT);
2334 if (!iu) {
2340 ib_dma_sync_single_for_cpu(dev, iu->dma, sizeof *tsk_mgmt,
2342 tsk_mgmt = iu->buf;
2351 ib_dma_sync_single_for_device(dev, iu->dma, sizeof *tsk_mgmt,
2353 if (srp_post_send(target, iu, sizeof *tsk_mgmt)) {
2354 srp_put_tx_iu(target, iu, SRP_IU_TSK_MGMT);
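
Finally, lines 2317-2354 show task management (abort, LUN reset) reusing the same machinery with a dedicated SRP_IU_TSK_MGMT type, again returning the IU to the pool when the post fails. Sketch; the signature, tag encoding, and LUN shift follow the mainline driver but are not visible in the matches, and mainline then waits on a completion for the response IU.

static int srp_send_tsk_mgmt(struct srp_target_port *target,
			     u64 req_tag, unsigned int lun, u8 func)
{
	struct ib_device *dev = target->srp_host->srp_dev->dev;
	struct srp_tsk_mgmt *tsk_mgmt;
	struct srp_iu *iu;

	iu = __srp_get_tx_iu(target, SRP_IU_TSK_MGMT);	/* locking elided */
	if (!iu)
		return -1;

	ib_dma_sync_single_for_cpu(dev, iu->dma, sizeof *tsk_mgmt,
				   DMA_TO_DEVICE);
	tsk_mgmt = iu->buf;
	memset(tsk_mgmt, 0, sizeof *tsk_mgmt);

	tsk_mgmt->opcode	= SRP_TSK_MGMT;
	tsk_mgmt->lun		= cpu_to_be64((u64) lun << 48);
	tsk_mgmt->tag		= req_tag | SRP_TAG_TSK_MGMT; /* per mainline */
	tsk_mgmt->tsk_mgmt_func	= func;
	tsk_mgmt->task_tag	= req_tag;

	ib_dma_sync_single_for_device(dev, iu->dma, sizeof *tsk_mgmt,
				      DMA_TO_DEVICE);
	if (srp_post_send(target, iu, sizeof *tsk_mgmt)) {
		srp_put_tx_iu(target, iu, SRP_IU_TSK_MGMT);
		return -1;
	}

	return 0;
}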