Searched refs:sgl (Results 26 - 50 of 211) sorted by relevance

/drivers/scsi/
storvsc_drv.c
542 static void destroy_bounce_buffer(struct scatterlist *sgl, argument
549 page_buf = sg_page((&sgl[i]));
554 kfree(sgl);
557 static int do_bounce_buffer(struct scatterlist *sgl, unsigned int sg_count) argument
569 if (sgl[i].offset + sgl[i].length != PAGE_SIZE)
573 if (sgl[i].offset != 0)
577 if (sgl[i].length != PAGE_SIZE || sgl[i].offset != 0)
584 static struct scatterlist *create_bounce_buffer(struct scatterlist *sgl, argument
617 sg_kmap_atomic(struct scatterlist *sgl, int idx) argument
1567 struct scatterlist *sgl; local
[all...]
3w-sas.h
236 TW_SG_Entry_ISO sgl[TW_LIBERATOR_MAX_SGL_LENGTH_OLD]; member in struct:TW_Command::__anon5047::__anon5048
240 TW_SG_Entry_ISO sgl[TW_LIBERATOR_MAX_SGL_LENGTH_OLD]; member in struct:TW_Command::__anon5047::__anon5049
/drivers/staging/android/ion/
ion_heap.c
48 for_each_sg(table->sgl, sg, table->nents, i) {
81 for_each_sg(table->sgl, sg, table->nents, i) {
118 static int ion_heap_sglist_zero(struct scatterlist *sgl, unsigned int nents, argument
126 for_each_sg_page(sgl, &piter, nents, 0) {
151 return ion_heap_sglist_zero(table->sgl, table->nents, pgprot);
ion_system_heap.c
162 sg = table->sgl;
195 for_each_sg(table->sgl, sg, table->nents, i)
346 sg_set_page(table->sgl, page, len, 0);
366 struct page *page = sg_page(table->sgl);
381 struct page *page = sg_page(table->sgl);
/drivers/dma/sh/
shdma-base.c
538 struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr,
548 for_each_sg(sgl, sg, sg_len, i)
565 for_each_sg(sgl, sg, sg_len, i) {
639 struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len,
662 return shdma_prep_sg(schan, sgl, sg_len, &slave_addr,
680 struct scatterlist *sgl; local
708 sgl = kcalloc(sg_len, sizeof(*sgl), GFP_KERNEL);
709 if (!sgl)
712 sg_init_table(sgl, sg_le
537 shdma_prep_sg(struct shdma_chan *schan, struct scatterlist *sgl, unsigned int sg_len, dma_addr_t *addr, enum dma_transfer_direction direction, unsigned long flags, bool cyclic) argument
638 shdma_prep_slave_sg( struct dma_chan *chan, struct scatterlist *sgl, unsigned int sg_len, enum dma_transfer_direction direction, unsigned long flags, void *context) argument
[all...]
/drivers/scsi/bnx2fc/
bnx2fc_hwi.c
1470 struct fcoe_ext_mul_sges_ctx *sgl; local
1518 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.lo =
1520 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_addr.hi =
1522 task->txwr_only.sgl_ctx.sgl.mul_sgl.sgl_size =
1524 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_off =
1526 task->txwr_only.sgl_ctx.sgl.mul_sgl.cur_sge_idx = i;
1536 sgl = &task->rxwr_only.union_ctx.read_info.sgl_ctx.sgl;
1537 sgl->mul_sgl.cur_sge_addr.lo = (u32)phys_addr;
1538 sgl
1593 struct fcoe_ext_mul_sges_ctx *sgl; local
1691 struct fcoe_ext_mul_sges_ctx *sgl; local
[all...]
/drivers/net/ethernet/intel/i40e/
i40e_fcoe.c
176 if (ddp->sgl) {
177 dma_unmap_sg(&pf->pdev->dev, ddp->sgl, ddp->sgc,
179 ddp->sgl = NULL;
745 if (!ddp->sgl)
814 * @sgl: the scatter-gather list for this request
821 struct scatterlist *sgl, unsigned int sgc,
852 if (ddp->sgl) {
853 dev_info(&pf->pdev->dev, "xid 0x%x w/ non-null sgl=%p nents=%d\n",
854 xid, ddp->sgl, ddp->sgc);
870 /* setup dma from scsi command sgl */
820 i40e_fcoe_ddp_setup(struct net_device *netdev, u16 xid, struct scatterlist *sgl, unsigned int sgc, int target_mode) argument
967 i40e_fcoe_ddp_get(struct net_device *netdev, u16 xid, struct scatterlist *sgl, unsigned int sgc) argument
988 i40e_fcoe_ddp_target(struct net_device *netdev, u16 xid, struct scatterlist *sgl, unsigned int sgc) argument
[all...]
/drivers/net/ethernet/chelsio/cxgb4vf/
sge.c
158 struct ulptx_sgl *sgl; /* scatter/gather list in TX Queue */ member in struct:tx_sw_desc
306 const struct ulptx_sgl *sgl, const struct sge_txq *tq)
312 dma_unmap_single(dev, be64_to_cpu(sgl->addr0),
313 be32_to_cpu(sgl->len0), DMA_TO_DEVICE);
315 dma_unmap_page(dev, be64_to_cpu(sgl->addr0),
316 be32_to_cpu(sgl->len0), DMA_TO_DEVICE);
324 for (p = sgl->sge; nfrags >= 2; nfrags -= 2) {
393 unmap_sgl(dev, sdesc->skb, sdesc->sgl, tq);
872 * @sgl: starting location for writing the SGL
886 struct ulptx_sgl *sgl, u6
305 unmap_sgl(struct device *dev, const struct sk_buff *skb, const struct ulptx_sgl *sgl, const struct sge_txq *tq) argument
885 write_sgl(const struct sk_buff *skb, struct sge_txq *tq, struct ulptx_sgl *sgl, u64 *end, unsigned int start, const dma_addr_t *addr) argument
1316 struct ulptx_sgl *sgl = (struct ulptx_sgl *)(cpl + 1); local
[all...]
/drivers/infiniband/hw/mlx5/
mem.c
66 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) {
125 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) {
/drivers/xen/
swiotlb-xen.c
546 xen_swiotlb_map_sg_attrs(struct device *hwdev, struct scatterlist *sgl, argument
555 for_each_sg(sgl, sg, nelems, i) {
571 xen_swiotlb_unmap_sg_attrs(hwdev, sgl, i, dir,
573 sg_dma_len(sgl) = 0;
604 xen_swiotlb_unmap_sg_attrs(struct device *hwdev, struct scatterlist *sgl, argument
613 for_each_sg(sgl, sg, nelems, i)
627 xen_swiotlb_sync_sg(struct device *hwdev, struct scatterlist *sgl, argument
634 for_each_sg(sgl, sg, nelems, i)
/drivers/gpu/drm/
drm_cache.c
117 for_each_sg_page(st->sgl, &sg_iter, st->nents, 0)
/drivers/misc/genwqe/
card_base.h
345 * @sgl: scatter gather list needs to be 128 byte aligned
346 * @sgl_dma_addr: dma address of sgl
347 * @sgl_size: size of area used for sgl
355 struct sg_entry *sgl; member in struct:genwqe_sgl
356 size_t sgl_size; /* size of sgl */
373 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl,
376 int genwqe_setup_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl,
379 int genwqe_free_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl);
/drivers/spi/
spi-pxa2xx-dma.c
55 for_each_sg(sgt->sgl, sg, sgt->nents, i) {
67 nents = dma_map_sg(dmadev, sgt->sgl, sgt->nents, dir);
88 dma_unmap_sg(dmadev, sgt->sgl, sgt->nents, dir);
206 return dmaengine_prep_slave_sg(chan, sgt->sgl, nents, dir,
spi-pl022.c
808 dma_unmap_sg(pl022->dma_tx_channel->device->dev, pl022->sgt_tx.sgl,
810 dma_unmap_sg(pl022->dma_rx_channel->device->dev, pl022->sgt_rx.sgl,
821 BUG_ON(!pl022->sgt_rx.sgl);
835 pl022->sgt_rx.sgl,
839 for_each_sg(pl022->sgt_rx.sgl, sg, pl022->sgt_rx.nents, i) {
849 for_each_sg(pl022->sgt_tx.sgl, sg, pl022->sgt_tx.nents, i) {
886 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) {
907 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) {
1061 rx_sglen = dma_map_sg(rxchan->device->dev, pl022->sgt_rx.sgl,
1066 tx_sglen = dma_map_sg(txchan->device->dev, pl022->sgt_tx.sgl,
[all...]
/drivers/media/pci/cx23885/
cx23885-vbi.c
154 ret = dma_map_sg(&dev->pci->dev, sgt->sgl, sgt->nents, DMA_FROM_DEVICE);
159 sgt->sgl,
175 dma_unmap_sg(&dev->pci->dev, sgt->sgl, sgt->nents, DMA_FROM_DEVICE);
/drivers/char/
virtio_console.c
876 struct sg_list *sgl = sd->u.data; local
879 if (sgl->n == sgl->size)
889 sg_set_page(&(sgl->sg[sgl->n]), buf->page, len, buf->offset);
908 sg_set_page(&(sgl->sg[sgl->n]), page, len, offset);
910 sgl->n++;
911 sgl->len += len;
922 struct sg_list sgl; local
[all...]
/drivers/scsi/isci/
request.c
582 stp_req->sgl.offset = 0;
583 stp_req->sgl.set = SCU_SGL_ELEMENT_PAIR_A;
587 stp_req->sgl.index = 0;
590 stp_req->sgl.index = -1;
1261 struct scu_sgl_element *sgl; local
1264 struct isci_stp_pio_sgl *pio_sgl = &stp_req->sgl;
1268 sgl = NULL;
1272 sgl = NULL;
1275 sgl = &sgl_pair->B;
1280 sgl
1355 struct scu_sgl_element *sgl; local
[all...]
/drivers/media/pci/saa7134/
saa7134-ts.c
111 ret = dma_map_sg(&dev->pci->dev, dma->sgl, dma->nents, DMA_FROM_DEVICE);
114 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents,
126 dma_unmap_sg(&dev->pci->dev, dma->sgl, dma->nents, DMA_FROM_DEVICE);
/drivers/gpu/drm/i915/
i915_gem_render_state.c
86 page = sg_page(so->obj->pages->sgl);
/drivers/tty/serial/
imx.c
500 struct scatterlist *sgl = &sport->tx_sgl[0]; local
504 dma_unmap_sg(sport->port.dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE);
528 struct scatterlist *sgl = sport->tx_sgl; local
543 sg_init_table(sgl, 2);
544 sg_set_buf(sgl, xmit->buf + xmit->tail,
546 sg_set_buf(sgl + 1, xmit->buf, xmit->head);
549 sg_init_one(sgl, xmit->buf + xmit->tail, sport->tx_bytes);
552 ret = dma_map_sg(dev, sgl, sport->dma_tx_nents, DMA_TO_DEVICE);
557 desc = dmaengine_prep_slave_sg(chan, sgl, sport->dma_tx_nents,
899 struct scatterlist *sgl local
923 struct scatterlist *sgl = &sport->rx_sgl; local
[all...]
mxs-auart.c
215 struct scatterlist *sgl = &s->tx_sgl; local
229 sg_init_one(sgl, s->tx_dma_buf, size);
230 dma_map_sg(s->dev, sgl, 1, DMA_TO_DEVICE);
231 desc = dmaengine_prep_slave_sg(channel, sgl,
465 struct scatterlist *sgl = &s->rx_sgl; local
481 sg_init_one(sgl, s->rx_dma_buf, UART_XMIT_SIZE);
482 dma_map_sg(s->dev, sgl, 1, DMA_FROM_DEVICE);
483 desc = dmaengine_prep_slave_sg(channel, sgl, 1, DMA_DEV_TO_MEM,
/drivers/dma/
coh901318_lli.c
233 struct scatterlist *sgl, unsigned int nents,
258 for_each_sg(sgl, sg, nents, i) {
231 coh901318_lli_fill_sg(struct coh901318_pool *pool, struct coh901318_lli *lli, struct scatterlist *sgl, unsigned int nents, dma_addr_t dev_addr, u32 ctrl_chained, u32 ctrl, u32 ctrl_last, enum dma_transfer_direction dir, u32 ctrl_irq_mask) argument
/drivers/net/wireless/brcm80211/brcmfmac/
bcmsdh.c
513 struct scatterlist *sgl; local
561 mmc_dat.sg = sdiodev->sgtable.sgl;
577 sgl = sdiodev->sgtable.sgl;
587 sg_set_buf(sgl, pkt_data, sg_data_sz);
590 sgl = sg_next(sgl);
659 sg_init_table(sdiodev->sgtable.sgl, sdiodev->sgtable.orig_nents);
/drivers/scsi/be2iscsi/
be_mgmt.h
55 struct amap_mcc_sge sgl[19]; member in union:amap_mcc_wrb_payload::__anon5192
84 struct mcc_sge sgl[19]; member in union:mcc_wrb_payload::__anon5193
/drivers/scsi/megaraid/
megaraid_sas_base.c
1247 &pthru->sgl);
1251 &pthru->sgl);
1254 &pthru->sgl);
1390 &ldio->sgl);
1393 ldio->sge_count = megasas_make_sgl64(instance, scp, &ldio->sgl);
1395 ldio->sge_count = megasas_make_sgl32(instance, scp, &ldio->sgl);
1477 mfi_sgl = &ldio->sgl;
1487 mfi_sgl = &pthru->sgl;
1498 printk(KERN_ERR "megasas: sgl len : 0x%x, sgl add
[all...]

Completed in 3070 milliseconds
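
Note: nearly every hit above is a use of the kernel scatterlist/DMA API (struct scatterlist, struct sg_table, for_each_sg(), dma_map_sg()). As a reader's aid, a minimal sketch of that shared pattern follows; the helper my_dev_map_table() and its debug output are hypothetical and are not taken from any of the files listed above.

#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

/*
 * Hypothetical helper illustrating the dma_map_sg()/for_each_sg() pattern
 * that most of the hits above share; not copied from any driver listed here.
 */
static int my_dev_map_table(struct device *dev, struct sg_table *table)
{
	struct scatterlist *sg;
	int nents, i;

	/* Map the whole table for device-bound DMA. */
	nents = dma_map_sg(dev, table->sgl, table->nents, DMA_TO_DEVICE);
	if (!nents)
		return -ENOMEM;

	/* Walk only the mapped entries; DMA address/length are valid here. */
	for_each_sg(table->sgl, sg, nents, i)
		dev_dbg(dev, "seg %d: addr %pad len %u\n",
			i, &sg_dma_address(sg), sg_dma_len(sg));

	dma_unmap_sg(dev, table->sgl, table->nents, DMA_TO_DEVICE);
	return 0;
}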
