Searched refs:nents (Results 26 - 50 of 111) sorted by relevance


/drivers/parisc/
ccio-dma.c
895 * @nents: The number of entries in the scatter/gather list.
901 ccio_map_sg(struct device *dev, struct scatterlist *sglist, int nents, argument
914 DBG_RUN_SG("%s() START %d entries\n", __func__, nents);
917 if (nents == 1) {
925 for(i = 0; i < nents; i++)
942 coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, ccio_alloc_range);
952 filled = iommu_fill_pdir(ioc, sglist, nents, hint, ccio_io_pdir_entry);
972 * @nents: The number of entries in the scatter/gather list.
978 ccio_unmap_sg(struct device *dev, struct scatterlist *sglist, int nents, argument
987 __func__, nents, sg_virt_add
[all...]
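
The ccio_map_sg() hits above show the usual shape of a parisc IOMMU map_sg callback: a fast path when nents == 1, then chunk coalescing and I/O pdir fill for larger lists. A minimal sketch of that control flow follows; my_map_single(), my_coalesce_chunks() and my_fill_pdir() are hypothetical stand-ins for the driver-specific helpers, not real kernel functions.

static int my_map_sg(struct device *dev, struct scatterlist *sglist,
		     int nents, enum dma_data_direction dir)
{
	int filled;

	if (nents == 1) {
		/* Single entry: map it exactly like dma_map_single() would. */
		sg_dma_address(sglist) = my_map_single(dev, sg_virt(sglist),
						       sglist->length, dir);
		sg_dma_len(sglist) = sglist->length;
		return 1;
	}

	/* Merge virtually contiguous chunks, then write the I/O pdir entries. */
	my_coalesce_chunks(dev, sglist, nents);
	filled = my_fill_pdir(dev, sglist, nents);

	/* Callers iterate only the 'filled' coalesced entries afterwards. */
	return filled;
}
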
sba_iommu.c
269 * @nents: number of entries in SG list
274 sba_dump_sg( struct ioc *ioc, struct scatterlist *startsg, int nents) argument
276 while (nents-- > 0) {
278 nents,
929 * @nents: number of entries in list
935 sba_map_sg(struct device *dev, struct scatterlist *sglist, int nents, argument
942 DBG_RUN_SG("%s() START %d entries\n", __func__, nents);
947 if (nents == 1) {
960 sba_dump_sg(ioc, sglist, nents);
977 coalesced = iommu_coalesce_chunks(ioc, dev, sglist, nents, sba_alloc_rang
1019 sba_unmap_sg(struct device *dev, struct scatterlist *sglist, int nents, enum dma_data_direction direction) argument
[all...]
/drivers/gpu/drm/armada/
armada_gem.c
456 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0) {
457 num = sgt->nents;
467 if (dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir) == 0)
498 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents, dir);
502 for_each_sg(sgt->sgl, sg, sgt->nents, i)
600 if (dobj->sgt->nents > 1) {
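
In armada_gem.c a dma_map_sg() return value of 0 is treated as failure, and the count it returns (possibly smaller than nents after segment merging) is what the later for_each_sg() walks use. A hedged sketch of that calling convention, assuming the caller already owns a populated sg_table:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>

static int map_attachment(struct device *dev, struct sg_table *sgt,
			  enum dma_data_direction dir)
{
	struct scatterlist *sg;
	int i, num;

	num = dma_map_sg(dev, sgt->sgl, sgt->nents, dir);
	if (num == 0)		/* zero mapped entries means the mapping failed */
		return -ENOMEM;

	/* Walk only the entries the mapping actually produced. */
	for_each_sg(sgt->sgl, sg, num, i)
		pr_debug("seg %d: %pad + %u\n", i, &sg_dma_address(sg), sg_dma_len(sg));

	/* Unmap with the same count that was passed to dma_map_sg(). */
	dma_unmap_sg(dev, sgt->sgl, sgt->nents, dir);
	return 0;
}
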
/drivers/media/pci/saa7134/
saa7134-ts.c
111 ret = dma_map_sg(&dev->pci->dev, dma->sgl, dma->nents, DMA_FROM_DEVICE);
114 return saa7134_pgtable_build(dev->pci, &dmaq->pt, dma->sgl, dma->nents,
126 dma_unmap_sg(&dev->pci->dev, dma->sgl, dma->nents, DMA_FROM_DEVICE);
/drivers/media/common/saa7146/
saa7146_core.c
191 pt->nents = pages;
192 slen = pci_map_sg(pci,pt->slist,pt->nents,PCI_DMA_FROMDEVICE);
202 pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE);
216 pci_unmap_sg(pci, pt->slist, pt->nents, PCI_DMA_FROMDEVICE);
/drivers/scsi/libfc/
fc_libfc.h
136 u32 *nents, size_t *offset,
/drivers/scsi/
scsi_lib.c
564 static inline unsigned int scsi_sgtable_index(unsigned short nents) argument
568 BUG_ON(nents > SCSI_MAX_SG_SEGMENTS);
570 if (nents <= 8)
573 index = get_count_order(nents) - 3;
578 static void scsi_sg_free(struct scatterlist *sgl, unsigned int nents) argument
582 sgp = scsi_sg_pools + scsi_sgtable_index(nents);
586 static struct scatterlist *scsi_sg_alloc(unsigned int nents, gfp_t gfp_mask) argument
590 sgp = scsi_sg_pools + scsi_sgtable_index(nents);
596 if (mq && sdb->table.nents <= SCSI_MAX_SG_SEGMENTS)
601 static int scsi_alloc_sgtable(struct scsi_data_buffer *sdb, int nents, argument
[all...]
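
scsi_sgtable_index() above (line 564) maps a segment count onto one of the scsi_sg_pools: counts up to 8 use pool 0, and beyond that the index is get_count_order(nents) - 3, i.e. ceil(log2(nents)) - 3, so each successive pool doubles in size. A small, purely illustrative userspace re-derivation of that index:

#include <stdio.h>

/* ceil(log2(n)), the same quantity the kernel's get_count_order() returns */
static unsigned int count_order(unsigned int n)
{
	unsigned int order = 0;

	while ((1u << order) < n)
		order++;
	return order;
}

static unsigned int sgtable_index(unsigned int nents)
{
	return nents <= 8 ? 0 : count_order(nents) - 3;
}

int main(void)
{
	unsigned int n;

	for (n = 8; n <= 128; n <<= 1)
		printf("nents=%3u -> pool index %u (pool of %u entries)\n",
		       n, sgtable_index(n), 8u << sgtable_index(n));
	return 0;
}
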
/drivers/infiniband/hw/cxgb3/
cxio_dbg.c
139 void cxio_dump_rqt(struct cxio_rdev *rdev, u32 hwtid, int nents) argument
142 int size = nents * 64;
cxio_hal.h
207 void cxio_dump_rqt(struct cxio_rdev *rdev, u32 hwtid, int nents);
/drivers/media/pci/cx23885/
cx23885-vbi.c
154 ret = dma_map_sg(&dev->pci->dev, sgt->sgl, sgt->nents, DMA_FROM_DEVICE);
175 dma_unmap_sg(&dev->pci->dev, sgt->sgl, sgt->nents, DMA_FROM_DEVICE);
/drivers/staging/android/ion/
ion_chunk_heap.c
108 dma_sync_sg_for_device(NULL, table->sgl, table->nents,
111 for_each_sg(table->sgl, sg, table->nents, i) {
ion_carveout_heap.c
119 dma_sync_sg_for_device(NULL, table->sgl, table->nents,
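
Both ion heaps above pair a for_each_sg() walk over table->nents with dma_sync_sg_for_device() so that CPU-initialised pages are visible to the hardware; passing NULL as the device is a quirk of that staging code. A more conventional sketch of the same cache-maintenance step, assuming the table was mapped with dma_map_sg() earlier:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <linux/string.h>

static void hand_buffer_to_device(struct device *dev, struct sg_table *table)
{
	struct scatterlist *sg;
	int i;

	/* Touch each segment from the CPU side (illustrative: zero it). */
	for_each_sg(table->sgl, sg, table->nents, i)
		memset(sg_virt(sg), 0, sg->length);

	/* Flush the CPU writes so the device sees them. */
	dma_sync_sg_for_device(dev, table->sgl, table->nents, DMA_TO_DEVICE);
}
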
/drivers/gpu/drm/vmwgfx/
vmwgfx_buffer.c
336 dma_unmap_sg(dev, vmw_tt->sgt.sgl, vmw_tt->sgt.nents,
338 vmw_tt->sgt.nents = vmw_tt->sgt.orig_nents;
364 vmw_tt->sgt.nents = ret;
420 if (vsgt->num_pages > vmw_tt->sgt.nents) {
423 vmw_tt->sgt.nents);
/drivers/media/v4l2-core/
videobuf2-dma-contig.c
77 for_each_sg(sgt->sgl, s, sgt->nents, i) {
123 dma_sync_sg_for_device(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir);
135 dma_sync_sg_for_cpu(buf->dev, sgt->sgl, sgt->nents, buf->dma_dir);
653 sgt->nents = dma_map_sg(buf->dev, sgt->sgl, sgt->orig_nents,
655 if (sgt->nents <= 0) {
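
videobuf2-dma-contig keeps both counts in the sg_table: orig_nents is the number of entries built into the list, while nents is whatever dma_map_sg() returns at line 653 (possibly fewer, after merging), and it is the latter that the dma_sync_sg_for_*() calls use. A short sketch of that bookkeeping, with placeholder function names:

static int map_table(struct device *dev, struct sg_table *sgt,
		     enum dma_data_direction dir)
{
	sgt->nents = dma_map_sg(dev, sgt->sgl, sgt->orig_nents, dir);
	if (sgt->nents <= 0)
		return -EIO;
	return 0;
}

static void unmap_table(struct device *dev, struct sg_table *sgt,
			enum dma_data_direction dir)
{
	/* dma_unmap_sg() takes the count originally handed to dma_map_sg(). */
	dma_unmap_sg(dev, sgt->sgl, sgt->orig_nents, dir);
	sgt->nents = sgt->orig_nents;
}
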
/drivers/crypto/
picoxcell_crypto.c
295 unsigned nents, mapped_ents; local
300 nents = sg_count(payload, nbytes);
301 mapped_ents = dma_map_sg(engine->dev, payload, nents, dir);
317 dma_unmap_sg(engine->dev, payload, nents, dir);
328 unsigned nents = sg_count(areq->src, areq->cryptlen); local
350 src_ents = dma_map_sg(engine->dev, areq->src, nents,
352 dst_ents = dma_map_sg(engine->dev, areq->dst, nents,
355 src_ents = dma_map_sg(engine->dev, areq->src, nents,
412 unsigned nents = sg_count(areq->src, areq->cryptlen); local
415 dma_unmap_sg(engine->dev, areq->src, nents, DMA_TO_DEVIC
435 unsigned nents = sg_count(payload, nbytes); local
[all...]
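
The picoxcell driver counts the entries covering nbytes with its local sg_count(), maps that many with dma_map_sg(), and later unmaps with the same nents rather than the mapped-entry count, which is what the DMA API requires. A sketch of that sequence; sg_nents_for_len() is the generic scatterlist helper used here in place of the driver-local sg_count(), and the other names are placeholders:

static int map_payload(struct device *engine_dev, struct scatterlist *src,
		       unsigned int nbytes, enum dma_data_direction dir)
{
	int nents, mapped_ents;

	nents = sg_nents_for_len(src, nbytes);
	if (nents < 0)
		return nents;

	mapped_ents = dma_map_sg(engine_dev, src, nents, dir);
	if (!mapped_ents)
		return -ENOMEM;

	/* ... program the DMA engine with 'mapped_ents' descriptors ... */

	/* Unmap with the original count, not with mapped_ents. */
	dma_unmap_sg(engine_dev, src, nents, dir);
	return 0;
}
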
mxs-dcp.c
259 const int nents = sg_nents(req->src); local
289 for_each_sg(req->src, src, nents, i) {
568 const int nents = sg_nents(req->src); local
583 for_each_sg(req->src, src, nents, i) {
/drivers/net/wireless/brcm80211/brcmfmac/
bcmsdh.c
885 uint nents; local
891 nents = max_t(uint, BRCMF_DEFAULT_RXGLOM_SIZE, brcmf_sdiod_txglomsz);
892 nents += (nents >> 4) + 1;
894 WARN_ON(nents > sdiodev->max_segment_count);
896 brcmf_dbg(TRACE, "nents=%d\n", nents);
897 err = sg_alloc_table(&sdiodev->sgtable, nents, GFP_KERNEL);
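
bcmsdh.c sizes its scatter table from the glomming parameters, pads the count by roughly 1/16th plus one (nents += (nents >> 4) + 1), warns if that exceeds the host controller's segment limit, and then calls sg_alloc_table(). A hedged sketch of that allocation step; MY_GLOM_SIZE and the max_segments parameter are placeholders, not the driver's real names:

#include <linux/kernel.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

#define MY_GLOM_SIZE 32	/* placeholder for BRCMF_DEFAULT_RXGLOM_SIZE */

static int alloc_glom_table(struct sg_table *sgtable, unsigned int txglomsz,
			    unsigned int max_segments)
{
	unsigned int nents = max_t(unsigned int, MY_GLOM_SIZE, txglomsz);

	nents += (nents >> 4) + 1;	/* small headroom, as in bcmsdh.c */
	WARN_ON(nents > max_segments);

	return sg_alloc_table(sgtable, nents, GFP_KERNEL);
}
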
/drivers/infiniband/ulp/isert/
ib_isert.h
76 int nents; member in struct:isert_data_buf
/drivers/spi/
spi-pl022.c
809 pl022->sgt_tx.nents, DMA_TO_DEVICE);
811 pl022->sgt_rx.nents, DMA_FROM_DEVICE);
836 pl022->sgt_rx.nents,
839 for_each_sg(pl022->sgt_rx.sgl, sg, pl022->sgt_rx.nents, i) {
849 for_each_sg(pl022->sgt_tx.sgl, sg, pl022->sgt_tx.nents, i) {
886 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) {
907 for_each_sg(sgtab->sgl, sg, sgtab->nents, i) {
1062 pl022->sgt_rx.nents, DMA_FROM_DEVICE);
1067 pl022->sgt_tx.nents, DMA_TO_DEVICE);
1106 pl022->sgt_tx.nents, DMA_TO_DEVIC
[all...]
/drivers/gpu/drm/i915/
i915_gem_tiling.c
481 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) {
511 for_each_sg_page(obj->pages->sgl, &sg_iter, obj->pages->nents, 0) {
/drivers/gpu/drm/radeon/
radeon_ttm.c
509 unsigned pinned = 0, nents; local
550 nents = dma_map_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
551 if (nents != ttm->sg->nents)
579 dma_unmap_sg(rdev->dev, ttm->sg->sgl, ttm->sg->nents, direction);
581 for_each_sg(ttm->sg->sgl, sg, ttm->sg->nents, i) {
/drivers/gpu/drm/
drm_prime.c
156 dma_unmap_sg(attach->dev, sgt->sgl, sgt->nents,
204 if (!dma_map_sg(attach->dev, sgt->sgl, sgt->nents, dir)) {
716 for_each_sg(sgt->sgl, sg, sgt->nents, count) {
/drivers/hsi/
hsi.c
532 * @nents: Number of memory entries
535 * nents can be 0. This mainly makes sense for read transfer.
541 struct hsi_msg *hsi_alloc_msg(unsigned int nents, gfp_t flags) argument
550 if (!nents)
553 err = sg_alloc_table(&msg->sgt, nents, flags);
569 * fields set beforehand. If nents > 0 then the client has to initialize
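
hsi_alloc_msg() takes the desired number of scatterlist entries and a gfp mask and allocates the message's sg_table internally; nents may be 0 for messages whose buffers the client attaches later (mostly reads). A hedged usage sketch of wrapping an existing buffer in a one-entry message; hsi_free_msg() is the matching destructor:

#include <linux/hsi/hsi.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static struct hsi_msg *build_one_entry_msg(void *buf, unsigned int len)
{
	struct hsi_msg *msg = hsi_alloc_msg(1, GFP_KERNEL);

	if (!msg)
		return NULL;

	/* Point the single sg entry at the caller's buffer. */
	sg_set_buf(msg->sgt.sgl, buf, len);
	return msg;
}
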
/drivers/char/agp/
intel-gtt.c
113 st->sgl, st->nents, PCI_DMA_BIDIRECTIONAL))
132 st.orig_nents = st.nents = num_sg;
857 for_each_sg(st->sgl, sg, st->nents, i) {
924 mem->num_sg = st.nents;
/drivers/crypto/ux500/hash/
hash_alg.h
307 * @nents: Number of sg entries.
316 int nents; member in struct:hash_dma

Completed in 3739 milliseconds
