/drivers/gpu/drm/msm/

msm_gem_prime.c
    40: msm_gem_prime_import_sg_table(struct drm_device *dev, struct dma_buf_attachment *attach, struct sg_table *sg)   [sg: argument]
    41:         struct dma_buf_attachment *attach, struct sg_table *sg)
    43:     return msm_gem_import(dev, attach->dmabuf->size, sg);

msm_iommu.c
    51:     struct scatterlist *sg;   [sg: local]
    59:     for_each_sg(sgt->sgl, sg, sgt->nents, i) {
    60:         u32 pa = sg_phys(sg) - sg->offset;
    61:         size_t bytes = sg->length + sg->offset;
    77:     for_each_sg(sgt->sgl, sg, i, j) {
    78:         size_t bytes = sg->length + sg->offset;
    90:     struct scatterlist *sg;   [sg: local]
    [all...]

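The msm_iommu.c hits show the canonical for_each_sg() walk over a mapped table, widening each entry to whole pages before it is handed to the IOMMU. A minimal sketch of that walk, assuming a hypothetical map_range() callback in place of the real iommu ops (all names below are made up for illustration):

    #include <linux/scatterlist.h>

    /* Walk each entry of a table; map_range() is a hypothetical stand-in. */
    static int walk_sgt(struct sg_table *sgt,
                        int (*map_range)(phys_addr_t pa, size_t bytes))
    {
        struct scatterlist *sg;
        int i, ret;

        for_each_sg(sgt->sgl, sg, sgt->nents, i) {
            /* round the entry out to its containing pages */
            phys_addr_t pa = sg_phys(sg) - sg->offset;
            size_t bytes = sg->length + sg->offset;

            ret = map_range(pa, bytes);
            if (ret)
                return ret;
        }
        return 0;
    }
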
/drivers/net/wireless/orinoco/

mic.c
    51:     struct scatterlist sg[2];   [sg: local]
    68:     sg_init_table(sg, 2);
    69:     sg_set_buf(&sg[0], hdr, sizeof(hdr));
    70:     sg_set_buf(&sg[1], data, data_len);
    77:     return crypto_hash_digest(&desc, sg, data_len + sizeof(hdr),

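mic.c hashes a synthetic header and the frame payload as one logical buffer by placing them in adjacent scatterlist entries. A sketch of just the scatterlist setup, with a hypothetical helper name (the crypto_hash_digest() call above belongs to the old pre-shash API, so the digest step is left out):

    #include <linux/scatterlist.h>

    /* Describe hdr + payload as one logical buffer for hashing. */
    static void build_hdr_payload_sg(struct scatterlist sg[2],
                                     void *hdr, size_t hdr_len,
                                     void *data, size_t data_len)
    {
        sg_init_table(sg, 2);      /* zeroes entries, marks sg[1] as last */
        sg_set_buf(&sg[0], hdr, hdr_len);
        sg_set_buf(&sg[1], data, data_len);
    }
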
/drivers/crypto/caam/

sg_sw_sec4.h
    33: sg_to_sec4_sg(struct scatterlist *sg, int sg_count,   [sg: argument]
    37:         dma_to_sec4_sg_one(sec4_sg_ptr, sg_dma_address(sg),
    38:                            sg_dma_len(sg), offset);
    40:         sg = scatterwalk_sg_next(sg);
    50: static inline void sg_to_sec4_sg_last(struct scatterlist *sg, int sg_count,   [sg: argument]
    54:     sec4_sg_ptr = sg_to_sec4_sg(sg, sg_count, sec4_sg_ptr, offset);
    62:     struct scatterlist *sg = sg_list;   [sg: local]
    67:         nbytes -= sg->length;
    68:         if (!sg_is_last(sg)
    88: dma_map_sg_chained(struct device *dev, struct scatterlist *sg, unsigned int nents, enum dma_data_direction dir, bool chained)   [sg: argument]
    104: dma_unmap_sg_chained(struct device *dev, struct scatterlist *sg, unsigned int nents, enum dma_data_direction dir, bool chained)   [sg: argument]
    [all...]

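Lines 62-68 are the usual "how many entries cover nbytes" loop that drivers run before sizing a hardware descriptor table. A sketch of that loop under a hypothetical name, using plain sg_next() where the caam header uses the crypto-layer scatterwalk_sg_next():

    #include <linux/scatterlist.h>

    /* Count the entries of a (possibly chained) list covering nbytes. */
    static int sg_count_for_len(struct scatterlist *sg_list, int nbytes)
    {
        struct scatterlist *sg = sg_list;
        int ents = 0;

        while (nbytes > 0 && sg) {
            ents++;
            nbytes -= sg->length;
            if (sg_is_last(sg))
                break;
            sg = sg_next(sg);
        }
        return ents;
    }
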
/drivers/gpu/drm/nouveau/

nouveau_prime.c
    59: nouveau_gem_prime_import_sg_table(struct drm_device *dev, struct dma_buf_attachment *attach, struct sg_table *sg)   [sg: argument]
    61:                   struct sg_table *sg)
    72:                   sg, robj, &nvbo);

/drivers/gpu/drm/radeon/

radeon_prime.c
    60: radeon_gem_prime_import_sg_table(struct drm_device *dev, struct dma_buf_attachment *attach, struct sg_table *sg)   [sg: argument]
    62:                 struct sg_table *sg)
    71:                 RADEON_GEM_DOMAIN_GTT, 0, sg, resv, &bo);

/drivers/media/pci/tw68/

tw68-risc.c
    46:     struct scatterlist *sg;   [sg: local]
    62:     sg = sglist;
    65:     while (offset && offset >= sg_dma_len(sg)) {
    66:         offset -= sg_dma_len(sg);
    67:         sg = sg_next(sg);
    69:     if (bpl <= sg_dma_len(sg) - offset) {
    73:         *(rp++) = cpu_to_le32(sg_dma_address(sg) + offset);
    84:         done = (sg_dma_len(sg) - offset);
    88:         *(rp++) = cpu_to_le32(sg_dma_address(sg)
    [all...]

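The loop at lines 65-67 skips an initial byte offset into the mapped list before the RISC program is emitted. The same skip, isolated as a sketch with a hypothetical name:

    #include <linux/scatterlist.h>

    /*
     * Advance into a DMA-mapped list until *offset bytes are consumed;
     * returns the entry holding the first wanted byte, with the residual
     * offset into that entry left in *offset.
     */
    static struct scatterlist *sg_skip(struct scatterlist *sg,
                                       unsigned int *offset)
    {
        while (*offset && *offset >= sg_dma_len(sg)) {
            *offset -= sg_dma_len(sg);
            sg = sg_next(sg);
        }
        return sg;
    }
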
/drivers/scsi/arm/

scsi.h
    23:  * copy_SCp_to_sg() Assumes contiguous allocation at @sg of at-most @max
    25:  * (possibly chained) sg-list
    27: static inline int copy_SCp_to_sg(struct scatterlist *sg, struct scsi_pointer *SCp, int max)   [sg: argument]
    36:         sg_set_buf(sg, SCp->ptr, SCp->this_residual);
    43:             *(++sg) = *src_sg;
    44:         sg_mark_end(sg);
    100:     struct scatterlist *sg;   [sg: local]
    103:     scsi_for_each_sg(SCpnt, sg, sg_count, i)
    104:         len += sg->length;

/drivers/usb/storage/

protocol.c
    139:     struct scatterlist *sg = *sgptr;   [sg: local]
    143:     if (sg)
    144:         nents = sg_nents(sg);
    146:         sg = scsi_sglist(srb);
    148:     sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ?
    163:         if (*offset + len < miter.piter.sg->length) {
    165:             *sgptr = miter.piter.sg;
    168:             *sgptr = sg_next(miter.piter.sg);
    185:     struct scatterlist *sg = NULL;   [sg: local]
    188:     buflen = usb_stor_access_xfer_buf(buffer, buflen, srb, &sg,
    [all...]

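protocol.c moves SCSI transfer data through the sg_miter API, which handles the kmap of highmem pages internally. A minimal copy-out sketch along the same lines as the FROM_XFER_BUF direction above; the helper name is made up:

    #include <linux/kernel.h>
    #include <linux/scatterlist.h>
    #include <linux/string.h>

    /* Copy up to buflen bytes out of an sg list into buf. */
    static size_t copy_from_sg(struct scatterlist *sg, unsigned int nents,
                               void *buf, size_t buflen)
    {
        struct sg_mapping_iter miter;
        size_t copied = 0;

        sg_miter_start(&miter, sg, nents, SG_MITER_FROM_SG);
        while (copied < buflen && sg_miter_next(&miter)) {
            size_t len = min(miter.length, buflen - copied);

            memcpy((char *)buf + copied, miter.addr, len);
            copied += len;
        }
        sg_miter_stop(&miter);   /* drops the last kmap */
        return copied;
    }
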
/drivers/crypto/qce/

sha.h
    37:  * @src_orig: original request sg list
    45:  * @sg: used to chain sg lists
    64:     struct scatterlist sg[2];   [sg: member in struct qce_sha_reqctx]

dma.c
    57: int qce_mapsg(struct device *dev, struct scatterlist *sg, int nents,   [sg: argument]
    63:         while (sg) {
    64:             err = dma_map_sg(dev, sg, 1, dir);
    67:             sg = scatterwalk_sg_next(sg);
    70:         err = dma_map_sg(dev, sg, nents, dir);
    78: void qce_unmapsg(struct device *dev, struct scatterlist *sg, int nents,   [sg: argument]
    82:         while (sg) {
    83:             dma_unmap_sg(dev, sg, 1, dir);
    84:             sg
    92:     struct scatterlist *sg = sglist;   [sg: local]
    112:     struct scatterlist *sg = sgt->sgl, *sg_last = NULL;   [sg: local]
    134: qce_dma_prep_sg(struct dma_chan *chan, struct scatterlist *sg, int nents, unsigned long flags, enum dma_transfer_direction dir, dma_async_tx_callback cb, void *cb_param)   [sg: argument]
    [all...]

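qce_mapsg()/qce_unmapsg() fall back to mapping one entry at a time when the list is chained, apparently because dma_map_sg() on kernels of this vintage could not be relied on to follow chain links. A sketch of the map side under a hypothetical name, using sg_next() where qce uses the crypto-layer scatterwalk_sg_next():

    #include <linux/dma-mapping.h>
    #include <linux/scatterlist.h>

    static int map_sg_maybe_chained(struct device *dev,
                                    struct scatterlist *sg, int nents,
                                    enum dma_data_direction dir,
                                    bool chained)
    {
        if (!chained)
            return dma_map_sg(dev, sg, nents, dir);

        /* chained: map each link separately */
        while (sg) {
            if (!dma_map_sg(dev, sg, 1, dir))
                return 0;   /* error: caller unmaps what was mapped */
            sg = sg_next(sg);
        }
        return nents;
    }
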
/drivers/gpu/drm/omapdrm/

omap_gem_dmabuf.c
    29:     struct sg_table *sg;   [sg: local]
    33:     sg = kzalloc(sizeof(*sg), GFP_KERNEL);
    34:     if (!sg)
    44:     ret = sg_alloc_table(sg, 1, GFP_KERNEL);
    48:     sg_init_table(sg->sgl, 1);
    49:     sg_dma_len(sg->sgl) = obj->size;
    50:     sg_set_page(sg->sgl, pfn_to_page(PFN_DOWN(paddr)), obj->size, 0);
    51:     sg_dma_address(sg->sgl) = paddr;
    56:     return sg;
    62: omap_gem_unmap_dma_buf(struct dma_buf_attachment *attachment, struct sg_table *sg, enum dma_data_direction dir)   [sg: argument]
    [all...]

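omap_gem_dmabuf.c wraps one physically contiguous buffer in a single-entry sg_table, filling the CPU view (sg_set_page) and the device view (sg_dma_address/sg_dma_len) by hand. The same construction as a standalone sketch with a made-up name; it assumes the region is already visible to the device at paddr:

    #include <linux/err.h>
    #include <linux/mm.h>
    #include <linux/scatterlist.h>
    #include <linux/slab.h>

    /* Describe one contiguous region with a single-entry sg_table. */
    static struct sg_table *contig_to_sgt(phys_addr_t paddr, size_t size)
    {
        struct sg_table *sgt = kzalloc(sizeof(*sgt), GFP_KERNEL);

        if (!sgt)
            return ERR_PTR(-ENOMEM);
        if (sg_alloc_table(sgt, 1, GFP_KERNEL)) {
            kfree(sgt);
            return ERR_PTR(-ENOMEM);
        }
        sg_set_page(sgt->sgl, pfn_to_page(PFN_DOWN(paddr)), size, 0);
        sg_dma_address(sgt->sgl) = paddr;   /* contiguous, no real mapping */
        sg_dma_len(sgt->sgl) = size;
        return sgt;
    }
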
/drivers/message/i2o/

memory.c
    115:  * @sg: SG list to be mapped
    127: int i2o_dma_map_sg(struct i2o_controller *c, struct scatterlist *sg,   [sg: argument]
    144:     sg_count = dma_map_sg(&c->pdev->dev, sg, sg_count, direction);
    158:         *mptr++ = cpu_to_le32(sg_flags | sg_dma_len(sg));
    159:         *mptr++ = cpu_to_le32(i2o_dma_low(sg_dma_address(sg)));
    162:             *mptr++ = cpu_to_le32(i2o_dma_high(sg_dma_address(sg)));
    164:         sg = sg_next(sg);

/drivers/staging/android/ion/

ion_chunk_heap.c
    44:     struct scatterlist *sg;   [sg: local]
    67:     sg = table->sgl;
    73:         sg_set_page(sg, pfn_to_page(PFN_DOWN(paddr)),
    75:         sg = sg_next(sg);
    82:     sg = table->sgl;
    84:         gen_pool_free(chunk_heap->pool, page_to_phys(sg_page(sg)),
    85:                       sg->length);
    86:         sg = sg_next(sg);
    99:     struct scatterlist *sg;   [sg: local]
    [all...]

/drivers/char/hw_random/

virtio-rng.c
    57:     struct scatterlist sg;   [sg: local]
    59:     sg_init_one(&sg, buf, size);
    62:     virtqueue_add_inbuf(vi->vq, &sg, 1, buf, GFP_KERNEL);

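virtio-rng.c is the simplest case in the listing: sg_init_one() describing a single buffer, handed straight to the virtqueue. A sketch with a hypothetical helper name:

    #include <linux/scatterlist.h>
    #include <linux/virtio.h>

    /* Post one receive buffer on a virtqueue. */
    static int post_inbuf(struct virtqueue *vq, void *buf, unsigned int size)
    {
        struct scatterlist sg;

        sg_init_one(&sg, buf, size);
        /* buf doubles as the token later returned by virtqueue_get_buf() */
        return virtqueue_add_inbuf(vq, &sg, 1, buf, GFP_KERNEL);
    }
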
/drivers/gpu/drm/i915/

i915_gem_stolen.c
    339:     struct scatterlist *sg;   [sg: local]
    358:     sg = st->sgl;
    359:     sg->offset = 0;
    360:     sg->length = size;
    362:     sg_dma_address(sg) = (dma_addr_t)dev_priv->mm.stolen_base + offset;
    363:     sg_dma_len(sg) = size;

/drivers/gpu/drm/udl/

udl_gem.c
    181:     drm_prime_gem_destroy(gem_obj, obj->sg);
    224: udl_prime_create(struct drm_device *dev, size_t size, struct sg_table *sg, struct udl_gem_object **obj_p)   [sg: argument]
    226:                  struct sg_table *sg,
    239:     obj->sg = sg;
    246:     drm_prime_sg_to_page_addr_arrays(sg, obj->pages, NULL, npages);
    256:     struct sg_table *sg;   [sg: local]
    270:     sg = dma_buf_map_attachment(attach, DMA_BIDIRECTIONAL);
    271:     if (IS_ERR(sg)) {
    272:         ret = PTR_ERR(sg);
    276:     ret = udl_prime_create(dev, dma_buf->size, sg,
    [all...]

/drivers/ide/

ide-dma-sff.c
    120:     struct scatterlist *sg;   [sg: local]
    123:     for_each_sg(hwif->sg_table, sg, cmd->sg_nents, i) {
    126:         cur_addr = sg_dma_address(sg);
    127:         cur_len = sg_dma_len(sg);

/drivers/infiniband/core/

umem.c
    48:     struct scatterlist *sg;   [sg: local]
    57:     for_each_sg(umem->sg_head.sgl, sg, umem->npages, i) {
    59:         page = sg_page(sg);
    91:     struct scatterlist *sg, *sg_list_start;   [sg: local]
    174:     for_each_sg(sg_list_start, sg, ret, i) {
    178:         sg_set_page(sg, page_list[i], PAGE_SIZE, 0);
    182:     sg_list_start = sg;
    284:     struct scatterlist *sg;   [sg: local]
    289:     for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i)
    290:         n += sg_dma_len(sg) >> shift
    [all...]

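umem.c fills its sg table batch by batch as user pages are pinned (the pinning itself is elided in the hits above); lines 174-182 drop one batch of pages into consecutive entries and remember where the next batch should continue. A sketch of that inner fill under a made-up name, assuming the caller allocated at least n further entries:

    #include <linux/mm.h>
    #include <linux/scatterlist.h>

    /*
     * Fill consecutive entries from a batch of pinned pages; returns the
     * entry where the next batch should continue.
     */
    static struct scatterlist *fill_sg_batch(struct scatterlist *sg,
                                             struct page **pages, int n)
    {
        int i;

        for (i = 0; i < n; i++, sg = sg_next(sg))
            sg_set_page(sg, pages[i], PAGE_SIZE, 0);
        return sg;
    }
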
/drivers/infiniband/hw/ipath/

ipath_dma.c
    104:     struct scatterlist *sg;   [sg: local]
    111:     for_each_sg(sgl, sg, nents, i) {
    112:         addr = (u64) page_address(sg_page(sg));
    118:         sg->dma_address = addr + sg->offset;
    120:         sg->dma_length = sg->length;
    126: ipath_unmap_sg(struct ib_device *dev, struct scatterlist *sg, int nents, enum dma_data_direction direction)   [sg: argument]
    127:                struct scatterlist *sg, int nents,

/drivers/infiniband/hw/mlx5/

mem.c
    56:     struct scatterlist *sg;   [sg: local]
    66:     for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) {
    67:         len = sg_dma_len(sg) >> page_shift;
    68:         pfn = sg_dma_address(sg) >> page_shift;
    121:     struct scatterlist *sg;   [sg: local]
    125:     for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) {
    126:         len = sg_dma_len(sg) >> umem_page_shift;
    127:         base = sg_dma_address(sg);

/drivers/infiniband/hw/qib/

qib_dma.c
    97:     struct scatterlist *sg;   [sg: local]
    104:     for_each_sg(sgl, sg, nents, i) {
    105:         addr = (u64) page_address(sg_page(sg));
    111:         sg->dma_address = addr + sg->offset;
    113:         sg->dma_length = sg->length;
    119: qib_unmap_sg(struct ib_device *dev, struct scatterlist *sg, int nents, enum dma_data_direction direction)   [sg: argument]
    120:              struct scatterlist *sg, int nents,

/drivers/media/pci/ivtv/

ivtv-udma.c
    77:     struct scatterlist *sg;   [sg: local]
    79:     for (i = 0, sg = dma->SGlist; i < dma->SG_length; i++, sg++) {
    80:         dma->SGarray[i].size = cpu_to_le32(sg_dma_len(sg));
    81:         dma->SGarray[i].src = cpu_to_le32(sg_dma_address(sg));
    83:         buffer_offset += sg_dma_len(sg);
    85:         split -= sg_dma_len(sg);

/drivers/mmc/core/

sd_ops.c
    254:     struct scatterlist sg;   [sg: local]
    284:     data.sg = &sg;
    287:     sg_init_one(&sg, data_buf, 8);
    313:     struct scatterlist sg;   [sg: local]
    335:     data.sg = &sg;
    338:     sg_init_one(&sg, resp, 64);
    358:     struct scatterlist sg;   [sg: local]
    380:     data.sg
    [all...]

sdio_ops.c
    127:     struct scatterlist sg, *sg_ptr;   [sg: local]
    165:         data.sg = sgtable.sgl;
    168:         for_each_sg(data.sg, sg_ptr, data.sg_len, i) {
    175:         data.sg = &sg;
    178:         sg_init_one(&sg, buf, left_size);

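Both mmc files use the same idiom: a scatterlist on the stack, initialised with sg_init_one() and hung off struct mmc_data. A sketch of a one-segment read setup; the helper name and the 64-byte response size are illustrative only, mirroring the sd_ops.c hit at line 338:

    #include <linux/mmc/core.h>
    #include <linux/scatterlist.h>

    /* Describe a single 64-byte read for an mmc request. */
    static void setup_resp_data(struct mmc_data *data,
                                struct scatterlist *sg, void *resp)
    {
        sg_init_one(sg, resp, 64);
        data->sg = sg;
        data->sg_len = 1;
        data->blksz = 64;
        data->blocks = 1;
        data->flags = MMC_DATA_READ;
    }
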