Lines Matching refs:sg
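These matches appear to come from the TMIO MMC driver's DMA setup, likely drivers/mmc/host/tmio_mmc_dma.c in an older kernel (it still uses the since-removed PAGE_CACHE_SIZE): lines 49-120 belong to the receive (DMA_FROM_DEVICE) path, and lines 126-170 to the near-identical transmit (DMA_TO_DEVICE) path. Sketches of the surrounding logic, reconstructed under that assumption, follow each group of matches.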
49 struct scatterlist *sg = host->sg_ptr, *sg_tmp;
58 for_each_sg(sg, sg_tmp, host->sg_len, i) {
67 if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_CACHE_SIZE ||
73 if (sg->length < TMIO_MMC_MIN_DMA_LEN) {
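A minimal sketch of the receive-side checks the first matches belong to (lines 49-73), assuming the surrounding tmio_mmc context: the align mask (derived from the platform data's alignment shift), ret, and the pio: fallback label are not part of the matches above.

    struct scatterlist *sg = host->sg_ptr, *sg_tmp;
    bool aligned = true, multiple = true;
    int i;

    /* Walk the list: every element's offset must be aligned, and every
     * length must be a multiple of the controller's DMA alignment. */
    for_each_sg(sg, sg_tmp, host->sg_len, i) {
        if (sg_tmp->offset & align)
            aligned = false;
        if (sg_tmp->length & align) {
            multiple = false;
            break;
        }
    }

    /* An unaligned list is only salvageable via the bounce buffer when
     * it has a single element that fits in one page. */
    if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_CACHE_SIZE ||
              (align & PAGE_MASK))) || !multiple) {
        ret = -EINVAL;
        goto pio;
    }

    /* Very short transfers cost more in DMA setup than they save. */
    if (sg->length < TMIO_MMC_MIN_DMA_LEN) {
        host->force_pio = true;
        return;
    }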
80 /* Only a single sg element may be unaligned; use our bounce buffer then */
82 sg_init_one(&host->bounce_sg, host->bounce_buf, sg->length);
84 sg = host->sg_ptr;
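The bounce-buffer swap at lines 80-84, including the host->sg_ptr redirection that sits between the two matched assignments (taken from the surrounding source, so treat it as assumed context):

    /* A single unaligned element: stage the transfer in the pre-allocated,
     * properly aligned bounce buffer instead. */
    if (!aligned) {
        sg_init_one(&host->bounce_sg, host->bounce_buf, sg->length);
        host->sg_ptr = &host->bounce_sg;
        sg = host->sg_ptr;
    }

No copy is needed here on the read side; the received data is presumably copied out of the bounce buffer in the completion path.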
87 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_FROM_DEVICE);
89 desc = dmaengine_prep_slave_sg(chan, sg, ret,
120 dev_dbg(&host->pdev->dev, "%s(): desc %p, cookie %d, sg[%d]\n", __func__,
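The map/prepare/submit sequence behind lines 87-120, sketched without the error handling and the pio: fallback; dmaengine_submit() is the standard dmaengine submission helper:

    /* Map the (possibly substituted) list, then pass the *mapped* count
     * to the slave-sg prep: dma_map_sg() may coalesce entries, so ret
     * can be smaller than host->sg_len. */
    ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_FROM_DEVICE);
    if (ret > 0)
        desc = dmaengine_prep_slave_sg(chan, sg, ret,
                DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);

    if (desc)
        cookie = dmaengine_submit(desc);

    dev_dbg(&host->pdev->dev, "%s(): desc %p, cookie %d, sg[%d]\n",
            __func__, desc, cookie, host->sg_len);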
126 struct scatterlist *sg = host->sg_ptr, *sg_tmp;
135 for_each_sg(sg, sg_tmp, host->sg_len, i) {
144 if ((!aligned && (host->sg_len > 1 || sg->length > PAGE_CACHE_SIZE ||
150 if (sg->length < TMIO_MMC_MIN_DMA_LEN) {
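The transmit path (lines 126-150) repeats the same scatterlist walk and the same alignment and minimum-length checks as the receive path, so the sketch after line 73 applies here unchanged.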
157 /* Only a single sg element may be unaligned; use our bounce buffer then */
160 void *sg_vaddr = tmio_mmc_kmap_atomic(sg, &flags);
161 sg_init_one(&host->bounce_sg, host->bounce_buf, sg->length);
163 tmio_mmc_kunmap_atomic(sg, &flags, sg_vaddr);
165 sg = host->sg_ptr;
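On the write side the payload has to be copied into the bounce buffer before mapping; a sketch of lines 157-165, where the memcpy() and the host->sg_ptr assignment come from the surrounding source, and tmio_mmc_kmap_atomic()/tmio_mmc_kunmap_atomic() are the driver's wrappers for highmem-safe access to the sg page:

    if (!aligned) {
        unsigned long flags;
        void *sg_vaddr = tmio_mmc_kmap_atomic(sg, &flags);

        /* Fill the aligned bounce buffer with the outgoing data
         * before the DMA descriptor is prepared. */
        sg_init_one(&host->bounce_sg, host->bounce_buf, sg->length);
        memcpy(host->bounce_buf, sg_vaddr, host->bounce_sg.length);
        tmio_mmc_kunmap_atomic(sg, &flags, sg_vaddr);
        host->sg_ptr = &host->bounce_sg;
        sg = host->sg_ptr;
    }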
168 ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_TO_DEVICE);
170 desc = dmaengine_prep_slave_sg(chan, sg, ret,
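Mapping and descriptor preparation at lines 168-170 mirror the receive path with the directions flipped; a sketch:

    ret = dma_map_sg(chan->device->dev, sg, host->sg_len, DMA_TO_DEVICE);
    if (ret > 0)
        desc = dmaengine_prep_slave_sg(chan, sg, ret,
                DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);

Note the two direction enums: dma_map_sg() takes an enum dma_data_direction (DMA_TO_DEVICE/DMA_FROM_DEVICE), while dmaengine_prep_slave_sg() takes an enum dma_transfer_direction (DMA_MEM_TO_DEV/DMA_DEV_TO_MEM); each pair names the same transfer in the generic DMA-mapping API and the dmaengine slave API, respectively.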