Searched refs:dma_list (Results 1 - 9 of 9) sorted by path

/drivers/block/rsxx/
dma.c 686 struct list_head dma_list[RSXX_MAX_TARGETS]; local
704 INIT_LIST_HEAD(&dma_list[i]);
715 st = rsxx_queue_discard(card, &dma_list[tgt], laddr,
737 st = rsxx_queue_dma(card, &dma_list[tgt],
755 if (!list_empty(&dma_list[i])) {
758 list_splice_tail(&dma_list[i], &card->ctrl[i].queue);
770 rsxx_cleanup_dma_queue(&card->ctrl[i], &dma_list[i],
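
The rsxx hits above show a staging pattern: DMA requests for a bio are first queued on an on-stack array of per-target list_heads and only spliced onto the per-channel queues (or handed to rsxx_cleanup_dma_queue() on error) once the whole bio has been walked. A minimal sketch of that shape; NR_TARGETS, struct dma_req, tgt_of() and stage_and_splice() are illustrative placeholders, not driver code:

#include <linux/list.h>

#define NR_TARGETS 4	/* stand-in for RSXX_MAX_TARGETS */

struct dma_req {
	struct list_head list;
	unsigned long laddr;	/* logical address used to pick a target */
};

/*
 * Stage requests on per-target lists, then splice each non-empty list
 * onto the matching hardware queue in one operation.
 */
static void stage_and_splice(struct list_head *hw_queue /* [NR_TARGETS] */,
			     struct dma_req *reqs, int nr,
			     int (*tgt_of)(unsigned long laddr))
{
	struct list_head staged[NR_TARGETS];
	int i;

	for (i = 0; i < NR_TARGETS; i++)
		INIT_LIST_HEAD(&staged[i]);

	/* Queue each request on the staging list of its target. */
	for (i = 0; i < nr; i++)
		list_add_tail(&reqs[i].list, &staged[tgt_of(reqs[i].laddr)]);

	/*
	 * Hand everything over at once; on failure the driver instead
	 * passes the staged lists to its cleanup helper.
	 */
	for (i = 0; i < NR_TARGETS; i++)
		if (!list_empty(&staged[i]))
			list_splice_tail(&staged[i], &hw_queue[i]);
}
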
/drivers/infiniband/hw/mthca/
mthca_allocator.c 200 u64 *dma_list = NULL; local
223 dma_list = kmalloc(npages * sizeof *dma_list, GFP_KERNEL);
224 if (!dma_list)
228 dma_list[i] = t + i * (1 << shift);
234 dma_list = kmalloc(npages * sizeof *dma_list, GFP_KERNEL);
235 if (!dma_list)
253 dma_list[i] = t;
261 dma_list, shif
[all...]
mthca_eq.c 471 u64 *dma_list = NULL; local
490 dma_list = kmalloc(npages * sizeof *dma_list, GFP_KERNEL);
491 if (!dma_list)
505 dma_list[i] = t;
519 dma_list, PAGE_SHIFT, npages,
551 kfree(dma_list);
582 kfree(dma_list);
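
Both mthca hits build a temporary u64 array holding one bus address per page and pass it to a firmware call (mthca_mr_alloc_phys() in the allocator, the EQ mapping call in mthca_eq.c) before freeing it. A hedged sketch of that shape, where write_pages_to_hw() stands in for the real firmware helpers:

#include <linux/slab.h>
#include <linux/types.h>

/*
 * Build a per-page address list for a physically contiguous buffer and
 * hand it to the hardware; the list itself is only needed while the
 * hardware is being programmed.
 */
static int map_buffer_pages(dma_addr_t base, int npages, int shift,
			    int (*write_pages_to_hw)(u64 *dma_list, int npages))
{
	u64 *dma_list;
	int i, err;

	dma_list = kmalloc_array(npages, sizeof(*dma_list), GFP_KERNEL);
	if (!dma_list)
		return -ENOMEM;

	/*
	 * One entry per page: base + i * (1 << shift) for a contiguous
	 * buffer; the per-page allocation path instead stores each
	 * page's own bus address.
	 */
	for (i = 0; i < npages; i++)
		dma_list[i] = base + ((u64)i << shift);

	err = write_pages_to_hw(dma_list, npages);

	kfree(dma_list);
	return err;
}
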
/drivers/misc/genwqe/
card_base.h 181 dma_addr_t *dma_list; /* list of dma addresses per page */ member in struct:dma_mapping
377 dma_addr_t *dma_list);
card_dev.c 962 &m->dma_list[page_offs]);
card_utils.c 235 static void genwqe_unmap_pages(struct genwqe_dev *cd, dma_addr_t *dma_list, argument
241 for (i = 0; (i < num_pages) && (dma_list[i] != 0x0); i++) {
242 pci_unmap_page(pci_dev, dma_list[i],
244 dma_list[i] = 0x0;
250 dma_addr_t *dma_list)
259 dma_list[i] = 0x0;
272 dma_list[i] = daddr;
277 genwqe_unmap_pages(cd, dma_list, num_pages);
369 dma_addr_t *dma_list)
404 daddr = dma_list[
248 genwqe_map_pages(struct genwqe_dev *cd, struct page **page_list, int num_pages, dma_addr_t *dma_list) argument
368 genwqe_setup_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, dma_addr_t *dma_list) argument
[all...]
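
The genwqe hits use dma_list as a dma_addr_t array with one slot per user page: the slots are pre-zeroed, filled by pci_map_page(), and the unmap helper stops at the first 0x0 entry, so a partially mapped set can be torn down safely. A simplified sketch of that map/unmap pair (the genwqe structures and error code are placeholders):

#include <linux/pci.h>

/* Tear down mappings up to the first unmapped (0x0) slot. */
static void demo_unmap_pages(struct pci_dev *pdev, dma_addr_t *dma_list,
			     int num_pages)
{
	int i;

	for (i = 0; (i < num_pages) && (dma_list[i] != 0x0); i++) {
		pci_unmap_page(pdev, dma_list[i], PAGE_SIZE,
			       PCI_DMA_BIDIRECTIONAL);
		dma_list[i] = 0x0;
	}
}

static int demo_map_pages(struct pci_dev *pdev, struct page **page_list,
			  int num_pages, dma_addr_t *dma_list)
{
	int i;

	/* Pre-zero so the unmap helper can stop at the first unmapped slot. */
	for (i = 0; i < num_pages; i++)
		dma_list[i] = 0x0;

	for (i = 0; i < num_pages; i++) {
		dma_addr_t daddr = pci_map_page(pdev, page_list[i], 0,
						PAGE_SIZE,
						PCI_DMA_BIDIRECTIONAL);

		if (pci_dma_mapping_error(pdev, daddr)) {
			demo_unmap_pages(pdev, dma_list, num_pages);
			return -EIO;
		}
		dma_list[i] = daddr;
	}
	return 0;
}
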
/drivers/net/ethernet/mellanox/mlx4/
eq.c 893 u64 *dma_list = NULL; local
914 dma_list = kmalloc(npages * sizeof *dma_list, GFP_KERNEL);
915 if (!dma_list)
929 dma_list[i] = t;
949 err = mlx4_write_mtt(dev, &eq->mtt, 0, npages, dma_list);
969 kfree(dma_list);
993 kfree(dma_list);
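
The mlx4 EQ code follows the same shape as the mthca hits, with the per-page addresses written into the event queue's MTT via mlx4_write_mtt() and the temporary list freed on both the success and the error path. A minimal sketch of that final step (write_eq_mtt() is an illustrative wrapper, not a driver function):

#include <linux/slab.h>
#include <linux/mlx4/device.h>

static int write_eq_mtt(struct mlx4_dev *dev, struct mlx4_mtt *mtt,
			u64 *dma_list, int npages)
{
	int err;

	/* Program the MTT with the per-page bus addresses collected above. */
	err = mlx4_write_mtt(dev, mtt, 0, npages, dma_list);

	/*
	 * Success or failure, the temporary list has done its job once
	 * the hardware has been (or failed to be) programmed.
	 */
	kfree(dma_list);
	return err;
}
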
/drivers/vfio/
vfio_iommu_type1.c 59 struct rb_root dma_list; member in struct:vfio_iommu
92 struct rb_node *node = iommu->dma_list.rb_node;
110 struct rb_node **link = &iommu->dma_list.rb_node, *parent = NULL;
124 rb_insert_color(&new->node, &iommu->dma_list);
129 rb_erase(&old->node, &iommu->dma_list);
627 n = rb_first(&iommu->dma_list);
787 while ((node = rb_first(&iommu->dma_list)))
850 iommu->dma_list = RB_ROOT;
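
In vfio_iommu_type1.c, dma_list is not a linked list at all but an rb_root: DMA mappings are kept in a red-black tree keyed by IOVA range, which is what the rb_node walk, rb_insert_color(), rb_erase() and rb_first() hits above are doing. A hedged sketch of that lookup/insert shape, with struct dma_mapping as a simplified stand-in for struct vfio_dma:

#include <linux/rbtree.h>

struct dma_mapping {
	struct rb_node node;
	unsigned long iova;	/* start of the mapped range */
	size_t size;
};

/* Return a mapping whose range overlaps [iova, iova + size), if any. */
static struct dma_mapping *dma_find(struct rb_root *dma_list,
				    unsigned long iova, size_t size)
{
	struct rb_node *node = dma_list->rb_node;

	while (node) {
		struct dma_mapping *dma = rb_entry(node, struct dma_mapping,
						   node);

		if (iova + size <= dma->iova)
			node = node->rb_left;
		else if (iova >= dma->iova + dma->size)
			node = node->rb_right;
		else
			return dma;	/* ranges overlap */
	}
	return NULL;
}

static void dma_insert(struct rb_root *dma_list, struct dma_mapping *new)
{
	struct rb_node **link = &dma_list->rb_node, *parent = NULL;

	while (*link) {
		struct dma_mapping *dma = rb_entry(*link, struct dma_mapping,
						   node);

		parent = *link;
		if (new->iova + new->size <= dma->iova)
			link = &(*link)->rb_left;
		else
			link = &(*link)->rb_right;
	}

	rb_link_node(&new->node, parent, link);
	rb_insert_color(&new->node, dma_list);
}
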
/drivers/vme/
vme.c 716 struct vme_dma_list *dma_list; local
725 dma_list = kmalloc(sizeof(struct vme_dma_list), GFP_KERNEL);
726 if (dma_list == NULL) {
730 INIT_LIST_HEAD(&dma_list->entries);
731 dma_list->parent = ctrlr;
732 mutex_init(&dma_list->mtx);
734 return dma_list;
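
The vme.c hit allocates a fresh DMA list object and initialises it before handing it back to the caller: an empty list of entries, a pointer back to the owning controller resource, and a mutex protecting the list. A simplified sketch, with struct demo_dma_list standing in for struct vme_dma_list:

#include <linux/list.h>
#include <linux/mutex.h>
#include <linux/slab.h>

struct demo_dma_list {
	struct list_head entries;	/* individual DMA descriptors */
	void *parent;			/* owning controller resource */
	struct mutex mtx;		/* serialises list manipulation */
};

static struct demo_dma_list *demo_dma_list_create(void *ctrlr)
{
	struct demo_dma_list *dma_list;

	dma_list = kmalloc(sizeof(*dma_list), GFP_KERNEL);
	if (dma_list == NULL)
		return NULL;

	INIT_LIST_HEAD(&dma_list->entries);
	dma_list->parent = ctrlr;
	mutex_init(&dma_list->mtx);

	return dma_list;
}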

Completed in 237 milliseconds