Searched refs:dev_priv (Results 1 - 25 of 168) sorted by relevance

/drivers/gpu/drm/gma500/
blitter.h 20 extern int gma_blt_wait_idle(struct drm_psb_private *dev_priv);
gma_device.c 23 struct drm_psb_private *dev_priv = dev->dev_private; local
34 dev_priv->core_freq = 100;
37 dev_priv->core_freq = 133;
40 dev_priv->core_freq = 150;
43 dev_priv->core_freq = 178;
46 dev_priv->core_freq = 200;
51 dev_priv->core_freq = 266;
54 dev_priv->core_freq = 0;
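
The gma_device.c hits above all come from a single switch that turns a decoded clock strap into a core frequency in MHz. Below is a minimal, self-contained sketch of that pattern; only the assigned frequencies (100/133/150/178/200/266, with 0 as the fallback) appear in the hits, so the case values, the helper name and the reduced structure are illustrative assumptions.

/* Hedged sketch of the core-clock decode visible in the hits; the real
 * driver reads the strap from a configuration register first. */
struct drm_psb_private {
	unsigned int core_freq;		/* core clock in MHz */
};

static void gma_decode_core_freq(struct drm_psb_private *dev_priv,
				 unsigned int strap)
{
	switch (strap) {		/* case values are illustrative */
	case 0: dev_priv->core_freq = 100; break;
	case 1: dev_priv->core_freq = 133; break;
	case 2: dev_priv->core_freq = 150; break;
	case 3: dev_priv->core_freq = 178; break;
	case 4: dev_priv->core_freq = 200; break;
	case 5: dev_priv->core_freq = 266; break;
	default: dev_priv->core_freq = 0; break;	/* unknown strap */
	}
}
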
backlight.c 32 struct drm_psb_private *dev_priv = dev->dev_private; local
33 backlight_update_status(dev_priv->backlight_device);
40 struct drm_psb_private *dev_priv = dev->dev_private; local
41 dev_priv->backlight_enabled = true;
42 if (dev_priv->backlight_device) {
43 dev_priv->backlight_device->props.brightness = dev_priv->backlight_level;
52 struct drm_psb_private *dev_priv = dev->dev_private; local
53 dev_priv->backlight_enabled = false;
54 if (dev_priv
64 struct drm_psb_private *dev_priv = dev->dev_private; local
76 struct drm_psb_private *dev_priv = dev->dev_private; local
87 struct drm_psb_private *dev_priv = dev->dev_private; local
[all...]
psb_lid.c 28 struct drm_psb_private * dev_priv = (struct drm_psb_private *)data; local
29 struct drm_device *dev = (struct drm_device *)dev_priv->dev;
30 struct timer_list *lid_timer = &dev_priv->lid_timer;
32 u32 __iomem *lid_state = dev_priv->opregion.lid_state;
35 if (readl(lid_state) == dev_priv->lid_last_state)
61 dev_priv->lid_last_state = readl(lid_state);
64 spin_lock_irqsave(&dev_priv->lid_lock, irq_flags);
69 spin_unlock_irqrestore(&dev_priv->lid_lock, irq_flags);
72 void psb_lid_timer_init(struct drm_psb_private *dev_priv) argument
74 struct timer_list *lid_timer = &dev_priv
90 psb_lid_timer_takedown(struct drm_psb_private *dev_priv) argument
[all...]
power.c 49 struct drm_psb_private *dev_priv = dev->dev_private; local
52 dev_priv->apm_base = dev_priv->apm_reg & 0xffff;
53 dev_priv->ospm_base &= 0xffff;
55 dev_priv->display_power = true; /* We start active */
56 dev_priv->display_count = 0; /* Currently no users */
57 dev_priv->suspended = false; /* And not suspended */
61 if (dev_priv->ops->init_pm)
62 dev_priv->ops->init_pm(dev);
85 struct drm_psb_private *dev_priv local
103 struct drm_psb_private *dev_priv = dev->dev_private; local
127 struct drm_psb_private *dev_priv = dev->dev_private; local
157 struct drm_psb_private *dev_priv = dev->dev_private; local
192 struct drm_psb_private *dev_priv = dev->dev_private; local
237 struct drm_psb_private *dev_priv = dev->dev_private; local
251 struct drm_psb_private *dev_priv = dev->dev_private; local
290 struct drm_psb_private *dev_priv = dev->dev_private; local
312 struct drm_psb_private *dev_priv = drmdev->dev_private; local
[all...]
psb_drv.c 112 struct drm_psb_private *dev_priv = dev->dev_private; local
113 struct psb_fbdev *fbdev = dev_priv->fbdev;
124 struct drm_psb_private *dev_priv = dev->dev_private; local
125 struct psb_gtt *pg = &dev_priv->gtt;
138 dev_priv->gatt_free_offset = pg->mmu_gatt_start +
141 spin_lock_init(&dev_priv->irqmask_lock);
142 spin_lock_init(&dev_priv->lock_2d);
153 psb_spank(dev_priv);
164 struct drm_psb_private *dev_priv = dev->dev_private; local
168 if (dev_priv) {
228 struct drm_psb_private *dev_priv; local
413 struct drm_psb_private *dev_priv = dev->dev_private; local
[all...]
intel_bios.c 58 parse_edp(struct drm_psb_private *dev_priv, struct bdb_header *bdb) argument
67 dev_priv->edp.bpp = 18;
69 if (dev_priv->edp.support) {
71 dev_priv->edp.bpp);
76 panel_type = dev_priv->panel_type;
79 dev_priv->edp.bpp = 18;
82 dev_priv->edp.bpp = 24;
85 dev_priv->edp.bpp = 30;
93 dev_priv->edp.pps = *edp_pps;
96 dev_priv
201 parse_backlight_data(struct drm_psb_private *dev_priv, struct bdb_header *bdb) argument
230 parse_lfp_panel_data(struct drm_psb_private *dev_priv, struct bdb_header *bdb) argument
283 parse_sdvo_panel_data(struct drm_psb_private *dev_priv, struct bdb_header *bdb) argument
313 parse_general_features(struct drm_psb_private *dev_priv, struct bdb_header *bdb) argument
336 parse_sdvo_device_mapping(struct drm_psb_private *dev_priv, struct bdb_header *bdb) argument
427 parse_driver_features(struct drm_psb_private *dev_priv, struct bdb_header *bdb) argument
447 parse_device_mapping(struct drm_psb_private *dev_priv, struct bdb_header *bdb) argument
528 struct drm_psb_private *dev_priv = dev->dev_private; local
592 struct drm_psb_private *dev_priv = dev->dev_private; local
[all...]
mdfld_output.c 36 struct drm_psb_private *dev_priv = dev->dev_private; local
37 return dev_priv->mdfld_panel_id;
55 /* if (dev_priv->mdfld_hdmi_present)
56 mdfld_hdmi_init(dev, &dev_priv->mode_dev); */
64 struct drm_psb_private *dev_priv = dev->dev_private; local
67 dev_priv->mdfld_panel_id = TC35876X;
69 mdfld_init_panel(dev, 0, dev_priv->mdfld_panel_id);
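
Nearly every gma500 hit above opens with the same idiom: the driver-private structure is recovered from drm_device::dev_private at the top of the function. A short sketch of that retrieval follows, with stand-in type definitions rather than the real DRM headers.

/* Stand-in types; the real driver gets these from the DRM headers. */
struct drm_psb_private { unsigned int core_freq; };
struct drm_device { void *dev_private; };

/* The cast hidden behind "dev_priv = dev->dev_private" in the hits. */
static struct drm_psb_private *to_psb_priv(struct drm_device *dev)
{
	return (struct drm_psb_private *)dev->dev_private;
}
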
/drivers/gpu/drm/vmwgfx/
vmwgfx_irq.c 36 struct vmw_private *dev_priv = vmw_priv(dev); local
39 spin_lock(&dev_priv->irq_lock);
40 status = inl(dev_priv->io_start + VMWGFX_IRQSTATUS_PORT);
41 masked_status = status & dev_priv->irq_mask;
42 spin_unlock(&dev_priv->irq_lock);
45 outl(status, dev_priv->io_start + VMWGFX_IRQSTATUS_PORT);
52 vmw_fences_update(dev_priv->fman);
53 wake_up_all(&dev_priv->fence_queue);
57 wake_up_all(&dev_priv->fifo_queue);
63 static bool vmw_fifo_idle(struct vmw_private *dev_priv, uint32_ argument
74 vmw_update_seqno(struct vmw_private *dev_priv, struct vmw_fifo_state *fifo_state) argument
87 vmw_seqno_passed(struct vmw_private *dev_priv, uint32_t seqno) argument
116 vmw_fallback_wait(struct vmw_private *dev_priv, bool lazy, bool fifo_idle, uint32_t seqno, bool interruptible, unsigned long timeout) argument
185 vmw_seqno_waiter_add(struct vmw_private *dev_priv) argument
201 vmw_seqno_waiter_remove(struct vmw_private *dev_priv) argument
216 vmw_goal_waiter_add(struct vmw_private *dev_priv) argument
232 vmw_goal_waiter_remove(struct vmw_private *dev_priv) argument
246 vmw_wait_seqno(struct vmw_private *dev_priv, bool lazy, uint32_t seqno, bool interruptible, unsigned long timeout) argument
294 struct vmw_private *dev_priv = vmw_priv(dev); local
312 struct vmw_private *dev_priv = vmw_priv(dev); local
[all...]
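
The vmwgfx_irq.c hits outline the top half of the interrupt handler: read the status port under irq_lock, mask it against irq_mask, acknowledge by writing the raw status back to the same port, then wake the fence and FIFO wait queues. The sketch below mirrors only that control flow; the structure layout, the port-I/O helpers and the wake helpers are simplified stand-ins, not the real vmwgfx definitions.

#include <stdbool.h>
#include <stdint.h>

#define VMWGFX_IRQSTATUS_PORT 0x8	/* illustrative port offset */

struct vmw_private {
	unsigned long io_start;
	uint32_t irq_mask;
	/* irq_lock, fence_queue and fifo_queue elided in this sketch */
};

static uint32_t port_read32(unsigned long port) { (void)port; return 0; }
static void port_write32(uint32_t v, unsigned long port) { (void)v; (void)port; }
static void wake_fence_waiters(struct vmw_private *p) { (void)p; }
static void wake_fifo_waiters(struct vmw_private *p) { (void)p; }

/* Returns true when the device raised an interrupt we were masked for. */
static bool vmw_irq_top_half_sketch(struct vmw_private *dev_priv)
{
	uint32_t status, masked;

	/* the real code samples the port under dev_priv->irq_lock */
	status = port_read32(dev_priv->io_start + VMWGFX_IRQSTATUS_PORT);
	masked = status & dev_priv->irq_mask;
	if (!masked)
		return false;

	/* acknowledge by writing the raw status back to the same port */
	port_write32(status, dev_priv->io_start + VMWGFX_IRQSTATUS_PORT);

	/* then wake anyone sleeping on fences or on FIFO space */
	wake_fence_waiters(dev_priv);
	wake_fifo_waiters(dev_priv);
	return true;
}
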
vmwgfx_drv.c 286 * @dev_priv: A device private structure.
296 static int vmw_dummy_query_bo_create(struct vmw_private *dev_priv) argument
309 ret = ttm_bo_create(&dev_priv->bdev,
337 dev_priv->dummy_query_bo = bo;
342 static int vmw_request_device(struct vmw_private *dev_priv) argument
346 ret = vmw_fifo_init(dev_priv, &dev_priv->fifo);
351 vmw_fence_fifo_up(dev_priv->fman);
352 if (dev_priv->has_mob) {
353 ret = vmw_otables_setup(dev_priv);
375 vmw_release_device(struct vmw_private *dev_priv) argument
398 vmw_3d_resource_inc(struct vmw_private *dev_priv, bool unhide_svga) argument
428 vmw_3d_resource_dec(struct vmw_private *dev_priv, bool hide_svga) argument
459 vmw_get_initial_size(struct vmw_private *dev_priv) argument
498 vmw_dma_select_mode(struct vmw_private *dev_priv) argument
566 vmw_dma_masks(struct vmw_private *dev_priv) argument
578 vmw_dma_masks(struct vmw_private *dev_priv) argument
586 struct vmw_private *dev_priv; local
885 struct vmw_private *dev_priv = vmw_priv(dev); local
932 struct vmw_private *dev_priv = vmw_priv(dev); local
959 struct vmw_private *dev_priv = vmw_priv(dev); local
1149 struct vmw_private *dev_priv = vmw_priv(dev); local
1207 struct vmw_private *dev_priv = vmw_priv(dev); local
1257 struct vmw_private *dev_priv = local
1295 struct vmw_private *dev_priv = vmw_priv(dev); local
1337 struct vmw_private *dev_priv = vmw_priv(dev); local
1365 struct vmw_private *dev_priv = vmw_priv(dev); local
[all...]
vmwgfx_ttm_glue.c 34 struct vmw_private *dev_priv; local
42 dev_priv = vmw_priv(file_priv->minor->dev);
43 return ttm_bo_mmap(filp, vma, &dev_priv->bdev);
57 int vmw_ttm_global_init(struct vmw_private *dev_priv) argument
62 global_ref = &dev_priv->mem_global_ref;
74 dev_priv->bo_global_ref.mem_glob =
75 dev_priv->mem_global_ref.object;
76 global_ref = &dev_priv->bo_global_ref.ref;
90 drm_global_item_unref(&dev_priv->mem_global_ref);
94 void vmw_ttm_global_release(struct vmw_private *dev_priv) argument
[all...]
vmwgfx_fifo.c 32 bool vmw_fifo_have_3d(struct vmw_private *dev_priv) argument
34 __le32 __iomem *fifo_mem = dev_priv->mmio_virt;
36 const struct vmw_fifo_state *fifo = &dev_priv->fifo;
38 if (!(dev_priv->capabilities & SVGA_CAP_3D))
41 if (dev_priv->capabilities & SVGA_CAP_GBOBJECTS) {
44 if (!dev_priv->has_mob)
47 mutex_lock(&dev_priv->hw_mutex);
48 vmw_write(dev_priv, SVGA_REG_DEV_CAP, SVGA3D_DEVCAP_3D);
49 result = vmw_read(dev_priv, SVGA_REG_DEV_CAP);
50 mutex_unlock(&dev_priv
81 vmw_fifo_have_pitchlock(struct vmw_private *dev_priv) argument
96 vmw_fifo_init(struct vmw_private *dev_priv, struct vmw_fifo_state *fifo) argument
163 vmw_fifo_ping_host_locked(struct vmw_private *dev_priv, uint32_t reason) argument
173 vmw_fifo_ping_host(struct vmw_private *dev_priv, uint32_t reason) argument
182 vmw_fifo_release(struct vmw_private *dev_priv, struct vmw_fifo_state *fifo) argument
215 vmw_fifo_is_full(struct vmw_private *dev_priv, uint32_t bytes) argument
226 vmw_fifo_wait_noirq(struct vmw_private *dev_priv, uint32_t bytes, bool interruptible, unsigned long timeout) argument
259 vmw_fifo_wait(struct vmw_private *dev_priv, uint32_t bytes, bool interruptible, unsigned long timeout) argument
321 vmw_fifo_reserve(struct vmw_private *dev_priv, uint32_t bytes) argument
445 vmw_fifo_commit(struct vmw_private *dev_priv, uint32_t bytes) argument
491 vmw_fifo_send_fence(struct vmw_private *dev_priv, uint32_t *seqno) argument
545 vmw_fifo_emit_dummy_legacy_query(struct vmw_private *dev_priv, uint32_t cid) argument
594 vmw_fifo_emit_dummy_gb_query(struct vmw_private *dev_priv, uint32_t cid) argument
648 vmw_fifo_emit_dummy_query(struct vmw_private *dev_priv, uint32_t cid) argument
[all...]
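
The vmw_fifo_have_3d() hits show a layered capability probe: give up unless SVGA_CAP_3D is advertised, and on guest-backed-object hardware additionally require MOB support and ask the device itself through the SVGA_REG_DEV_CAP register (under hw_mutex in the real code). The condensed sketch below keeps that decision structure; the capability bits, register numbers and register helpers are illustrative stand-ins.

#include <stdbool.h>
#include <stdint.h>

/* Illustrative values; the real ones come from the SVGA headers. */
#define SVGA_CAP_3D        (1u << 0)
#define SVGA_CAP_GBOBJECTS (1u << 1)
#define SVGA_REG_DEV_CAP   32u
#define SVGA3D_DEVCAP_3D   1u

struct vmw_private {
	uint32_t capabilities;
	bool has_mob;
};

/* Stand-ins for vmw_write()/vmw_read(); the driver holds hw_mutex
 * around this register exchange. */
static void svga_write(struct vmw_private *p, uint32_t reg, uint32_t v)
{ (void)p; (void)reg; (void)v; }
static uint32_t svga_read(struct vmw_private *p, uint32_t reg)
{ (void)p; (void)reg; return 1; }

static bool fifo_have_3d_sketch(struct vmw_private *dev_priv)
{
	if (!(dev_priv->capabilities & SVGA_CAP_3D))
		return false;

	if (dev_priv->capabilities & SVGA_CAP_GBOBJECTS) {
		if (!dev_priv->has_mob)
			return false;
		/* ask the device whether 3D is actually usable */
		svga_write(dev_priv, SVGA_REG_DEV_CAP, SVGA3D_DEVCAP_3D);
		return svga_read(dev_priv, SVGA_REG_DEV_CAP) != 0;
	}

	/* pre-GB devices continue with a FIFO capability check that the
	 * hits do not show; assume success here for the sketch */
	return true;
}
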
/drivers/gpu/drm/radeon/
radeon_cp.c 61 static void radeon_do_cp_start(drm_radeon_private_t * dev_priv);
63 u32 radeon_read_ring_rptr(drm_radeon_private_t *dev_priv, u32 off) argument
67 if (dev_priv->flags & RADEON_IS_AGP) {
68 val = DRM_READ32(dev_priv->ring_rptr, off);
71 dev_priv->ring_rptr->handle) +
78 u32 radeon_get_ring_head(drm_radeon_private_t *dev_priv) argument
80 if (dev_priv->writeback_works)
81 return radeon_read_ring_rptr(dev_priv, 0);
83 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_R600)
90 void radeon_write_ring_rptr(drm_radeon_private_t *dev_priv, u3 argument
99 radeon_set_ring_head(drm_radeon_private_t *dev_priv, u32 val) argument
104 radeon_get_scratch(drm_radeon_private_t *dev_priv, int index) argument
121 R500_READ_MCIND(drm_radeon_private_t *dev_priv, int addr) argument
130 RS480_READ_MCIND(drm_radeon_private_t *dev_priv, int addr) argument
139 RS690_READ_MCIND(drm_radeon_private_t *dev_priv, int addr) argument
148 RS600_READ_MCIND(drm_radeon_private_t *dev_priv, int addr) argument
157 IGP_READ_MCIND(drm_radeon_private_t *dev_priv, int addr) argument
168 radeon_read_fb_location(drm_radeon_private_t *dev_priv) argument
188 radeon_write_fb_location(drm_radeon_private_t *dev_priv, u32 fb_loc) argument
207 radeon_write_agp_location(drm_radeon_private_t *dev_priv, u32 agp_loc) argument
229 radeon_write_agp_base(drm_radeon_private_t *dev_priv, u64 agp_base) argument
264 radeon_enable_bm(struct drm_radeon_private *dev_priv) argument
285 drm_radeon_private_t *dev_priv = dev->dev_private; local
291 RADEON_READ_PCIE(drm_radeon_private_t *dev_priv, int addr) argument
298 radeon_status(drm_radeon_private_t * dev_priv) argument
324 radeon_do_pixcache_flush(drm_radeon_private_t * dev_priv) argument
355 radeon_do_wait_for_fifo(drm_radeon_private_t * dev_priv, int entries) argument
379 radeon_do_wait_for_idle(drm_radeon_private_t * dev_priv) argument
410 drm_radeon_private_t *dev_priv = dev->dev_private; local
473 radeon_cp_init_microcode(drm_radeon_private_t *dev_priv) argument
547 radeon_cp_load_microcode(drm_radeon_private_t *dev_priv) argument
571 radeon_do_cp_flush(drm_radeon_private_t * dev_priv) argument
584 radeon_do_cp_idle(drm_radeon_private_t * dev_priv) argument
603 radeon_do_cp_start(drm_radeon_private_t * dev_priv) argument
646 radeon_do_cp_reset(drm_radeon_private_t * dev_priv) argument
661 radeon_do_cp_stop(drm_radeon_private_t * dev_priv) argument
685 drm_radeon_private_t *dev_priv = dev->dev_private; local
748 radeon_cp_init_ring_buffer(struct drm_device * dev, drm_radeon_private_t *dev_priv, struct drm_file *file_priv) argument
865 radeon_test_writeback(drm_radeon_private_t * dev_priv) argument
909 radeon_set_igpgart(drm_radeon_private_t * dev_priv, int on) argument
980 rs600_set_igpgart(drm_radeon_private_t *dev_priv, int on) argument
1055 radeon_set_pciegart(drm_radeon_private_t * dev_priv, int on) argument
1085 radeon_set_pcigart(drm_radeon_private_t * dev_priv, int on) argument
1132 radeon_setup_pcigart_surface(drm_radeon_private_t *dev_priv) argument
1175 drm_radeon_private_t *dev_priv = dev->dev_private; local
1559 drm_radeon_private_t *dev_priv = dev->dev_private; local
1619 drm_radeon_private_t *dev_priv = dev->dev_private; local
1653 drm_radeon_private_t *dev_priv = dev->dev_private; local
1680 drm_radeon_private_t *dev_priv = dev->dev_private; local
1708 drm_radeon_private_t *dev_priv = dev->dev_private; local
1757 drm_radeon_private_t *dev_priv = dev->dev_private; local
1827 drm_radeon_private_t *dev_priv = dev->dev_private; local
1850 drm_radeon_private_t *dev_priv = dev->dev_private; local
1865 drm_radeon_private_t *dev_priv = dev->dev_private; local
1876 drm_radeon_private_t *dev_priv = dev->dev_private; local
1922 drm_radeon_private_t *dev_priv = dev->dev_private; local
1962 drm_radeon_private_t *dev_priv = dev->dev_private; local
1977 radeon_wait_ring(drm_radeon_private_t * dev_priv, int n) argument
2070 drm_radeon_private_t *dev_priv; local
2179 drm_radeon_private_t *dev_priv = dev->dev_private; local
2195 drm_radeon_private_t *dev_priv = dev->dev_private; local
2207 radeon_commit_ring(drm_radeon_private_t *dev_priv) argument
[all...]
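
The radeon_cp.c hits around radeon_get_ring_head() and radeon_read_ring_rptr() show how the driver picks between two sources for the command-processor read pointer: a CPU-visible writeback copy when writeback works, and an MMIO register read otherwise (the AGP and non-AGP paths map the writeback page differently). The sketch below keeps only that decision; the read helpers are stubs standing in for DRM_READ32() and the register access.

#include <stdint.h>

struct radeon_private_sketch {
	int writeback_works;	/* host-memory rptr writeback usable? */
};

/* Stubs standing in for DRM_READ32() on the rptr map and for the MMIO
 * register read (which differs before and after R600). */
static uint32_t rptr_from_writeback(struct radeon_private_sketch *p, uint32_t off)
{ (void)p; (void)off; return 0; }
static uint32_t rptr_from_register(struct radeon_private_sketch *p)
{ (void)p; return 0; }

static uint32_t get_ring_head_sketch(struct radeon_private_sketch *dev_priv)
{
	/* prefer the writeback copy; fall back to MMIO when it is broken */
	if (dev_priv->writeback_works)
		return rptr_from_writeback(dev_priv, 0);
	return rptr_from_register(dev_priv);
}
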
radeon_irq.c 41 drm_radeon_private_t *dev_priv = dev->dev_private; local
44 dev_priv->irq_enable_reg |= mask;
46 dev_priv->irq_enable_reg &= ~mask;
49 RADEON_WRITE(RADEON_GEN_INT_CNTL, dev_priv->irq_enable_reg);
54 drm_radeon_private_t *dev_priv = dev->dev_private; local
57 dev_priv->r500_disp_irq_reg |= mask;
59 dev_priv->r500_disp_irq_reg &= ~mask;
62 RADEON_WRITE(R500_DxMODE_INT_MASK, dev_priv->r500_disp_irq_reg);
67 drm_radeon_private_t *dev_priv = dev->dev_private; local
69 if ((dev_priv
102 drm_radeon_private_t *dev_priv = dev->dev_private; local
133 radeon_acknowledge_irqs(drm_radeon_private_t *dev_priv, u32 *r500_disp_int) argument
187 drm_radeon_private_t *dev_priv = local
225 drm_radeon_private_t *dev_priv = dev->dev_private; local
243 drm_radeon_private_t *dev_priv = local
260 drm_radeon_private_t *dev_priv = dev->dev_private; local
289 drm_radeon_private_t *dev_priv = dev->dev_private; local
317 drm_radeon_private_t *dev_priv = dev->dev_private; local
335 drm_radeon_private_t *dev_priv = local
353 drm_radeon_private_t *dev_priv = local
371 drm_radeon_private_t *dev_priv = local
388 drm_radeon_private_t *dev_priv = (drm_radeon_private_t *) dev->dev_private; local
395 drm_radeon_private_t *dev_priv = (drm_radeon_private_t *) dev->dev_private; local
[all...]
r600_cp.c 102 static int r600_do_wait_for_fifo(drm_radeon_private_t *dev_priv, int entries) argument
106 dev_priv->stats.boxes |= RADEON_BOX_WAIT_IDLE;
108 for (i = 0; i < dev_priv->usec_timeout; i++) {
110 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_RV770)
127 static int r600_do_wait_for_idle(drm_radeon_private_t *dev_priv) argument
131 dev_priv->stats.boxes |= RADEON_BOX_WAIT_IDLE;
133 if ((dev_priv->flags & RADEON_FAMILY_MASK) >= CHIP_RV770)
134 ret = r600_do_wait_for_fifo(dev_priv, 8);
136 ret = r600_do_wait_for_fifo(dev_priv, 16);
139 for (i = 0; i < dev_priv
180 drm_radeon_private_t *dev_priv = dev->dev_private; local
234 drm_radeon_private_t *dev_priv = dev->dev_private; local
249 drm_radeon_private_t *dev_priv = dev->dev_private; local
312 r600_cp_init_microcode(drm_radeon_private_t *dev_priv) argument
391 r600_cp_load_microcode(drm_radeon_private_t *dev_priv) argument
434 drm_radeon_private_t *dev_priv = dev->dev_private; local
484 r700_cp_load_microcode(drm_radeon_private_t *dev_priv) argument
525 r600_test_writeback(drm_radeon_private_t *dev_priv) argument
574 drm_radeon_private_t *dev_priv = dev->dev_private; local
729 r600_gfx_init(struct drm_device *dev, drm_radeon_private_t *dev_priv) argument
1181 r700_get_tile_pipe_to_backend_map(drm_radeon_private_t *dev_priv, u32 num_tile_pipes, u32 num_backends, u32 backend_disable_mask) argument
1355 r700_gfx_init(struct drm_device *dev, drm_radeon_private_t *dev_priv) argument
1781 r600_cp_init_ring_buffer(struct drm_device *dev, drm_radeon_private_t *dev_priv, struct drm_file *file_priv) argument
1939 drm_radeon_private_t *dev_priv = dev->dev_private; local
1984 drm_radeon_private_t *dev_priv = dev->dev_private; local
2289 drm_radeon_private_t *dev_priv = dev->dev_private; local
2307 r600_do_cp_idle(drm_radeon_private_t *dev_priv) argument
2328 r600_do_cp_start(drm_radeon_private_t *dev_priv) argument
2356 r600_do_cp_reset(drm_radeon_private_t *dev_priv) argument
2367 r600_do_cp_stop(drm_radeon_private_t *dev_priv) argument
2383 drm_radeon_private_t *dev_priv = dev->dev_private; local
2419 drm_radeon_private_t *dev_priv = dev->dev_private; local
2482 drm_radeon_private_t *dev_priv = dev->dev_private; local
2570 r600_cs_id_emit(drm_radeon_private_t *dev_priv, u32 *id) argument
2602 drm_radeon_private_t *dev_priv = dev->dev_private; local
2614 struct drm_radeon_private *dev_priv = dev->dev_private; local
2655 struct drm_radeon_private *dev_priv = dev->dev_private; local
[all...]
/drivers/gpu/drm/via/
via_map.c 30 drm_via_private_t *dev_priv = dev->dev_private; local
34 dev_priv->sarea = drm_legacy_getsarea(dev);
35 if (!dev_priv->sarea) {
37 dev->dev_private = (void *)dev_priv;
42 dev_priv->fb = drm_legacy_findmap(dev, init->fb_offset);
43 if (!dev_priv->fb) {
45 dev->dev_private = (void *)dev_priv;
49 dev_priv->mmio = drm_legacy_findmap(dev, init->mmio_offset);
50 if (!dev_priv->mmio) {
52 dev->dev_private = (void *)dev_priv;
96 drm_via_private_t *dev_priv; local
121 drm_via_private_t *dev_priv = dev->dev_private; local
[all...]
via_dma.c 60 dev_priv->dma_low += 8; \
68 dev_priv->dma_low += 8; \
71 static void via_cmdbuf_start(drm_via_private_t *dev_priv);
72 static void via_cmdbuf_pause(drm_via_private_t *dev_priv);
73 static void via_cmdbuf_reset(drm_via_private_t *dev_priv);
74 static void via_cmdbuf_rewind(drm_via_private_t *dev_priv);
75 static int via_wait_idle(drm_via_private_t *dev_priv);
76 static void via_pad_cache(drm_via_private_t *dev_priv, int qwords);
82 static uint32_t via_cmdbuf_space(drm_via_private_t *dev_priv) argument
84 uint32_t agp_base = dev_priv
96 via_cmdbuf_lag(drm_via_private_t *dev_priv) argument
111 via_cmdbuf_wait(drm_via_private_t *dev_priv, unsigned int size) argument
142 via_check_dma(drm_via_private_t * dev_priv, unsigned int size) argument
158 drm_via_private_t *dev_priv = local
173 via_initialize(struct drm_device *dev, drm_via_private_t *dev_priv, drm_via_dma_init_t *init) argument
231 drm_via_private_t *dev_priv = (drm_via_private_t *) dev->dev_private; local
262 drm_via_private_t *dev_priv; local
313 drm_via_private_t *dev_priv = dev->dev_private; local
344 drm_via_private_t *dev_priv = dev->dev_private; local
377 via_align_buffer(drm_via_private_t *dev_priv, uint32_t * vb, int qw_count) argument
390 via_get_dma(drm_via_private_t *dev_priv) argument
400 via_hook_segment(drm_via_private_t *dev_priv, uint32_t pause_addr_hi, uint32_t pause_addr_lo, int no_pci_fire) argument
466 via_wait_idle(drm_via_private_t *dev_priv) argument
480 via_align_cmd(drm_via_private_t *dev_priv, uint32_t cmd_type, uint32_t addr, uint32_t *cmd_addr_hi, uint32_t *cmd_addr_lo, int skip_wait) argument
510 via_cmdbuf_start(drm_via_private_t *dev_priv) argument
569 via_pad_cache(drm_via_private_t *dev_priv, int qwords) argument
579 via_dummy_bitblt(drm_via_private_t *dev_priv) argument
587 via_cmdbuf_jump(drm_via_private_t *dev_priv) argument
645 via_cmdbuf_rewind(drm_via_private_t *dev_priv) argument
650 via_cmdbuf_flush(drm_via_private_t *dev_priv, uint32_t cmd_type) argument
658 via_cmdbuf_pause(drm_via_private_t *dev_priv) argument
663 via_cmdbuf_reset(drm_via_private_t *dev_priv) argument
678 drm_via_private_t *dev_priv; local
[all...]
via_video.c 32 void via_init_futex(drm_via_private_t *dev_priv) argument
39 init_waitqueue_head(&(dev_priv->decoder_queue[i]));
40 XVMCLOCKPTR(dev_priv->sarea_priv, i)->lock = 0;
44 void via_cleanup_futex(drm_via_private_t *dev_priv) argument
48 void via_release_futex(drm_via_private_t *dev_priv, int context) argument
53 if (!dev_priv->sarea_priv)
57 lock = (volatile int *)XVMCLOCKPTR(dev_priv->sarea_priv, i);
61 wake_up(&(dev_priv->decoder_queue[i]));
72 drm_via_private_t *dev_priv = (drm_via_private_t *) dev->dev_private; local
73 drm_via_sarea_t *sAPriv = dev_priv
[all...]
/drivers/gpu/drm/savage/
savage_bci.c 38 savage_bci_wait_fifo_shadow(drm_savage_private_t * dev_priv, unsigned int n) argument
40 uint32_t mask = dev_priv->status_used_mask;
41 uint32_t threshold = dev_priv->bci_threshold_hi;
46 if (n > dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - threshold)
53 status = dev_priv->status_ptr[0];
67 savage_bci_wait_fifo_s3d(drm_savage_private_t * dev_priv, unsigned int n) argument
69 uint32_t maxUsed = dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - n;
88 savage_bci_wait_fifo_s4(drm_savage_private_t * dev_priv, unsigned int n) argument
90 uint32_t maxUsed = dev_priv->cob_size + SAVAGE_BCI_FIFO_SIZE - n;
120 savage_bci_wait_event_shadow(drm_savage_private_t * dev_priv, uint16_ argument
143 savage_bci_wait_event_reg(drm_savage_private_t * dev_priv, uint16_t e) argument
164 savage_bci_emit_event(drm_savage_private_t * dev_priv, unsigned int flags) argument
208 drm_savage_private_t *dev_priv = dev->dev_private; local
241 drm_savage_private_t *dev_priv = dev->dev_private; local
274 drm_savage_private_t *dev_priv = dev->dev_private; local
295 savage_dma_init(drm_savage_private_t * dev_priv) argument
319 savage_dma_reset(drm_savage_private_t * dev_priv) argument
334 savage_dma_wait(drm_savage_private_t * dev_priv, unsigned int page) argument
362 savage_dma_alloc(drm_savage_private_t * dev_priv, unsigned int n) argument
418 savage_dma_flush(drm_savage_private_t * dev_priv) argument
501 savage_fake_dma_flush(drm_savage_private_t * dev_priv) argument
540 drm_savage_private_t *dev_priv; local
564 drm_savage_private_t *dev_priv = dev->dev_private; local
649 drm_savage_private_t *dev_priv = dev->dev_private; local
660 drm_savage_private_t *dev_priv = dev->dev_private; local
669 drm_savage_private_t *dev_priv = dev->dev_private; local
891 drm_savage_private_t *dev_priv = dev->dev_private; local
932 drm_savage_private_t *dev_priv = dev->dev_private; local
947 drm_savage_private_t *dev_priv = dev->dev_private; local
1042 drm_savage_private_t *dev_priv = dev->dev_private; local
[all...]
/drivers/gpu/drm/i915/
intel_uncore.c 44 assert_device_not_suspended(struct drm_i915_private *dev_priv) argument
46 WARN(HAS_RUNTIME_PM(dev_priv->dev) && dev_priv->pm.suspended,
50 static void __gen6_gt_wait_for_thread_c0(struct drm_i915_private *dev_priv) argument
54 if (IS_HASWELL(dev_priv->dev))
62 if (wait_for_atomic_us((__raw_i915_read32(dev_priv, GEN6_GT_THREAD_STATUS_REG) & gt_thread_status_mask) == 0, 500))
66 static void __gen6_gt_force_wake_reset(struct drm_i915_private *dev_priv) argument
68 __raw_i915_write32(dev_priv, FORCEWAKE, 0);
70 __raw_posting_read(dev_priv, ECOBUS);
73 static void __gen6_gt_force_wake_get(struct drm_i915_private *dev_priv, argument
92 __gen7_gt_force_wake_mt_reset(struct drm_i915_private *dev_priv) argument
99 __gen7_gt_force_wake_mt_get(struct drm_i915_private *dev_priv, int fw_engine) argument
127 gen6_gt_check_fifodbg(struct drm_i915_private *dev_priv) argument
136 __gen6_gt_force_wake_put(struct drm_i915_private *dev_priv, int fw_engine) argument
145 __gen7_gt_force_wake_mt_put(struct drm_i915_private *dev_priv, int fw_engine) argument
157 __gen6_gt_wait_for_fifo(struct drm_i915_private *dev_priv) argument
184 vlv_force_wake_reset(struct drm_i915_private *dev_priv) argument
194 __vlv_force_wake_get(struct drm_i915_private *dev_priv, int fw_engine) argument
238 __vlv_force_wake_put(struct drm_i915_private *dev_priv, int fw_engine) argument
259 vlv_force_wake_get(struct drm_i915_private *dev_priv, int fw_engine) argument
278 vlv_force_wake_put(struct drm_i915_private *dev_priv, int fw_engine) argument
304 struct drm_i915_private *dev_priv = (void *)arg; local
321 struct drm_i915_private *dev_priv = dev->dev_private; local
368 struct drm_i915_private *dev_priv = dev->dev_private; local
404 gen6_gt_force_wake_get(struct drm_i915_private *dev_priv, int fw_engine) argument
426 gen6_gt_force_wake_put(struct drm_i915_private *dev_priv, int fw_engine) argument
457 assert_force_wake_inactive(struct drm_i915_private *dev_priv) argument
508 ilk_dummy_write(struct drm_i915_private *dev_priv) argument
517 hsw_unclaimed_reg_debug(struct drm_i915_private *dev_priv, u32 reg, bool read, bool before) argument
534 hsw_unclaimed_reg_detect(struct drm_i915_private *dev_priv) argument
736 is_gen8_shadowed(struct drm_i915_private *dev_priv, u32 reg) argument
831 struct drm_i915_private *dev_priv = dev->dev_private; local
977 struct drm_i915_private *dev_priv = dev->dev_private; local
1020 struct drm_i915_private *dev_priv = dev->dev_private; local
1095 struct drm_i915_private *dev_priv = dev->dev_private; local
1125 struct drm_i915_private *dev_priv = dev->dev_private; local
1149 struct drm_i915_private *dev_priv = dev->dev_private; local
1184 struct drm_i915_private *dev_priv = dev->dev_private; local
[all...]
i915_ums.c 36 struct drm_i915_private *dev_priv = dev->dev_private; local
53 struct drm_i915_private *dev_priv = dev->dev_private; local
65 array = dev_priv->regfile.save_palette_a;
67 array = dev_priv->regfile.save_palette_b;
75 struct drm_i915_private *dev_priv = dev->dev_private; local
87 array = dev_priv->regfile.save_palette_a;
89 array = dev_priv->regfile.save_palette_b;
97 struct drm_i915_private *dev_priv = dev->dev_private; local
101 dev_priv->regfile.saveCURACNTR = I915_READ(_CURACNTR);
102 dev_priv
294 struct drm_i915_private *dev_priv = dev->dev_private; local
[all...]
intel_sideband.c 42 static int vlv_sideband_rw(struct drm_i915_private *dev_priv, u32 devfn, argument
52 WARN_ON(!mutex_is_locked(&dev_priv->dpio_lock));
78 u32 vlv_punit_read(struct drm_i915_private *dev_priv, u8 addr) argument
82 WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
84 mutex_lock(&dev_priv->dpio_lock);
85 vlv_sideband_rw(dev_priv, PCI_DEVFN(2, 0), IOSF_PORT_PUNIT,
87 mutex_unlock(&dev_priv->dpio_lock);
92 void vlv_punit_write(struct drm_i915_private *dev_priv, u8 addr, u32 val) argument
94 WARN_ON(!mutex_is_locked(&dev_priv->rps.hw_lock));
96 mutex_lock(&dev_priv
102 vlv_bunit_read(struct drm_i915_private *dev_priv, u32 reg) argument
112 vlv_bunit_write(struct drm_i915_private *dev_priv, u32 reg, u32 val) argument
118 vlv_nc_read(struct drm_i915_private *dev_priv, u8 addr) argument
132 vlv_gpio_nc_read(struct drm_i915_private *dev_priv, u32 reg) argument
140 vlv_gpio_nc_write(struct drm_i915_private *dev_priv, u32 reg, u32 val) argument
146 vlv_cck_read(struct drm_i915_private *dev_priv, u32 reg) argument
154 vlv_cck_write(struct drm_i915_private *dev_priv, u32 reg, u32 val) argument
160 vlv_ccu_read(struct drm_i915_private *dev_priv, u32 reg) argument
168 vlv_ccu_write(struct drm_i915_private *dev_priv, u32 reg, u32 val) argument
174 vlv_gps_core_read(struct drm_i915_private *dev_priv, u32 reg) argument
182 vlv_gps_core_write(struct drm_i915_private *dev_priv, u32 reg, u32 val) argument
188 vlv_dpio_read(struct drm_i915_private *dev_priv, enum pipe pipe, int reg) argument
205 vlv_dpio_write(struct drm_i915_private *dev_priv, enum pipe pipe, int reg, u32 val) argument
212 intel_sbi_read(struct drm_i915_private *dev_priv, u16 reg, enum intel_sbi_destination destination) argument
241 intel_sbi_write(struct drm_i915_private *dev_priv, u16 reg, u32 value, enum intel_sbi_destination destination) argument
270 vlv_flisdsi_read(struct drm_i915_private *dev_priv, u32 reg) argument
278 vlv_flisdsi_write(struct drm_i915_private *dev_priv, u32 reg, u32 val) argument
[all...]
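
The intel_sideband.c hits illustrate the locking discipline around Valleyview sideband accesses: vlv_punit_read() warns unless rps.hw_lock is already held by the caller, then serializes the single IOSF transaction on dpio_lock. The sketch below reproduces that shape in user-space terms; the pthread mutex, the assertion and the transaction stub are stand-ins for the kernel mutexes, WARN_ON() and vlv_sideband_rw().

#include <assert.h>
#include <pthread.h>
#include <stdint.h>

struct i915_private_sketch {
	pthread_mutex_t dpio_lock;
	int hw_lock_held;	/* stands in for lockdep on rps.hw_lock */
};

/* Stub for vlv_sideband_rw(): one IOSF sideband read transaction. */
static void sideband_read(struct i915_private_sketch *p, uint8_t addr,
			  uint32_t *val)
{ (void)p; (void)addr; *val = 0; }

static uint32_t punit_read_sketch(struct i915_private_sketch *dev_priv,
				  uint8_t addr)
{
	uint32_t val;

	/* the real code WARNs unless rps.hw_lock is already held */
	assert(dev_priv->hw_lock_held);

	pthread_mutex_lock(&dev_priv->dpio_lock);
	sideband_read(dev_priv, addr, &val);
	pthread_mutex_unlock(&dev_priv->dpio_lock);

	return val;
}
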
/drivers/gpu/drm/mga/
mga_dma.c 53 int mga_do_wait_for_idle(drm_mga_private_t *dev_priv) argument
59 for (i = 0; i < dev_priv->usec_timeout; i++) {
75 static int mga_do_dma_reset(drm_mga_private_t *dev_priv) argument
77 drm_mga_sarea_t *sarea_priv = dev_priv->sarea_priv;
78 drm_mga_primary_buffer_t *primary = &dev_priv->prim;
103 void mga_do_dma_flush(drm_mga_private_t *dev_priv) argument
105 drm_mga_primary_buffer_t *primary = &dev_priv->prim;
113 for (i = 0; i < dev_priv->usec_timeout; i++) {
125 tail = primary->tail + dev_priv->primary->offset;
148 DRM_DEBUG(" head = 0x%06lx\n", (unsigned long)(head - dev_priv
158 mga_do_dma_wrap_start(drm_mga_private_t *dev_priv) argument
198 mga_do_dma_wrap_end(drm_mga_private_t *dev_priv) argument
225 drm_mga_private_t *dev_priv = dev->dev_private; local
244 mga_freelist_init(struct drm_device *dev, drm_mga_private_t *dev_priv) argument
289 drm_mga_private_t *dev_priv = dev->dev_private; local
324 drm_mga_private_t *dev_priv = dev->dev_private; local
357 drm_mga_private_t *dev_priv = dev->dev_private; local
392 drm_mga_private_t *dev_priv; local
438 drm_mga_private_t *const dev_priv = local
612 drm_mga_private_t *const dev_priv = local
703 drm_mga_private_t *const dev_priv = local
762 const drm_mga_private_t *const dev_priv = local
786 drm_mga_private_t *dev_priv; local
936 drm_mga_private_t *dev_priv = dev->dev_private; local
1023 drm_mga_private_t *dev_priv = (drm_mga_private_t *) dev->dev_private; local
1055 drm_mga_private_t *dev_priv = (drm_mga_private_t *) dev->dev_private; local
1095 drm_mga_private_t *dev_priv = (drm_mga_private_t *) dev->dev_private; local
1148 drm_mga_private_t *dev_priv = dev->dev_private; local
[all...]
mga_irq.c 40 const drm_mga_private_t *const dev_priv = local
46 return atomic_read(&dev_priv->vbl_received);
53 drm_mga_private_t *dev_priv = (drm_mga_private_t *) dev->dev_private; local
62 atomic_inc(&dev_priv->vbl_received);
81 atomic_inc(&dev_priv->last_fence_retired);
82 wake_up(&dev_priv->fence_queue);
93 drm_mga_private_t *dev_priv = (drm_mga_private_t *) dev->dev_private; local
123 drm_mga_private_t *dev_priv = (drm_mga_private_t *) dev->dev_private; local
131 DRM_WAIT_ON(ret, dev_priv->fence_queue, 3 * HZ,
132 (((cur_fence = atomic_read(&dev_priv
142 drm_mga_private_t *dev_priv = (drm_mga_private_t *) dev->dev_private; local
152 drm_mga_private_t *dev_priv = (drm_mga_private_t *) dev->dev_private; local
165 drm_mga_private_t *dev_priv = (drm_mga_private_t *) dev->dev_private; local
[all...]
/drivers/gpu/drm/r128/
r128_cce.c 49 drm_r128_private_t *dev_priv = dev->dev_private; local
56 static void r128_status(drm_r128_private_t *dev_priv) argument
77 static int r128_do_pixcache_flush(drm_r128_private_t *dev_priv) argument
85 for (i = 0; i < dev_priv->usec_timeout; i++) {
97 static int r128_do_wait_for_fifo(drm_r128_private_t *dev_priv, int entries) argument
101 for (i = 0; i < dev_priv->usec_timeout; i++) {
114 static int r128_do_wait_for_idle(drm_r128_private_t *dev_priv) argument
118 ret = r128_do_wait_for_fifo(dev_priv, 64);
122 for (i = 0; i < dev_priv->usec_timeout; i++) {
124 r128_do_pixcache_flush(dev_priv);
141 r128_cce_load_microcode(drm_r128_private_t *dev_priv) argument
191 r128_do_cce_flush(drm_r128_private_t *dev_priv) argument
201 r128_do_cce_idle(drm_r128_private_t *dev_priv) argument
227 r128_do_cce_start(drm_r128_private_t *dev_priv) argument
244 r128_do_cce_reset(drm_r128_private_t *dev_priv) argument
255 r128_do_cce_stop(drm_r128_private_t *dev_priv) argument
268 drm_r128_private_t *dev_priv = dev->dev_private; local
303 r128_cce_init_ring_buffer(struct drm_device *dev, drm_r128_private_t *dev_priv) argument
344 drm_r128_private_t *dev_priv; local
601 drm_r128_private_t *dev_priv = dev->dev_private; local
650 drm_r128_private_t *dev_priv = dev->dev_private; local
672 drm_r128_private_t *dev_priv = dev->dev_private; local
712 drm_r128_private_t *dev_priv = dev->dev_private; local
729 drm_r128_private_t *dev_priv = dev->dev_private; local
813 drm_r128_private_t *dev_priv = dev->dev_private; local
864 r128_wait_ring(drm_r128_private_t *dev_priv, int n) argument
[all...]
