Lines Matching defs:svga
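These are cross-reference matches, with the file's own line numbers on the left, apparently taken from the Gallium SVGA driver's draw module (svga_pipe_draw.c in Mesa). They trace three things: the helpers that upload user-space vertex buffers into real hardware buffers, the retrying wrappers around the hardware draw calls, and the draw_vbo entry point that ties them together. After each group of matches, a hedged sketch shows how the fragments plausibly fit together.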

56 svga_user_buffer_range(struct svga_context *svga,
61 const struct pipe_vertex_element *ve = svga->curr.velems->velem;
69 for (i=0; i < svga->curr.velems->count; i++) {
71 &svga->curr.vb[ve[i].vertex_buffer_index];
82 for (i=0; i < svga->curr.velems->count; i++) {
84 &svga->curr.vb[ve[i].vertex_buffer_index];
92 svga->dirty |= SVGA_NEW_VBUFFER;
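The matches at 56-92 outline svga_user_buffer_range(), which computes, per bound vertex element, the byte range of any user-space vertex buffer the draw will read. A minimal sketch of the two passes, assuming the driver-internal svga_buffer type and the svga_buffer_is_user_buffer() helper; everything not in the matched lines is reconstructed, not authoritative:

   static void
   svga_user_buffer_range(struct svga_context *svga,
                          unsigned start,
                          unsigned count,
                          unsigned instance_count)
   {
      const struct pipe_vertex_element *ve = svga->curr.velems->velem;
      unsigned i;

      /* First pass: reset any range recorded by a previous draw on the
       * user buffers referenced by the bound vertex elements.
       */
      for (i = 0; i < svga->curr.velems->count; i++) {
         struct pipe_vertex_buffer *vb =
            &svga->curr.vb[ve[i].vertex_buffer_index];

         if (vb->buffer && svga_buffer_is_user_buffer(vb->buffer)) {
            struct svga_buffer *buffer = svga_buffer(vb->buffer);
            buffer->uploaded.start = ~0;
            buffer->uploaded.end = 0;
         }
      }

      /* Second pass: accumulate the byte range [first, first + size)
       * this draw reads from each user buffer, and mark the vertex
       * buffer state dirty so it gets re-emitted.
       * (Instanced elements would derive the count from instance_count
       * and ve[i].instance_divisor instead; omitted here.)
       */
      for (i = 0; i < svga->curr.velems->count; i++) {
         struct pipe_vertex_buffer *vb =
            &svga->curr.vb[ve[i].vertex_buffer_index];

         if (vb->buffer && svga_buffer_is_user_buffer(vb->buffer)) {
            struct svga_buffer *buffer = svga_buffer(vb->buffer);
            unsigned first = vb->stride * start + ve[i].src_offset;
            unsigned size = vb->stride * (count - 1) +
               util_format_get_blocksize(ve[i].src_format);

            svga->dirty |= SVGA_NEW_VBUFFER;
            buffer->uploaded.start = MIN2(buffer->uploaded.start, first);
            buffer->uploaded.end = MAX2(buffer->uploaded.end, first + size);
         }
      }
   }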
117 svga_upload_user_buffers(struct svga_context *svga,
122 const struct pipe_vertex_element *ve = svga->curr.velems->velem;
126 svga_user_buffer_range(svga, start, count, instance_count);
128 for (i=0; i < svga->curr.velems->count; i++) {
130 &svga->curr.vb[ve[i].vertex_buffer_index];
142 ret = u_upload_buffer( svga->upload_vb,
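Lines 117-142 outline svga_upload_user_buffers(): it first calls svga_user_buffer_range() to compute the ranges, then copies each referenced user-buffer range into a hardware buffer through the u_upload_mgr. A sketch under the same assumptions; note the u_upload_buffer() signature shown is the 7.x-era one and has changed across Mesa releases:

   static enum pipe_error
   svga_upload_user_buffers(struct svga_context *svga,
                            unsigned start,
                            unsigned count,
                            unsigned instance_count)
   {
      const struct pipe_vertex_element *ve = svga->curr.velems->velem;
      unsigned i;
      enum pipe_error ret;

      /* Compute the ranges that need uploading (see the sketch above). */
      svga_user_buffer_range(svga, start, count, instance_count);

      for (i = 0; i < svga->curr.velems->count; i++) {
         struct pipe_vertex_buffer *vb =
            &svga->curr.vb[ve[i].vertex_buffer_index];

         if (vb->buffer && svga_buffer_is_user_buffer(vb->buffer)) {
            struct svga_buffer *buffer = svga_buffer(vb->buffer);

            if (buffer->uploaded.buffer)
               continue;   /* already uploaded for this draw */

            /* Copy the referenced range into a real hardware buffer. */
            ret = u_upload_buffer( svga->upload_vb,
                                   0,  /* min_out_offset */
                                   buffer->uploaded.start,
                                   buffer->uploaded.end -
                                      buffer->uploaded.start,
                                   &buffer->b.b,
                                   &buffer->uploaded.offset,
                                   &buffer->uploaded.buffer );
            if (ret != PIPE_OK)
               return ret;
         }
      }

      return PIPE_OK;
   }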
184 svga_release_user_upl_buffers(struct svga_context *svga)
189 nr = svga->curr.num_vertex_buffers;
192 struct pipe_vertex_buffer *vb = &svga->curr.vb[i];
201 * XXX a root problem here is that the svga->curr.vb[] information
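Lines 184-201 belong to svga_release_user_upl_buffers(), the cleanup counterpart: after a draw, the temporary uploaded copies are dereferenced so the context does not keep hardware buffers alive behind the application's back. The XXX comment at line 201 notes the underlying fragility: svga->curr.vb[] is the only record of which buffers were substituted. A plausible body:

   static void
   svga_release_user_upl_buffers(struct svga_context *svga)
   {
      unsigned i, nr;

      nr = svga->curr.num_vertex_buffers;

      for (i = 0; i < nr; ++i) {
         struct pipe_vertex_buffer *vb = &svga->curr.vb[i];

         if (vb->buffer && svga_buffer_is_user_buffer(vb->buffer)) {
            struct svga_buffer *buffer = svga_buffer(vb->buffer);

            /* Drop the reference to the temporary hardware copy and
             * invalidate the recorded range so the next draw re-uploads.
             */
            buffer->uploaded.start = ~0;
            buffer->uploaded.end = 0;
            pipe_resource_reference(&buffer->uploaded.buffer, NULL);
         }
      }
   }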
219 retry_draw_range_elements( struct svga_context *svga,
233 svga_hwtnl_set_unfilled( svga->hwtnl,
234 svga->curr.rast->hw_unfilled );
236 svga_hwtnl_set_flatshade( svga->hwtnl,
237 svga->curr.rast->templ.flatshade,
238 svga->curr.rast->templ.flatshade_first );
240 ret = svga_upload_user_buffers( svga, min_index + index_bias,
245 ret = svga_update_state( svga, SVGA_STATE_HW_DRAW );
249 ret = svga_hwtnl_draw_range_elements( svga->hwtnl,
259 svga_context_flush( svga, NULL );
263 return retry_draw_range_elements( svga,
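Lines 219-263 form retry_draw_range_elements(): it pushes the unfilled/flatshade rasterizer state into the hwtnl module, uploads user buffers for the range the indices can reach, validates hardware state, and issues the indexed draw; if any step fails with PIPE_ERROR_OUT_OF_MEMORY it flushes the command buffer and retries exactly once. A sketch, with the parameter list reconstructed from the call sites:

   static enum pipe_error
   retry_draw_range_elements( struct svga_context *svga,
                              struct pipe_resource *index_buffer,
                              unsigned index_size,
                              int index_bias,
                              unsigned min_index,
                              unsigned max_index,
                              unsigned prim,
                              unsigned start,
                              unsigned count,
                              unsigned instance_count,
                              boolean do_retry )
   {
      enum pipe_error ret;

      svga_hwtnl_set_unfilled( svga->hwtnl,
                               svga->curr.rast->hw_unfilled );

      svga_hwtnl_set_flatshade( svga->hwtnl,
                                svga->curr.rast->templ.flatshade,
                                svga->curr.rast->templ.flatshade_first );

      /* Upload only the user buffer range the indices can reference. */
      ret = svga_upload_user_buffers( svga, min_index + index_bias,
                                      max_index - min_index + 1,
                                      instance_count );
      if (ret != PIPE_OK)
         goto retry;

      ret = svga_update_state( svga, SVGA_STATE_HW_DRAW );
      if (ret != PIPE_OK)
         goto retry;

      ret = svga_hwtnl_draw_range_elements( svga->hwtnl,
                                            index_buffer, index_size,
                                            index_bias,
                                            min_index, max_index,
                                            prim, start, count );
      if (ret != PIPE_OK)
         goto retry;

      return PIPE_OK;

   retry:
      /* The command buffer may simply be full: flush and try once more. */
      if (ret == PIPE_ERROR_OUT_OF_MEMORY && do_retry) {
         svga_context_flush( svga, NULL );
         return retry_draw_range_elements( svga,
                                           index_buffer, index_size,
                                           index_bias,
                                           min_index, max_index,
                                           prim, start, count,
                                           instance_count, FALSE );
      }
      return ret;
   }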
275 retry_draw_arrays( struct svga_context *svga,
284 svga_hwtnl_set_unfilled( svga->hwtnl,
285 svga->curr.rast->hw_unfilled );
287 svga_hwtnl_set_flatshade( svga->hwtnl,
288 svga->curr.rast->templ.flatshade,
289 svga->curr.rast->templ.flatshade_first );
291 ret = svga_upload_user_buffers( svga, start, count, instance_count );
296 ret = svga_update_state( svga, SVGA_STATE_HW_DRAW );
300 ret = svga_hwtnl_draw_arrays( svga->hwtnl, prim,
310 svga_context_flush( svga, NULL );
312 return retry_draw_arrays( svga,
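Lines 275-312 are the non-indexed twin, retry_draw_arrays(), with the same state setup and the same one-shot flush-and-retry policy:

   static enum pipe_error
   retry_draw_arrays( struct svga_context *svga,
                      unsigned prim,
                      unsigned start,
                      unsigned count,
                      unsigned instance_count,
                      boolean do_retry )
   {
      enum pipe_error ret;

      svga_hwtnl_set_unfilled( svga->hwtnl,
                               svga->curr.rast->hw_unfilled );

      svga_hwtnl_set_flatshade( svga->hwtnl,
                                svga->curr.rast->templ.flatshade,
                                svga->curr.rast->templ.flatshade_first );

      ret = svga_upload_user_buffers( svga, start, count, instance_count );
      if (ret != PIPE_OK)
         goto retry;

      ret = svga_update_state( svga, SVGA_STATE_HW_DRAW );
      if (ret != PIPE_OK)
         goto retry;

      ret = svga_hwtnl_draw_arrays( svga->hwtnl, prim, start, count );
      if (ret != PIPE_OK)
         goto retry;

      return PIPE_OK;

   retry:
      if (ret == PIPE_ERROR_OUT_OF_MEMORY && do_retry) {
         svga_context_flush( svga, NULL );
         return retry_draw_arrays( svga, prim, start, count,
                                   instance_count, FALSE );
      }
      return ret;
   }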
327 struct svga_context *svga = svga_context( pipe );
345 if (svga->curr.reduced_prim != reduced_prim) {
346 svga->curr.reduced_prim = reduced_prim;
347 svga->dirty |= SVGA_NEW_REDUCED_PRIMITIVE;
350 needed_swtnl = svga->state.sw.need_swtnl;
352 svga_update_state_retry( svga, SVGA_STATE_NEED_SWTNL );
355 if (svga->curr.vs->base.id == svga->debug.disable_shader ||
356 svga->curr.fs->base.id == svga->debug.disable_shader)
360 if (svga->state.sw.need_swtnl) {
370 svga_context_flush(svga, NULL);
374 svga_hwtnl_set_index_bias( svga->hwtnl, 0 );
375 ret = svga_swtnl_draw_vbo( svga, info );
378 if (info->indexed && svga->curr.ib.buffer) {
381 assert(svga->curr.ib.offset % svga->curr.ib.index_size == 0);
382 offset = svga->curr.ib.offset / svga->curr.ib.index_size;
384 ret = retry_draw_range_elements( svga,
385 svga->curr.ib.buffer,
386 svga->curr.ib.index_size,
397 ret = retry_draw_arrays( svga,
409 svga_release_user_upl_buffers( svga );
412 svga_hwtnl_flush_retry( svga );
413 svga_context_flush(svga, NULL);
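Lines 327-413 are the heart of the file, svga_draw_vbo(). It tracks the reduced primitive class, decides between the software TNL fallback and the hardware paths, converts the index buffer's byte offset into an index offset for the indexed path, and always releases the uploaded user-buffer copies afterwards. A condensed sketch; the control flow between the matched lines is reconstructed and error handling is simplified:

   static void
   svga_draw_vbo(struct pipe_context *pipe,
                 const struct pipe_draw_info *info)
   {
      struct svga_context *svga = svga_context( pipe );
      unsigned reduced_prim = u_reduced_prim( info->mode );
      boolean needed_swtnl;
      enum pipe_error ret = PIPE_OK;

      /* Track the reduced primitive class (point/line/triangle);
       * some rasterizer state depends on it.
       */
      if (svga->curr.reduced_prim != reduced_prim) {
         svga->curr.reduced_prim = reduced_prim;
         svga->dirty |= SVGA_NEW_REDUCED_PRIMITIVE;
      }

      needed_swtnl = svga->state.sw.need_swtnl;
      svga_update_state_retry( svga, SVGA_STATE_NEED_SWTNL );

      /* Debug aid: skip draws entirely for a disabled shader. */
      if (svga->curr.vs->base.id == svga->debug.disable_shader ||
          svga->curr.fs->base.id == svga->debug.disable_shader)
         return;

      if (svga->state.sw.need_swtnl) {
         if (!needed_swtnl) {
            /* Switching from hardware to software TNL: flush so the
             * software path never maps a vertex buffer still
             * referenced by queued hardware commands.
             */
            svga_context_flush(svga, NULL);
         }
         /* Don't leak a stale hardware index bias into swtnl. */
         svga_hwtnl_set_index_bias( svga->hwtnl, 0 );
         ret = svga_swtnl_draw_vbo( svga, info );
      }
      else if (info->indexed && svga->curr.ib.buffer) {
         unsigned offset;

         /* The hardware takes the start position in indices, so the
          * index buffer's byte offset must be index-size aligned.
          */
         assert(svga->curr.ib.offset % svga->curr.ib.index_size == 0);
         offset = svga->curr.ib.offset / svga->curr.ib.index_size;

         ret = retry_draw_range_elements( svga,
                                          svga->curr.ib.buffer,
                                          svga->curr.ib.index_size,
                                          info->index_bias,
                                          info->min_index,
                                          info->max_index,
                                          info->mode,
                                          info->start + offset,
                                          info->count,
                                          info->instance_count,
                                          TRUE );
      }
      else {
         ret = retry_draw_arrays( svga, info->mode,
                                  info->start, info->count,
                                  info->instance_count, TRUE );
      }
      (void) ret;  /* draw_vbo returns void; errors were retried above */

      /* The uploaded copies of user vertex buffers are only valid for
       * this draw call; release them now.
       */
      svga_release_user_upl_buffers( svga );

      if (SVGA_DEBUG & DEBUG_FLUSH) {
         svga_hwtnl_flush_retry( svga );
         svga_context_flush( svga, NULL );
      }
   }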
418 void svga_init_draw_functions( struct svga_context *svga )
420 svga->pipe.draw_vbo = svga_draw_vbo;
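The final matches (418-420) show the module's sole entry point: svga_init_draw_functions() installs the draw handler in the context's pipe_context vtable, which is how the state tracker reaches everything above. The two matched lines give essentially the whole function:

   void svga_init_draw_functions( struct svga_context *svga )
   {
      svga->pipe.draw_vbo = svga_draw_vbo;
   }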