vl_compositor.c revision 4f37636afb5adc299ecbe497209702a47039580c
/**************************************************************************
 *
 * Copyright 2009 Younes Manton.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL TUNGSTEN GRAPHICS AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include <assert.h>

#include "pipe/p_compiler.h"
#include "pipe/p_context.h"

#include "util/u_memory.h"
#include "util/u_draw.h"
#include "util/u_surface.h"

#include "tgsi/tgsi_ureg.h"

#include "vl_csc.h"
#include "vl_types.h"
#include "vl_compositor.h"

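/* The color conversion matrix lives in a constant buffer as four float4 rows
 * (bias folded into the matrix); the fragment shaders below DP4 a texel
 * against the first three rows to convert YCbCr to RGB. */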
typedef float csc_matrix[16];

static void *
create_vert_shader(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src vpos, vtex;
   struct ureg_dst o_vpos, o_vtex;

   shader = ureg_create(TGSI_PROCESSOR_VERTEX);
   if (!shader)
      return NULL;

   vpos = ureg_DECL_vs_input(shader, 0);
   vtex = ureg_DECL_vs_input(shader, 1);
   o_vpos = ureg_DECL_output(shader, TGSI_SEMANTIC_POSITION, 0);
   o_vtex = ureg_DECL_output(shader, TGSI_SEMANTIC_GENERIC, 1);

   /*
    * o_vpos = vpos
    * o_vtex = vtex
    */
   ureg_MOV(shader, o_vpos, vpos);
   ureg_MOV(shader, o_vtex, vtex);

   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

static void *
create_frag_shader_video_buffer(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src tc;
   struct ureg_src csc[3];
   struct ureg_src sampler[3];
   struct ureg_dst texel;
   struct ureg_dst fragment;
   unsigned i;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   for (i = 0; i < 3; ++i) {
      csc[i] = ureg_DECL_constant(shader, i);
      sampler[i] = ureg_DECL_sampler(shader, i);
   }
   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * texel.xyz = tex(tc, sampler[i])
    * fragment = csc * texel
    */
   for (i = 0; i < 3; ++i)
      ureg_TEX(shader, ureg_writemask(texel, TGSI_WRITEMASK_X << i), TGSI_TEXTURE_2D, tc, sampler[i]);

   ureg_MOV(shader, ureg_writemask(texel, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));

   for (i = 0; i < 3; ++i)
      ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));

   ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_imm1f(shader, 1.0f));

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

static void *
create_frag_shader_palette(struct vl_compositor *c, bool include_cc)
{
   struct ureg_program *shader;
   struct ureg_src csc[3];
   struct ureg_src tc;
   struct ureg_src sampler;
   struct ureg_src palette;
   struct ureg_dst texel;
   struct ureg_dst fragment;
   unsigned i;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   for (i = 0; i < 3; ++i)
      csc[i] = ureg_DECL_constant(shader, i);

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   palette = ureg_DECL_sampler(shader, 1);

   texel = ureg_DECL_temporary(shader);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * texel = tex(tc, sampler)
    * fragment.xyz = tex(texel, palette) * csc
    * fragment.a = texel.a
    */
   ureg_TEX(shader, texel, TGSI_TEXTURE_2D, tc, sampler);
   ureg_MOV(shader, ureg_writemask(fragment, TGSI_WRITEMASK_W), ureg_src(texel));

   if (include_cc) {
      ureg_TEX(shader, texel, TGSI_TEXTURE_1D, ureg_src(texel), palette);
      for (i = 0; i < 3; ++i)
         ureg_DP4(shader, ureg_writemask(fragment, TGSI_WRITEMASK_X << i), csc[i], ureg_src(texel));
   } else {
      ureg_TEX(shader, ureg_writemask(fragment, TGSI_WRITEMASK_XYZ),
               TGSI_TEXTURE_1D, ureg_src(texel), palette);
   }

   ureg_release_temporary(shader, texel);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

static void *
create_frag_shader_rgba(struct vl_compositor *c)
{
   struct ureg_program *shader;
   struct ureg_src tc;
   struct ureg_src sampler;
   struct ureg_dst fragment;

   shader = ureg_create(TGSI_PROCESSOR_FRAGMENT);
   if (!shader)
      return NULL;

   tc = ureg_DECL_fs_input(shader, TGSI_SEMANTIC_GENERIC, 1, TGSI_INTERPOLATE_LINEAR);
   sampler = ureg_DECL_sampler(shader, 0);
   fragment = ureg_DECL_output(shader, TGSI_SEMANTIC_COLOR, 0);

   /*
    * fragment = tex(tc, sampler)
    */
   ureg_TEX(shader, fragment, TGSI_TEXTURE_2D, tc, sampler);
   ureg_END(shader);

   return ureg_create_shader_and_destroy(shader, c->pipe);
}

static bool
init_shaders(struct vl_compositor *c)
{
   assert(c);

   c->vs = create_vert_shader(c);
   if (!c->vs) {
      debug_printf("Unable to create vertex shader.\n");
      return false;
   }

   c->fs_video_buffer = create_frag_shader_video_buffer(c);
   if (!c->fs_video_buffer) {
      debug_printf("Unable to create YCbCr-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_palette.yuv = create_frag_shader_palette(c, true);
   if (!c->fs_palette.yuv) {
      debug_printf("Unable to create YUV-Palette-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_palette.rgb = create_frag_shader_palette(c, false);
   if (!c->fs_palette.rgb) {
      debug_printf("Unable to create RGB-Palette-to-RGB fragment shader.\n");
      return false;
   }

   c->fs_rgba = create_frag_shader_rgba(c);
   if (!c->fs_rgba) {
      debug_printf("Unable to create RGB-to-RGB fragment shader.\n");
      return false;
   }

   return true;
}

static void cleanup_shaders(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_vs_state(c->pipe, c->vs);
   c->pipe->delete_fs_state(c->pipe, c->fs_video_buffer);
   c->pipe->delete_fs_state(c->pipe, c->fs_palette.yuv);
   c->pipe->delete_fs_state(c->pipe, c->fs_palette.rgb);
   c->pipe->delete_fs_state(c->pipe, c->fs_rgba);
}

static bool
init_pipe_state(struct vl_compositor *c)
{
   struct pipe_rasterizer_state rast;
   struct pipe_sampler_state sampler;
   struct pipe_blend_state blend;
   struct pipe_depth_stencil_alpha_state dsa;
   unsigned i;

   assert(c);

   c->fb_state.nr_cbufs = 1;
   c->fb_state.zsbuf = NULL;

   c->viewport.scale[2] = 1;
   c->viewport.scale[3] = 1;
   c->viewport.translate[2] = 0;
   c->viewport.translate[3] = 0;

   memset(&sampler, 0, sizeof(sampler));
   sampler.wrap_s = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_t = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.wrap_r = PIPE_TEX_WRAP_CLAMP_TO_EDGE;
   sampler.min_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.min_mip_filter = PIPE_TEX_MIPFILTER_NONE;
   sampler.mag_img_filter = PIPE_TEX_FILTER_LINEAR;
   sampler.compare_mode = PIPE_TEX_COMPARE_NONE;
   sampler.compare_func = PIPE_FUNC_ALWAYS;
   sampler.normalized_coords = 1;

   c->sampler_linear = c->pipe->create_sampler_state(c->pipe, &sampler);

   sampler.min_img_filter = PIPE_TEX_FILTER_NEAREST;
   sampler.mag_img_filter = PIPE_TEX_FILTER_NEAREST;
   c->sampler_nearest = c->pipe->create_sampler_state(c->pipe, &sampler);

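   /* Two blend states: blend_clear overwrites the render target without
    * blending (used for the bottom layer, which also clears the dirty area),
    * while blend_add does standard source-alpha blending for the layers
    * composited on top. */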
   memset(&blend, 0, sizeof blend);
   blend.independent_blend_enable = 0;
   blend.rt[0].blend_enable = 0;
   blend.logicop_enable = 0;
   blend.logicop_func = PIPE_LOGICOP_CLEAR;
   blend.rt[0].colormask = PIPE_MASK_RGBA;
   blend.dither = 0;
   c->blend_clear = c->pipe->create_blend_state(c->pipe, &blend);

   blend.rt[0].blend_enable = 1;
   blend.rt[0].rgb_func = PIPE_BLEND_ADD;
   blend.rt[0].rgb_src_factor = PIPE_BLENDFACTOR_SRC_ALPHA;
   blend.rt[0].rgb_dst_factor = PIPE_BLENDFACTOR_INV_SRC_ALPHA;
   blend.rt[0].alpha_func = PIPE_BLEND_ADD;
   blend.rt[0].alpha_src_factor = PIPE_BLENDFACTOR_ONE;
   blend.rt[0].alpha_dst_factor = PIPE_BLENDFACTOR_ONE;
   c->blend_add = c->pipe->create_blend_state(c->pipe, &blend);

   memset(&rast, 0, sizeof rast);
   rast.flatshade = 1;
   rast.front_ccw = 1;
   rast.cull_face = PIPE_FACE_NONE;
   rast.fill_back = PIPE_POLYGON_MODE_FILL;
   rast.fill_front = PIPE_POLYGON_MODE_FILL;
   rast.scissor = 1;
   rast.line_width = 1;
   rast.point_size_per_vertex = 1;
   rast.offset_units = 1;
   rast.offset_scale = 1;
   rast.gl_rasterization_rules = 1;

   c->rast = c->pipe->create_rasterizer_state(c->pipe, &rast);

   memset(&dsa, 0, sizeof dsa);
   dsa.depth.enabled = 0;
   dsa.depth.writemask = 0;
   dsa.depth.func = PIPE_FUNC_ALWAYS;
   for (i = 0; i < 2; ++i) {
      dsa.stencil[i].enabled = 0;
      dsa.stencil[i].func = PIPE_FUNC_ALWAYS;
      dsa.stencil[i].fail_op = PIPE_STENCIL_OP_KEEP;
      dsa.stencil[i].zpass_op = PIPE_STENCIL_OP_KEEP;
      dsa.stencil[i].zfail_op = PIPE_STENCIL_OP_KEEP;
      dsa.stencil[i].valuemask = 0;
      dsa.stencil[i].writemask = 0;
   }
   dsa.alpha.enabled = 0;
   dsa.alpha.func = PIPE_FUNC_ALWAYS;
   dsa.alpha.ref_value = 0;
   c->dsa = c->pipe->create_depth_stencil_alpha_state(c->pipe, &dsa);
   c->pipe->bind_depth_stencil_alpha_state(c->pipe, c->dsa);
   return true;
}

static void cleanup_pipe_state(struct vl_compositor *c)
{
   assert(c);

   /* Asserted in softpipe_delete_fs_state() for some reason */
   c->pipe->bind_vs_state(c->pipe, NULL);
   c->pipe->bind_fs_state(c->pipe, NULL);

   c->pipe->delete_depth_stencil_alpha_state(c->pipe, c->dsa);
   c->pipe->delete_sampler_state(c->pipe, c->sampler_linear);
   c->pipe->delete_sampler_state(c->pipe, c->sampler_nearest);
   c->pipe->delete_blend_state(c->pipe, c->blend_clear);
   c->pipe->delete_blend_state(c->pipe, c->blend_add);
   c->pipe->delete_rasterizer_state(c->pipe, c->rast);
}

static bool
create_vertex_buffer(struct vl_compositor *c)
{
   assert(c);

   pipe_resource_reference(&c->vertex_buf.buffer, NULL);
   c->vertex_buf.buffer = pipe_buffer_create
   (
      c->pipe->screen,
      PIPE_BIND_VERTEX_BUFFER,
      PIPE_USAGE_STREAM,
      sizeof(struct vertex4f) * VL_COMPOSITOR_MAX_LAYERS * 4
   );
   return c->vertex_buf.buffer != NULL;
}

static bool
init_buffers(struct vl_compositor *c)
{
   struct pipe_vertex_element vertex_elems[2];

   assert(c);

   /*
    * Create our vertex buffer and vertex buffer elements
    */
   c->vertex_buf.stride = sizeof(struct vertex4f);
   c->vertex_buf.buffer_offset = 0;
   if (!create_vertex_buffer(c))
      return false;

   vertex_elems[0].src_offset = 0;
   vertex_elems[0].instance_divisor = 0;
   vertex_elems[0].vertex_buffer_index = 0;
   vertex_elems[0].src_format = PIPE_FORMAT_R32G32_FLOAT;
   vertex_elems[1].src_offset = sizeof(struct vertex2f);
   vertex_elems[1].instance_divisor = 0;
   vertex_elems[1].vertex_buffer_index = 0;
   vertex_elems[1].src_format = PIPE_FORMAT_R32G32_FLOAT;
   c->vertex_elems_state = c->pipe->create_vertex_elements_state(c->pipe, 2, vertex_elems);

   /*
    * Create our fragment shader's constant buffer
    * Const buffer contains the color conversion matrix and bias vectors
    */
   /* XXX: Create with IMMUTABLE/STATIC... although it does change every once in a long while... */
   c->csc_matrix = pipe_buffer_create
   (
      c->pipe->screen,
      PIPE_BIND_CONSTANT_BUFFER,
      PIPE_USAGE_STATIC,
      sizeof(csc_matrix)
   );

   return c->csc_matrix != NULL;
}

static void
cleanup_buffers(struct vl_compositor *c)
{
   assert(c);

   c->pipe->delete_vertex_elements_state(c->pipe, c->vertex_elems_state);
   pipe_resource_reference(&c->vertex_buf.buffer, NULL);
   pipe_resource_reference(&c->csc_matrix, NULL);
}

static INLINE struct pipe_video_rect
default_rect(struct vl_compositor_layer *layer)
{
   struct pipe_resource *res = layer->sampler_views[0]->texture;
   struct pipe_video_rect rect = { 0, 0, res->width0, res->height0 };
   return rect;
}

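/* The following helpers express rectangles in coordinates normalized to the
 * given size; they are used both for the source texture coordinates and for
 * the destination quad, which the viewport later scales back to pixels. */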
static INLINE struct vertex2f
calc_topleft(struct vertex2f size, struct pipe_video_rect rect)
{
   struct vertex2f res = { rect.x / size.x, rect.y / size.y };
   return res;
}

static INLINE struct vertex2f
calc_bottomright(struct vertex2f size, struct pipe_video_rect rect)
{
   struct vertex2f res = { (rect.x + rect.w) / size.x, (rect.y + rect.h) / size.y };
   return res;
}

static INLINE void
calc_src_and_dst(struct vl_compositor_layer *layer, unsigned width, unsigned height,
                 struct pipe_video_rect src, struct pipe_video_rect dst)
{
   struct vertex2f size = { width, height };

   layer->src.tl = calc_topleft(size, src);
   layer->src.br = calc_bottomright(size, src);
   layer->dst.tl = calc_topleft(size, dst);
   layer->dst.br = calc_bottomright(size, dst);
}

static void
gen_rect_verts(struct vertex4f *vb, struct vl_compositor_layer *layer)
{
   assert(vb && layer);

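   /* Each layer is a quad of four vertex4f entries: .x/.y carry the
    * destination position and .z/.w the source texture coordinates,
    * matching the two R32G32_FLOAT vertex elements set up in init_buffers(). */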
   vb[0].x = layer->dst.tl.x;
   vb[0].y = layer->dst.tl.y;
   vb[0].z = layer->src.tl.x;
   vb[0].w = layer->src.tl.y;

   vb[1].x = layer->dst.br.x;
   vb[1].y = layer->dst.tl.y;
   vb[1].z = layer->src.br.x;
   vb[1].w = layer->src.tl.y;

   vb[2].x = layer->dst.br.x;
   vb[2].y = layer->dst.br.y;
   vb[2].z = layer->src.br.x;
   vb[2].w = layer->src.br.y;

   vb[3].x = layer->dst.tl.x;
   vb[3].y = layer->dst.br.y;
   vb[3].z = layer->src.tl.x;
   vb[3].w = layer->src.br.y;
}

static void
gen_vertex_data(struct vl_compositor *c)
{
   struct vertex4f *vb;
   struct pipe_transfer *buf_transfer;
   unsigned i;

   assert(c);

   vb = pipe_buffer_map(c->pipe, c->vertex_buf.buffer,
                        PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD | PIPE_TRANSFER_DONTBLOCK,
                        &buf_transfer);

   if (!vb) {
      // If the buffer is still locked from the last draw, create a new one
      create_vertex_buffer(c);
      vb = pipe_buffer_map(c->pipe, c->vertex_buf.buffer,
                           PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD,
                           &buf_transfer);
   }

   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; i++) {
      if (c->used_layers & (1 << i)) {
         struct vl_compositor_layer *layer = &c->layers[i];
         gen_rect_verts(vb, layer);
         vb += 4;

         if (layer->clearing &&
             c->dirty_tl.x >= layer->dst.tl.x &&
             c->dirty_tl.y >= layer->dst.tl.y &&
             c->dirty_br.x <= layer->dst.br.x &&
             c->dirty_br.y <= layer->dst.br.y) {

            // This layer clears the dirty area anyway, so there is no need for clear_render_target
            c->dirty_tl.x = c->dirty_tl.y = 1.0f;
            c->dirty_br.x = c->dirty_br.y = 0.0f;
         }
      }
   }

   pipe_buffer_unmap(c->pipe, buf_transfer);
}

static void
draw_layers(struct vl_compositor *c)
{
   unsigned vb_index, i;

   assert(c);

   for (i = 0, vb_index = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      if (c->used_layers & (1 << i)) {
         struct vl_compositor_layer *layer = &c->layers[i];
         struct pipe_sampler_view **samplers = &layer->sampler_views[0];
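         /* Bind only as many sampler views as the layer actually uses:
          * one for RGBA layers, two for palette layers, three for YCbCr
          * video buffers (unused slots are NULL). */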
         unsigned num_sampler_views = !samplers[1] ? 1 : !samplers[2] ? 2 : 3;

         c->pipe->bind_blend_state(c->pipe, layer->blend);
         c->pipe->bind_fs_state(c->pipe, layer->fs);
         c->pipe->bind_fragment_sampler_states(c->pipe, num_sampler_views, layer->samplers);
         c->pipe->set_fragment_sampler_views(c->pipe, num_sampler_views, samplers);
         util_draw_arrays(c->pipe, PIPE_PRIM_QUADS, vb_index * 4, 4);
         vb_index++;

         // Remember the currently drawn area as dirty for the next draw command
         c->dirty_tl.x = MIN2(layer->dst.tl.x, c->dirty_tl.x);
         c->dirty_tl.y = MIN2(layer->dst.tl.y, c->dirty_tl.y);
         c->dirty_br.x = MAX2(layer->dst.br.x, c->dirty_br.x);
         c->dirty_br.y = MAX2(layer->dst.br.y, c->dirty_br.y);
      }
   }
}

void
vl_compositor_reset_dirty_area(struct vl_compositor *c)
{
   assert(c);

   c->dirty_tl.x = c->dirty_tl.y = 0.0f;
   c->dirty_br.x = c->dirty_br.y = 1.0f;
}

void
vl_compositor_set_clear_color(struct vl_compositor *c, float color[4])
{
   unsigned i;

   assert(c);

   for (i = 0; i < 4; ++i)
      c->clear_color[i] = color[i];
}

void
vl_compositor_clear_layers(struct vl_compositor *c)
{
   unsigned i, j;

   assert(c);

   c->used_layers = 0;
   for (i = 0; i < VL_COMPOSITOR_MAX_LAYERS; ++i) {
      c->layers[i].clearing = i ? false : true;
      c->layers[i].blend = i ? c->blend_add : c->blend_clear;
      c->layers[i].fs = NULL;
      for (j = 0; j < 3; j++)
         pipe_sampler_view_reference(&c->layers[i].sampler_views[j], NULL);
   }
}

void
vl_compositor_cleanup(struct vl_compositor *c)
{
   assert(c);

   vl_compositor_clear_layers(c);

   cleanup_buffers(c);
   cleanup_shaders(c);
   cleanup_pipe_state(c);
}

void
vl_compositor_set_csc_matrix(struct vl_compositor *c, const float matrix[16])
{
   struct pipe_transfer *buf_transfer;

   assert(c);

   memcpy
   (
      pipe_buffer_map(c->pipe, c->csc_matrix,
                      PIPE_TRANSFER_WRITE | PIPE_TRANSFER_DISCARD,
                      &buf_transfer),
      matrix,
      sizeof(csc_matrix)
   );

   pipe_buffer_unmap(c->pipe, buf_transfer);
}

void
vl_compositor_set_layer_blend(struct vl_compositor *c,
                              unsigned layer, void *blend,
                              bool is_clearing)
{
   assert(c && blend);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->layers[layer].clearing = is_clearing;
   c->layers[layer].blend = blend;
}

void
vl_compositor_set_buffer_layer(struct vl_compositor *c,
                               unsigned layer,
                               struct pipe_video_buffer *buffer,
                               struct pipe_video_rect *src_rect,
                               struct pipe_video_rect *dst_rect)
{
   struct pipe_sampler_view **sampler_views;
   unsigned i;

   assert(c && buffer);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].fs = c->fs_video_buffer;

   sampler_views = buffer->get_sampler_view_components(buffer);
   for (i = 0; i < 3; ++i) {
      c->layers[layer].samplers[i] = c->sampler_linear;
      pipe_sampler_view_reference(&c->layers[layer].sampler_views[i], sampler_views[i]);
   }

   calc_src_and_dst(&c->layers[layer], buffer->width, buffer->height,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

void
vl_compositor_set_palette_layer(struct vl_compositor *c,
                                unsigned layer,
                                struct pipe_sampler_view *indexes,
                                struct pipe_sampler_view *palette,
                                struct pipe_video_rect *src_rect,
                                struct pipe_video_rect *dst_rect,
                                bool include_color_conversion)
{
   assert(c && indexes && palette);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;

   c->layers[layer].fs = include_color_conversion ?
      c->fs_palette.yuv : c->fs_palette.rgb;

   c->layers[layer].samplers[0] = c->sampler_linear;
   c->layers[layer].samplers[1] = c->sampler_nearest;
   c->layers[layer].samplers[2] = NULL;
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[0], indexes);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[1], palette);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[2], NULL);
   calc_src_and_dst(&c->layers[layer], indexes->texture->width0, indexes->texture->height0,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

void
vl_compositor_set_rgba_layer(struct vl_compositor *c,
                             unsigned layer,
                             struct pipe_sampler_view *rgba,
                             struct pipe_video_rect *src_rect,
                             struct pipe_video_rect *dst_rect)
{
   assert(c && rgba);

   assert(layer < VL_COMPOSITOR_MAX_LAYERS);

   c->used_layers |= 1 << layer;
   c->layers[layer].fs = c->fs_rgba;
   c->layers[layer].samplers[0] = c->sampler_linear;
   c->layers[layer].samplers[1] = NULL;
   c->layers[layer].samplers[2] = NULL;
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[0], rgba);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[1], NULL);
   pipe_sampler_view_reference(&c->layers[layer].sampler_views[2], NULL);
   calc_src_and_dst(&c->layers[layer], rgba->texture->width0, rgba->texture->height0,
                    src_rect ? *src_rect : default_rect(&c->layers[layer]),
                    dst_rect ? *dst_rect : default_rect(&c->layers[layer]));
}

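/*
 * Typical usage (a sketch, assuming a valid pipe context, a target surface
 * and a decoded video buffer):
 *
 *    vl_compositor_init(&c, pipe);
 *    vl_compositor_clear_layers(&c);
 *    vl_compositor_set_buffer_layer(&c, 0, video_buffer, NULL, NULL);
 *    vl_compositor_render(&c, dst_surface, NULL, NULL, true);
 */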
void
vl_compositor_render(struct vl_compositor *c,
                     struct pipe_surface           *dst_surface,
                     struct pipe_video_rect        *dst_area,
                     struct pipe_video_rect        *dst_clip,
                     bool clear_dirty_area)
{
   struct pipe_scissor_state scissor;

   assert(c);
   assert(dst_surface);

   c->fb_state.width = dst_surface->width;
   c->fb_state.height = dst_surface->height;
   c->fb_state.cbufs[0] = dst_surface;

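   /* The layer quads use normalized [0,1] coordinates, so the viewport scale
    * and translate map them either onto dst_area or onto the whole surface. */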
723      c->viewport.scale[0] = dst_area->w;
724      c->viewport.scale[1] = dst_area->h;
725      c->viewport.translate[0] = dst_area->x;
726      c->viewport.translate[1] = dst_area->y;
727   } else {
728      c->viewport.scale[0] = dst_surface->width;
729      c->viewport.scale[1] = dst_surface->height;
730      c->viewport.translate[0] = 0;
731      c->viewport.translate[1] = 0;
732   }
733
734   if (dst_clip) {
735      scissor.minx = dst_clip->x;
736      scissor.miny = dst_clip->y;
737      scissor.maxx = dst_clip->x + dst_clip->w;
738      scissor.maxy = dst_clip->y + dst_clip->h;
739   } else {
740      scissor.minx = 0;
741      scissor.miny = 0;
742      scissor.maxx = dst_surface->width;
743      scissor.maxy = dst_surface->height;
744   }
745
746   gen_vertex_data(c);
747
748   if (clear_dirty_area && (c->dirty_tl.x < c->dirty_br.x ||
749                            c->dirty_tl.y < c->dirty_br.y)) {
750      util_clear_render_target(c->pipe, dst_surface, c->clear_color,
751                               0, 0, dst_surface->width, dst_surface->height);
752      c->dirty_tl.x = c->dirty_tl.y = 1.0f;
753      c->dirty_br.x = c->dirty_br.y = 0.0f;
754   }
755
756   c->pipe->set_scissor_state(c->pipe, &scissor);
757   c->pipe->set_framebuffer_state(c->pipe, &c->fb_state);
758   c->pipe->set_viewport_state(c->pipe, &c->viewport);
759   c->pipe->bind_vs_state(c->pipe, c->vs);
760   c->pipe->set_vertex_buffers(c->pipe, 1, &c->vertex_buf);
761   c->pipe->bind_vertex_elements_state(c->pipe, c->vertex_elems_state);
762   c->pipe->set_constant_buffer(c->pipe, PIPE_SHADER_FRAGMENT, 0, c->csc_matrix);
763   c->pipe->bind_rasterizer_state(c->pipe, c->rast);
764
765   draw_layers(c);
766}
767
bool
vl_compositor_init(struct vl_compositor *c, struct pipe_context *pipe)
{
   csc_matrix csc_matrix;

   c->pipe = pipe;

   if (!init_pipe_state(c))
      return false;

   if (!init_shaders(c)) {
      cleanup_pipe_state(c);
      return false;
   }
   if (!init_buffers(c)) {
      cleanup_shaders(c);
      cleanup_pipe_state(c);
      return false;
   }

   vl_compositor_clear_layers(c);

   vl_csc_get_matrix(VL_CSC_COLOR_STANDARD_IDENTITY, NULL, true, csc_matrix);
   vl_compositor_set_csc_matrix(c, csc_matrix);

   c->clear_color[0] = c->clear_color[1] = 0.0f;
   c->clear_color[2] = c->clear_color[3] = 0.0f;
   vl_compositor_reset_dirty_area(c);

   return true;
}