1/*
2// Copyright (c) 2014 Intel Corporation 
3//
4// Licensed under the Apache License, Version 2.0 (the "License");
5// you may not use this file except in compliance with the License.
6// You may obtain a copy of the License at
7//
8//      http://www.apache.org/licenses/LICENSE-2.0
9//
10// Unless required by applicable law or agreed to in writing, software
11// distributed under the License is distributed on an "AS IS" BASIS,
12// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13// See the License for the specific language governing permissions and
14// limitations under the License.
15*/
16#include <HwcTrace.h>
17#include <Hwcomposer.h>
18#include <DisplayPlaneManager.h>
19#include <DisplayQuery.h>
20#include <VirtualDevice.h>
21#include <SoftVsyncObserver.h>
22
23#include <binder/IServiceManager.h>
24#include <binder/ProcessState.h>
25
26#include <hal_public.h>
27#include <libsync/sw_sync.h>
28#include <sync/sync.h>
29
30#include <va/va_android.h>
31#include <va/va_vpp.h>
32#include <va/va_tpi.h>
33
34#include <cutils/properties.h>
35
36#include <sys/types.h>
37#include <sys/stat.h>
38#include <fcntl.h>
39
40#define NUM_CSC_BUFFERS 6
41#define NUM_SCALING_BUFFERS 3
42
43#define QCIF_WIDTH 176
44#define QCIF_HEIGHT 144
45
46namespace android {
47namespace intel {
48
49static inline uint32_t align_width(uint32_t val)
50{
51    return align_to(val, 64);
52}
53
54static inline uint32_t align_height(uint32_t val)
55{
56    return align_to(val, 16);
57}
58
59static void my_close_fence(const char* func, const char* fenceName, int& fenceFd)
60{
61    if (fenceFd != -1) {
62        ALOGV("%s: closing fence %s (fd=%d)", func, fenceName, fenceFd);
63        int err = close(fenceFd);
64        if (err < 0) {
65            ALOGE("%s: fence %s close error %d: %s", func, fenceName, err, strerror(errno));
66        }
67        fenceFd = -1;
68    }
69}
70
71static void my_sync_wait_and_close(const char* func, const char* fenceName, int& fenceFd)
72{
73    if (fenceFd != -1) {
74        ALOGV("%s: waiting on fence %s (fd=%d)", func, fenceName, fenceFd);
75        int err = sync_wait(fenceFd, 300);
76        if (err < 0) {
77            ALOGE("%s: fence %s sync_wait error %d: %s", func, fenceName, err, strerror(errno));
78        }
79        my_close_fence(func, fenceName, fenceFd);
80    }
81}
82
83static void my_timeline_inc(const char* func, const char* timelineName, int& syncTimelineFd)
84{
85    if (syncTimelineFd != -1) {
86        ALOGV("%s: incrementing timeline %s (fd=%d)", func, timelineName, syncTimelineFd);
87        int err = sw_sync_timeline_inc(syncTimelineFd, 1);
88        if (err < 0)
89            ALOGE("%s sync timeline %s increment error %d: %s", func, timelineName, errno, strerror(errno));
90        syncTimelineFd = -1;
91    }
92}
93
// Convenience wrappers: stringize the variable name (#) so the log output
// identifies which fence/timeline was involved. Each takes the fd lvalue by
// reference and resets it to -1.
#define CLOSE_FENCE(fenceName)          my_close_fence(__func__, #fenceName, fenceName)
#define SYNC_WAIT_AND_CLOSE(fenceName)  my_sync_wait_and_close(__func__, #fenceName, fenceName)
#define TIMELINE_INC(timelineName)      my_timeline_inc(__func__, #timelineName, timelineName)
97
98class MappedSurface {
99public:
100    MappedSurface(VADisplay dpy, VASurfaceID surf)
101        : va_dpy(dpy),
102          ptr(NULL)
103    {
104        VAStatus va_status;
105        va_status = vaDeriveImage(va_dpy, surf, &image);
106        if (va_status != VA_STATUS_SUCCESS) {
107            ETRACE("vaDeriveImage returns %08x", va_status);
108            return;
109        }
110        va_status = vaMapBuffer(va_dpy, image.buf, (void**)&ptr);
111        if (va_status != VA_STATUS_SUCCESS) {
112            ETRACE("vaMapBuffer returns %08x", va_status);
113            vaDestroyImage(va_dpy, image.image_id);
114            return;
115        }
116    }
117    ~MappedSurface() {
118        if (ptr == NULL)
119            return;
120
121        VAStatus va_status;
122
123        va_status = vaUnmapBuffer(va_dpy, image.buf);
124        if (va_status != VA_STATUS_SUCCESS) ETRACE("vaUnmapBuffer returns %08x", va_status);
125
126        va_status = vaDestroyImage(va_dpy, image.image_id);
127        if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroyImage returns %08x", va_status);
128    }
129    bool valid() { return ptr != NULL; }
130    uint8_t* getPtr() { return ptr; }
131private:
132    VADisplay va_dpy;
133    VAImage image;
134    uint8_t* ptr;
135};
136
// Wraps an externally-allocated buffer as a VASurface so the VSP can use it
// as input or output. Two flavors: a gralloc buffer_handle_t (first ctor)
// or a kernel/DRM khandle (second ctor). On any failure 'surface' is left 0;
// callers check that. The surface is destroyed with this object.
class VirtualDevice::VAMappedHandle {
public:
    // Map a gralloc buffer. 'stride' is used as both the surface width and
    // every plane pitch; 'pixel_format' selects the RGBA/BGRA or YUV path.
    VAMappedHandle(VADisplay dpy, buffer_handle_t handle, uint32_t stride, uint32_t height, unsigned int pixel_format)
        : va_dpy(dpy),
          surface(0)
    {
        VTRACE("Map gralloc %p size=%ux%u", handle, stride, height);

        unsigned int format;
        // The external-buffer descriptor takes an array of opaque buffer
        // "names"; here a single gralloc handle cast to unsigned long.
        unsigned long buffer = reinterpret_cast<unsigned long>(handle);
        VASurfaceAttribExternalBuffers buf;
        buf.pixel_format = pixel_format;
        buf.width = stride;
        buf.height = height;
        buf.buffers = &buffer;
        buf.num_buffers = 1;
        buf.flags = 0;
        buf.private_data = NULL;

        if (pixel_format == VA_FOURCC_RGBA || pixel_format == VA_FOURCC_BGRA) {
            // NOTE(review): num_planes = 3 with identical pitches/offsets for
            // an interleaved RGBA surface is unusual — presumably what this
            // driver expects; confirm against the VA driver before changing.
            format = VA_RT_FORMAT_RGB32;
            buf.data_size = stride * height * 4;
            buf.num_planes = 3;
            buf.pitches[0] = stride;
            buf.pitches[1] = stride;
            buf.pitches[2] = stride;
            buf.pitches[3] = 0;
            buf.offsets[0] = 0;
            buf.offsets[1] = 0;
            buf.offsets[2] = 0;
            buf.offsets[3] = 0;
        }
        else {
            // NV12-style layout: luma plane followed by interleaved chroma
            // at offset stride*height. offsets[2]/[3] are left unset since
            // num_planes == 2.
            format = VA_RT_FORMAT_YUV420;
            buf.data_size = stride * height * 3/2;
            buf.num_planes = 2;
            buf.pitches[0] = stride;
            buf.pitches[1] = stride;
            buf.pitches[2] = 0;
            buf.pitches[3] = 0;
            buf.offsets[0] = 0;
            buf.offsets[1] = stride * height;
        }

        // Three attributes: memory type (Android gralloc), the external
        // buffer descriptor above, and the explicit pixel format.
        VASurfaceAttrib attrib_list[3];
        attrib_list[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
        attrib_list[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[0].value.type = VAGenericValueTypeInteger;
        attrib_list[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
        attrib_list[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
        attrib_list[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[1].value.type = VAGenericValueTypePointer;
        attrib_list[1].value.value.p = (void *)&buf;
        attrib_list[2].type = (VASurfaceAttribType)VASurfaceAttribPixelFormat;
        attrib_list[2].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[2].value.type = VAGenericValueTypeInteger;
        attrib_list[2].value.value.i = pixel_format;

        VAStatus va_status;
        va_status = vaCreateSurfaces(va_dpy,
                    format,
                    stride,
                    height,
                    &surface,
                    1,
                    attrib_list,
                    3);
        if (va_status != VA_STATUS_SUCCESS) {
            ETRACE("vaCreateSurfaces returns %08x, surface = %x", va_status, surface);
            surface = 0;
        }
    }
    // Map a kernel (DRM) buffer handle as an NV12 surface using the TPI
    // extension. 'tiled' tells the driver the buffer's tiling mode.
    VAMappedHandle(VADisplay dpy, buffer_handle_t khandle, uint32_t stride, uint32_t height, bool tiled)
        : va_dpy(dpy),
          surface(0)
    {
        int format;
        VASurfaceAttributeTPI attribTpi;
        memset(&attribTpi, 0, sizeof(attribTpi));
        VTRACE("Map khandle 0x%x size=%ux%u", khandle, stride, height);
        attribTpi.type = VAExternalMemoryKernelDRMBufffer;
        attribTpi.width = stride;
        attribTpi.height = height;
        attribTpi.size = stride*height*3/2;
        attribTpi.pixel_format = VA_FOURCC_NV12;
        attribTpi.tiling = tiled;
        attribTpi.luma_stride = stride;
        attribTpi.chroma_u_stride = stride;
        attribTpi.chroma_v_stride = stride;
        attribTpi.luma_offset = 0;
        // NV12 interleaved chroma: U starts right after luma; V is offset
        // one byte into the interleaved UV plane.
        attribTpi.chroma_u_offset = stride*height;
        attribTpi.chroma_v_offset = stride*height+1;
        format = VA_RT_FORMAT_YUV420;
        attribTpi.count = 1;
        attribTpi.buffers = (long unsigned int*) &khandle;

        VAStatus va_status;
        va_status = vaCreateSurfacesWithAttribute(va_dpy,
                    stride,
                    height,
                    format,
                    1,
                    &surface,
                    &attribTpi);
        if (va_status != VA_STATUS_SUCCESS) {
            ETRACE("vaCreateSurfacesWithAttribute returns %08x", va_status);
            surface = 0;
        }
    }
    ~VAMappedHandle()
    {
        // Nothing to destroy if surface creation failed.
        if (surface == 0)
            return;
        VAStatus va_status;
        va_status = vaDestroySurfaces(va_dpy, &surface, 1);
        if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces returns %08x", va_status);
    }
private:
    VADisplay va_dpy;
public:
    // 0 when mapping failed; otherwise a live surface owned by this object.
    VASurfaceID surface;
};
259
// refcounted version of VAMappedHandle, to make caching easier
// (RefBase lets sp<> manage the lifetime of cached surface mappings).
class VirtualDevice::VAMappedHandleObject : public RefBase, public VAMappedHandle {
public:
    // Gralloc-handle flavor (forwards to VAMappedHandle's first ctor).
    VAMappedHandleObject(VADisplay dpy, buffer_handle_t handle, uint32_t stride, uint32_t height, unsigned int pixel_format)
        : VAMappedHandle(dpy, handle, stride, height, pixel_format) { }
    // Kernel-handle flavor (forwards to VAMappedHandle's second ctor).
    VAMappedHandleObject(VADisplay dpy, buffer_handle_t khandle, uint32_t stride, uint32_t height, bool tiled)
        : VAMappedHandle(dpy, khandle, stride, height, tiled) { }
protected:
    // Protected: destruction only via RefBase when the last sp<> drops.
    ~VAMappedHandleObject() {}
};
270
271VirtualDevice::CachedBuffer::CachedBuffer(BufferManager *mgr, buffer_handle_t handle)
272    : manager(mgr),
273      mapper(NULL),
274      vaMappedHandle(NULL),
275      cachedKhandle(0)
276{
277    DataBuffer *buffer = manager->lockDataBuffer((buffer_handle_t)handle);
278    mapper = manager->map(*buffer);
279    manager->unlockDataBuffer(buffer);
280}
281
282VirtualDevice::CachedBuffer::~CachedBuffer()
283{
284    if (vaMappedHandle != NULL)
285        delete vaMappedHandle;
286    manager->unmap(mapper);
287}
288
// Marks the decoder buffer as "being rendered" via the payload manager so
// the producer won't recycle it while it is still in use; the matching
// destructor clears the flag.
VirtualDevice::HeldDecoderBuffer::HeldDecoderBuffer(const sp<VirtualDevice>& vd, const android::sp<CachedBuffer>& cachedBuffer)
    : vd(vd),
      cachedBuffer(cachedBuffer)
{
    if (!vd->mPayloadManager->setRenderStatus(cachedBuffer->mapper, true)) {
        ETRACE("Failed to set render status");
    }
}
297
// Clears the "being rendered" flag set by the constructor, allowing the
// decoder buffer to be recycled.
VirtualDevice::HeldDecoderBuffer::~HeldDecoderBuffer()
{
    if (!vd->mPayloadManager->setRenderStatus(cachedBuffer->mapper, false)) {
        ETRACE("Failed to set render status");
    }
}
304
// Base class for work items executed on the VirtualDevice worker thread;
// threadLoop() dequeues and runs them one at a time.
struct VirtualDevice::Task : public RefBase {
    virtual void run(VirtualDevice& vd) = 0;
    virtual ~Task() {}
};
309
// A Task that produces an output frame. 'successful' starts false and is
// set true by run() only on success; OnFrameReadyTask checks it before
// delivering the frame.
struct VirtualDevice::RenderTask : public VirtualDevice::Task {
    RenderTask() : successful(false) { }
    virtual void run(VirtualDevice& vd) = 0;
    bool successful;
};
315
316struct VirtualDevice::ComposeTask : public VirtualDevice::RenderTask {
317    ComposeTask()
318        : videoKhandle(0),
319          rgbHandle(NULL),
320          mappedRgbIn(NULL),
321          outputHandle(NULL),
322          yuvAcquireFenceFd(-1),
323          rgbAcquireFenceFd(-1),
324          outbufAcquireFenceFd(-1),
325          syncTimelineFd(-1) { }
326
327    virtual ~ComposeTask() {
328        // If queueCompose() creates this object and sets up fences,
329        // but aborts before enqueuing the task, or if the task runs
330        // but errors out, make sure our acquire fences get closed
331        // and any release fences get signaled.
332        CLOSE_FENCE(yuvAcquireFenceFd);
333        CLOSE_FENCE(rgbAcquireFenceFd);
334        CLOSE_FENCE(outbufAcquireFenceFd);
335        TIMELINE_INC(syncTimelineFd);
336    }
337
338    virtual void run(VirtualDevice& vd) {
339        bool dump = false;
340        if (vd.mDebugVspDump && ++vd.mDebugCounter > 200) {
341            dump = true;
342            vd.mDebugCounter = 0;
343        }
344
345        SYNC_WAIT_AND_CLOSE(yuvAcquireFenceFd);
346
347        VASurfaceID videoInSurface;
348        if (videoKhandle == 0) {
349            videoInSurface = vd.va_blank_yuv_in;
350        } else {
351            if (videoCachedBuffer->cachedKhandle != videoKhandle || videoCachedBuffer->vaMappedHandle == NULL) {
352                if (videoCachedBuffer->vaMappedHandle != NULL)
353                    delete videoCachedBuffer->vaMappedHandle;
354                videoCachedBuffer->vaMappedHandle = new VAMappedHandle(vd.va_dpy, videoKhandle, videoStride, videoBufHeight, videoTiled);
355                videoCachedBuffer->cachedKhandle = videoKhandle;
356            }
357            videoInSurface = videoCachedBuffer->vaMappedHandle->surface;
358        }
359
360        if (videoInSurface == 0) {
361            ETRACE("Couldn't map video");
362            return;
363        }
364        SYNC_WAIT_AND_CLOSE(rgbAcquireFenceFd);
365        SYNC_WAIT_AND_CLOSE(outbufAcquireFenceFd);
366
367        VAMappedHandle mappedVideoOut(vd.va_dpy, outputHandle, align_width(outWidth), align_height(outHeight), (unsigned int)VA_FOURCC_NV12);
368        if (mappedVideoOut.surface == 0) {
369            ETRACE("Unable to map outbuf");
370            return;
371        }
372
373        if (dump)
374            dumpSurface(vd.va_dpy, "/data/misc/vsp_in.yuv", videoInSurface, videoStride*videoBufHeight*3/2);
375
376        if (mappedRgbIn != NULL) {
377            if (dump)
378                dumpSurface(vd.va_dpy, "/data/misc/vsp_in.rgb", mappedRgbIn->surface, align_width(outWidth)*align_height(outHeight)*4);
379            vd.vspCompose(videoInSurface, mappedRgbIn->surface, mappedVideoOut.surface, &surface_region, &output_region);
380        }
381        else if (rgbHandle != NULL) {
382            VAMappedHandle localMappedRgbIn(vd.va_dpy, rgbHandle, align_width(outWidth), align_height(outHeight), (unsigned int)VA_FOURCC_BGRA);
383            vd.vspCompose(videoInSurface, localMappedRgbIn.surface, mappedVideoOut.surface, &surface_region, &output_region);
384        }
385        else {
386            // No RGBA, so compose with 100% transparent RGBA frame.
387            if (dump)
388                dumpSurface(vd.va_dpy, "/data/misc/vsp_in.rgb", vd.va_blank_rgb_in, align_width(outWidth)*align_height(outHeight)*4);
389            vd.vspCompose(videoInSurface, vd.va_blank_rgb_in, mappedVideoOut.surface, &surface_region, &output_region);
390        }
391        if (dump)
392            dumpSurface(vd.va_dpy, "/data/misc/vsp_out.yuv", mappedVideoOut.surface, align_width(outWidth)*align_height(outHeight)*3/2);
393        TIMELINE_INC(syncTimelineFd);
394        successful = true;
395    }
396    void dumpSurface(VADisplay va_dpy, const char* filename, VASurfaceID surf, int size) {
397        MappedSurface dumpSurface(va_dpy, surf);
398        if (dumpSurface.valid()) {
399            int fd = open(filename, O_CREAT | O_TRUNC | O_WRONLY, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP);
400            if (fd > 0) {
401                write(fd, dumpSurface.getPtr(), size);
402                close(fd);
403                ALOGI("Output dumped");
404            }
405            else
406                ALOGE("Error %d opening output file: %s", errno, strerror(errno));
407        }
408        else
409            ALOGE("Failed to map output for dump");
410    }
411    buffer_handle_t videoKhandle;
412    uint32_t videoStride;
413    uint32_t videoBufHeight;
414    bool videoTiled;
415    buffer_handle_t rgbHandle;
416    sp<RefBase> heldRgbHandle;
417    sp<VAMappedHandleObject> mappedRgbIn;
418    buffer_handle_t outputHandle;
419    VARectangle surface_region;
420    VARectangle output_region;
421    uint32_t outWidth;
422    uint32_t outHeight;
423    sp<CachedBuffer> videoCachedBuffer;
424    sp<RefBase> heldVideoBuffer;
425    int yuvAcquireFenceFd;
426    int rgbAcquireFenceFd;
427    int outbufAcquireFenceFd;
428    int syncTimelineFd;
429};
430
// Enables the VSP at the given output size, on the worker thread.
struct VirtualDevice::EnableVspTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        vd.vspEnable(width, height);
    }
    uint32_t width;
    uint32_t height;
};
438
// Disables the VSP, on the worker thread.
struct VirtualDevice::DisableVspTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        vd.vspDisable();
    }
};
444
445struct VirtualDevice::BlitTask : public VirtualDevice::RenderTask {
446    BlitTask()
447        : srcAcquireFenceFd(-1),
448          destAcquireFenceFd(-1),
449          syncTimelineFd(-1) { }
450
451    virtual ~BlitTask()
452    {
453        // If queueColorConvert() creates this object and sets up fences,
454        // but aborts before enqueuing the task, or if the task runs
455        // but errors out, make sure our acquire fences get closed
456        // and any release fences get signaled.
457        CLOSE_FENCE(srcAcquireFenceFd);
458        CLOSE_FENCE(destAcquireFenceFd);
459        TIMELINE_INC(syncTimelineFd);
460    }
461
462    virtual void run(VirtualDevice& vd) {
463        SYNC_WAIT_AND_CLOSE(srcAcquireFenceFd);
464        SYNC_WAIT_AND_CLOSE(destAcquireFenceFd);
465        BufferManager* mgr = vd.mHwc.getBufferManager();
466        if (!(mgr->blit(srcHandle, destHandle, destRect, false, false))) {
467            ETRACE("color space conversion from RGB to NV12 failed");
468        }
469        else
470            successful = true;
471        TIMELINE_INC(syncTimelineFd);
472    }
473    buffer_handle_t srcHandle;
474    buffer_handle_t destHandle;
475    int srcAcquireFenceFd;
476    int destAcquireFenceFd;
477    int syncTimelineFd;
478    crop_t destRect;
479};
480
// Notifies the WIDI listener that the input frame type (content size,
// buffer size, frame rate) changed. A no-op when built without INTEL_WIDI.
struct VirtualDevice::FrameTypeChangedTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
#ifdef INTEL_WIDI
        typeChangeListener->frameTypeChanged(inputFrameInfo);
        ITRACE("Notify frameTypeChanged: %dx%d in %dx%d @ %d fps",
            inputFrameInfo.contentWidth, inputFrameInfo.contentHeight,
            inputFrameInfo.bufferWidth, inputFrameInfo.bufferHeight,
            inputFrameInfo.contentFrameRateN);
#endif
    }
#ifdef INTEL_WIDI
    sp<IFrameTypeChangeListener> typeChangeListener;
    FrameInfo inputFrameInfo;
#endif
};
496
// Notifies the WIDI listener that the output buffer info changed.
// A no-op when built without INTEL_WIDI.
struct VirtualDevice::BufferInfoChangedTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
#ifdef INTEL_WIDI
        typeChangeListener->bufferInfoChanged(outputFrameInfo);
        ITRACE("Notify bufferInfoChanged: %dx%d in %dx%d @ %d fps",
            outputFrameInfo.contentWidth, outputFrameInfo.contentHeight,
            outputFrameInfo.bufferWidth, outputFrameInfo.bufferHeight,
            outputFrameInfo.contentFrameRateN);
#endif
    }
#ifdef INTEL_WIDI
    sp<IFrameTypeChangeListener> typeChangeListener;
    FrameInfo outputFrameInfo;
#endif
};
512
// Delivers a finished frame to the WIDI frame listener, holding a reference
// to the underlying buffer until the consumer returns it via
// notifyBufferReturned(). Without INTEL_WIDI, the hold is dropped at once.
struct VirtualDevice::OnFrameReadyTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        // Don't deliver frames whose rendering failed.
        if (renderTask != NULL && !renderTask->successful)
            return;

        {
            Mutex::Autolock _l(vd.mHeldBuffersLock);
            //Add the heldbuffer to the vector before calling onFrameReady, so that the buffer will be removed
            //from the vector properly even if the notifyBufferReturned call acquires mHeldBuffersLock first.
            vd.mHeldBuffers.add(handle, heldBuffer);
        }
#ifdef INTEL_WIDI
        // FIXME: we could remove this casting once onFrameReady receives
        // a buffer_handle_t handle
        status_t result = frameListener->onFrameReady((uint32_t)handle, handleType, renderTimestamp, mediaTimestamp);
        if (result != OK) {
            // Listener rejected the frame: release our hold immediately.
            Mutex::Autolock _l(vd.mHeldBuffersLock);
            vd.mHeldBuffers.removeItem(handle);
        }
#else
        // No consumer without WIDI; drop the hold right away.
        Mutex::Autolock _l(vd.mHeldBuffersLock);
        vd.mHeldBuffers.removeItem(handle);
#endif
    }
    sp<RenderTask> renderTask;
    sp<RefBase> heldBuffer;     // keeps the buffer alive while the consumer has it
    buffer_handle_t handle;
#ifdef INTEL_WIDI
    sp<IFrameListener> frameListener;
    HWCBufferHandleType handleType;
#endif
    int64_t renderTimestamp;
    int64_t mediaTimestamp;
};
547
// RAII holder handed out by BufferList::get(): when the last sp<> reference
// drops, the buffer is returned to the free list if it still matches the
// pool's current size, otherwise it is freed and the pool's allocation
// budget is replenished.
struct VirtualDevice::BufferList::HeldBuffer : public RefBase {
    HeldBuffer(BufferList& list, buffer_handle_t handle, uint32_t w, uint32_t h)
        : mList(list),
          mHandle(handle),
          mWidth(w),
          mHeight(h) { }
    virtual ~HeldBuffer()
    {
        // mTaskLock guards the pool state (see BufferList::get/clear).
        Mutex::Autolock _l(mList.mVd.mTaskLock);
        if (mWidth == mList.mWidth && mHeight == mList.mHeight) {
            VTRACE("Returning %s buffer %p (%ux%u) to list", mList.mName, mHandle, mWidth, mHeight);
            mList.mAvailableBuffers.push_back(mHandle);
        } else {
            // The pool was resized while we were out: free this buffer and
            // allow one replacement allocation at the new size.
            VTRACE("Deleting %s buffer %p (%ux%u)", mList.mName, mHandle, mWidth, mHeight);
            BufferManager* mgr = mList.mVd.mHwc.getBufferManager();
            mgr->freeGrallocBuffer((mHandle));
            if (mList.mBuffersToCreate < mList.mLimit)
                mList.mBuffersToCreate++;
        }
    }

    BufferList& mList;
    buffer_handle_t mHandle;
    uint32_t mWidth;
    uint32_t mHeight;
};
574
// A size-aware pool of gralloc buffers of a fixed format/usage. At most
// 'limit' buffers exist at a time; buffers are (re)allocated lazily when the
// requested dimensions change (see get()/clear()).
VirtualDevice::BufferList::BufferList(VirtualDevice& vd, const char* name,
                                      uint32_t limit, uint32_t format, uint32_t usage)
    : mVd(vd),
      mName(name),
      mLimit(limit),
      mFormat(format),
      mUsage(usage),
      mBuffersToCreate(0),
      mWidth(0),
      mHeight(0)
{
}
587
588buffer_handle_t VirtualDevice::BufferList::get(uint32_t width, uint32_t height, sp<RefBase>* heldBuffer)
589{
590    width = align_width(width);
591    height = align_height(height);
592    if (mWidth != width || mHeight != height) {
593        ITRACE("%s buffers changing from %dx%d to %dx%d",
594                mName, mWidth, mHeight, width, height);
595        clear();
596        mWidth = width;
597        mHeight = height;
598        mBuffersToCreate = mLimit;
599    }
600
601    buffer_handle_t handle;
602    if (mAvailableBuffers.empty()) {
603        if (mBuffersToCreate <= 0)
604            return NULL;
605        BufferManager* mgr = mVd.mHwc.getBufferManager();
606        handle = reinterpret_cast<buffer_handle_t>(
607            mgr->allocGrallocBuffer(width, height, mFormat, mUsage));
608        if (handle == NULL){
609            ETRACE("failed to allocate %s buffer", mName);
610            return NULL;
611        }
612        mBuffersToCreate--;
613    }
614    else {
615        handle = *mAvailableBuffers.begin();
616        mAvailableBuffers.erase(mAvailableBuffers.begin());
617    }
618    *heldBuffer = new HeldBuffer(*this, handle, width, height);
619    return handle;
620}
621
622void VirtualDevice::BufferList::clear()
623{
624    if (mWidth != 0 || mHeight != 0)
625        ITRACE("Releasing %s buffers (%ux%u)", mName, mWidth, mHeight);
626    if (!mAvailableBuffers.empty()) {
627        // iterate the list and call freeGraphicBuffer
628        for (List<buffer_handle_t>::iterator i = mAvailableBuffers.begin(); i != mAvailableBuffers.end(); ++i) {
629            VTRACE("Deleting the gralloc buffer associated with handle (%p)", (*i));
630            mVd.mHwc.getBufferManager()->freeGrallocBuffer((*i));
631        }
632        mAvailableBuffers.clear();
633    }
634    mWidth = 0;
635    mHeight = 0;
636}
637
// Constructs the virtual (WIDI) display device. Buffer pools are created
// empty; real initialization happens elsewhere (mInitialized starts false).
// NOTE(review): "mCachedBufferCapcity" is a pre-existing typo in the member
// name ("Capacity"); it is declared in the header, so it can't be renamed
// here without touching other files.
VirtualDevice::VirtualDevice(Hwcomposer& hwc)
    : mProtectedMode(false),
      mCscBuffers(*this, "CSC",
                  NUM_CSC_BUFFERS, DisplayQuery::queryNV12Format(),
                  GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_PRIVATE_1),
      mRgbUpscaleBuffers(*this, "RGB upscale",
                         NUM_SCALING_BUFFERS, HAL_PIXEL_FORMAT_BGRA_8888,
                         GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER),
      mInitialized(false),
      mHwc(hwc),
      mPayloadManager(NULL),
      mVsyncObserver(NULL),
      mOrigContentWidth(0),
      mOrigContentHeight(0),
      mFirstVideoFrame(true),
      mLastConnectionStatus(false),
      mCachedBufferCapcity(16),
      mDecWidth(0),
      mDecHeight(0)
{
    CTRACE();
#ifdef INTEL_WIDI
    mNextConfig.frameServerActive = false;
#endif
}
663
// Destructor: only warns if deinitialize() was not called first; actual
// teardown is expected to happen before destruction.
VirtualDevice::~VirtualDevice()
{
    WARN_IF_NOT_DEINIT();
}
668
669sp<VirtualDevice::CachedBuffer> VirtualDevice::getMappedBuffer(buffer_handle_t handle)
670{
671    ssize_t index = mMappedBufferCache.indexOfKey(handle);
672    sp<CachedBuffer> cachedBuffer;
673    if (index == NAME_NOT_FOUND) {
674        if (mMappedBufferCache.size() > mCachedBufferCapcity)
675            mMappedBufferCache.clear();
676
677        cachedBuffer = new CachedBuffer(mHwc.getBufferManager(), handle);
678        mMappedBufferCache.add(handle, cachedBuffer);
679    } else {
680        cachedBuffer = mMappedBufferCache[index];
681    }
682
683    return cachedBuffer;
684}
685
686bool VirtualDevice::threadLoop()
687{
688    sp<Task> task;
689    {
690        Mutex::Autolock _l(mTaskLock);
691        while (mTasks.empty()) {
692            mRequestQueued.wait(mTaskLock);
693        }
694        task = *mTasks.begin();
695        mTasks.erase(mTasks.begin());
696    }
697    if (task != NULL) {
698        task->run(*this);
699        task = NULL;
700    }
701    mRequestDequeued.signal();
702
703    return true;
704}
705#ifdef INTEL_WIDI
// Starts the WIDI frame server session: stages a fresh configuration in
// mNextConfig (picked up by prepare()), with default 96 dpi / 60 Hz policy
// and forced first-time notifications.
status_t VirtualDevice::start(sp<IFrameTypeChangeListener> typeChangeListener)
{
    ITRACE();
    Mutex::Autolock _l(mConfigLock);
    mNextConfig.typeChangeListener = typeChangeListener;
    mNextConfig.frameListener = NULL;
    mNextConfig.policy.scaledWidth = 0;
    mNextConfig.policy.scaledHeight = 0;
    mNextConfig.policy.xdpi = 96;
    mNextConfig.policy.ydpi = 96;
    mNextConfig.policy.refresh = 60;
    // Re-reads the 'hwc.video.extmode.enable' property via the analyzer.
    mNextConfig.extendedModeEnabled =
        Hwcomposer::getInstance().getDisplayAnalyzer()->isVideoExtModeEnabled();
    mVideoFramerate = 0;
    mFirstVideoFrame = true;
    mNextConfig.frameServerActive = true;
    mNextConfig.forceNotifyFrameType = true;
    mNextConfig.forceNotifyBufferInfo = true;

    return NO_ERROR;
}
727
// Stops the WIDI frame server session: clears the staged configuration and
// releases the CSC buffer pool (under the task lock, which guards it).
// NOTE(review): 'isConnected' is currently unused here.
status_t VirtualDevice::stop(bool isConnected)
{
    ITRACE();
    Mutex::Autolock _l(mConfigLock);
    mNextConfig.typeChangeListener = NULL;
    mNextConfig.frameListener = NULL;
    mNextConfig.policy.scaledWidth = 0;
    mNextConfig.policy.scaledHeight = 0;
    mNextConfig.policy.xdpi = 96;
    mNextConfig.policy.ydpi = 96;
    mNextConfig.policy.refresh = 60;
    mNextConfig.frameServerActive = false;
    mNextConfig.extendedModeEnabled = false;
    mNextConfig.forceNotifyFrameType = false;
    mNextConfig.forceNotifyBufferInfo = false;
    {
        Mutex::Autolock _l(mTaskLock);
        mCscBuffers.clear();
    }
    return NO_ERROR;
}
749#endif
750
751bool VirtualDevice::isFrameServerActive() const
752{
753#ifdef INTEL_WIDI
754    return  mCurrentConfig.frameServerActive;
755#endif
756    return false;
757}
758
759#ifdef INTEL_WIDI
760/* TODO: 64-bit - this handle of size 32-bit is a problem for 64-bit */
// Called by the WIDI consumer when it is done with a frame buffer; drops
// the hold taken by OnFrameReadyTask so the buffer can be recycled.
// NOTE(review): 'handle' arrives as an int and is cast back to
// buffer_handle_t — lossy on 64-bit (see the TODO above); the %p trace
// formats also receive an int here. Verify when doing the 64-bit cleanup.
status_t VirtualDevice::notifyBufferReturned(int handle)
{
    CTRACE();
    Mutex::Autolock _l(mHeldBuffersLock);
    ssize_t index = mHeldBuffers.indexOfKey((buffer_handle_t)handle);
    if (index == NAME_NOT_FOUND) {
        ETRACE("Couldn't find returned khandle %p", handle);
    } else {
        VTRACE("Removing heldBuffer associated with handle (%p)", handle);
        mHeldBuffers.removeItemsAt(index, 1);
    }
    return NO_ERROR;
}
774
// Stages the frame-processing policy and frame listener supplied by the
// WIDI service; prepare() copies mNextConfig into mCurrentConfig.
status_t VirtualDevice::setResolution(const FrameProcessingPolicy& policy, sp<IFrameListener> listener)
{
    ITRACE();
    Mutex::Autolock _l(mConfigLock);
    mNextConfig.frameListener = listener;
    mNextConfig.policy = policy;
    return NO_ERROR;
}
783#endif
784static bool canUseDirectly(const hwc_display_contents_1_t *display, size_t n)
785{
786    const hwc_layer_1_t& fbTarget = display->hwLayers[display->numHwLayers-1];
787    const hwc_layer_1_t& layer = display->hwLayers[n];
788    const IMG_native_handle_t* nativeHandle = reinterpret_cast<const IMG_native_handle_t*>(layer.handle);
789    return !(layer.flags & HWC_SKIP_LAYER) && layer.transform == 0 &&
790            layer.blending == HWC_BLENDING_PREMULT &&
791            layer.sourceCropf.left == 0 && layer.sourceCropf.top == 0 &&
792            layer.displayFrame.left == 0 && layer.displayFrame.top == 0 &&
793            layer.sourceCropf.right == fbTarget.sourceCropf.right &&
794            layer.sourceCropf.bottom == fbTarget.sourceCropf.bottom &&
795            layer.displayFrame.right == fbTarget.displayFrame.right &&
796            layer.displayFrame.bottom == fbTarget.displayFrame.bottom &&
797            layer.planeAlpha == 255 && layer.handle != NULL &&
798            (nativeHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888 ||
799             nativeHandle->iFormat == HAL_PIXEL_FORMAT_BGRA_8888);
800}
801
// Pre-prepare hook: the virtual display has no early work to do; just
// verify the device was initialized. 'display' is unused here.
bool VirtualDevice::prePrepare(hwc_display_contents_1_t *display)
{
    RETURN_FALSE_IF_NOT_INIT();
    return true;
}
807
808bool VirtualDevice::prepare(hwc_display_contents_1_t *display)
809{
810    RETURN_FALSE_IF_NOT_INIT();
811
812    mRenderTimestamp = systemTime();
813    mVspInUse = false;
814    mExpectAcquireFences = false;
815    mIsForceCloneMode = false;
816#ifdef INTEL_WIDI
817    {
818        Mutex::Autolock _l(mConfigLock);
819        mCurrentConfig = mNextConfig;
820    }
821#endif
822
823    bool shouldBeConnected = (display != NULL);
824    if (shouldBeConnected != mLastConnectionStatus) {
825        // calling this will reload the property 'hwc.video.extmode.enable'
826        Hwcomposer::getInstance().getDisplayAnalyzer()->isVideoExtModeEnabled();
827        char propertyVal[PROPERTY_VALUE_MAX];
828        if (property_get("widi.compose.rgb_upscale", propertyVal, NULL) > 0)
829            mVspUpscale = atoi(propertyVal);
830        if (property_get("widi.compose.all_video", propertyVal, NULL) > 0)
831            mDebugVspClear = atoi(propertyVal);
832        if (property_get("widi.compose.dump", propertyVal, NULL) > 0)
833            mDebugVspDump = atoi(propertyVal);
834
835        Hwcomposer::getInstance().getMultiDisplayObserver()->notifyWidiConnectionStatus(shouldBeConnected);
836        mLastConnectionStatus = shouldBeConnected;
837    }
838
839    if (!display) {
840        // No image. We're done with any mappings and CSC buffers.
841        mMappedBufferCache.clear();
842        Mutex::Autolock _l(mTaskLock);
843        mCscBuffers.clear();
844        return true;
845    }
846
847#ifdef INTEL_WIDI
848    if (!mCurrentConfig.frameServerActive) {
849        // We're done with CSC buffers, since we blit to outbuf in this mode.
850        // We want to keep mappings cached, so we don't clear mMappedBufferCache.
851        Mutex::Autolock _l(mTaskLock);
852        mCscBuffers.clear();
853    }
854#else
855    Mutex::Autolock _l(mTaskLock);
856    mCscBuffers.clear();
857#endif
858
859    // by default send the FRAMEBUFFER_TARGET layer (composited image)
860    const ssize_t fbTarget = display->numHwLayers-1;
861    mRgbLayer = fbTarget;
862    mYuvLayer = -1;
863
864    DisplayAnalyzer *analyzer = mHwc.getDisplayAnalyzer();
865
866    mProtectedMode = false;
867#ifdef INTEL_WIDI
868    if (mCurrentConfig.typeChangeListener != NULL &&
869        !analyzer->isOverlayAllowed() &&
870        analyzer->getVideoInstances() <= 1) {
871        if (mCurrentConfig.typeChangeListener->shutdownVideo() != OK) {
872            ITRACE("Waiting for prior encoder session to shut down...");
873        }
874        /* Setting following flag to true will enable us to call bufferInfoChanged() in clone mode. */
875        mNextConfig.forceNotifyBufferInfo = true;
876        mYuvLayer = -1;
877        mRgbLayer = -1;
878        // Skipping frames.
879        // Fences aren't set in prepare, and we don't need them here, but they'll
880        // be set later and we have to close them. Don't log a warning in this case.
881        mExpectAcquireFences = true;
882        for (ssize_t i = 0; i < fbTarget; i++)
883            display->hwLayers[i].compositionType = HWC_OVERLAY;
884        return true;
885    }
886
887    for (ssize_t i = 0; i < fbTarget; i++) {
888        hwc_layer_1_t& layer = display->hwLayers[i];
889        if (analyzer->isVideoLayer(layer) && (mCurrentConfig.extendedModeEnabled || mDebugVspClear || analyzer->isProtectedLayer(layer))) {
890            if (mCurrentConfig.frameServerActive && mCurrentConfig.extendedModeEnabled) {
891                // If composed in surface flinger, then stream fbtarget.
892                if ((layer.flags & HWC_SKIP_LAYER) && !analyzer->ignoreVideoSkipFlag()) {
893                    continue;
894                }
895
896                /* If the resolution of the video layer is less than QCIF, then we are going to play it in clone mode only.*/
897                uint32_t vidContentWidth = layer.sourceCropf.right - layer.sourceCropf.left;
898                uint32_t vidContentHeight = layer.sourceCropf.bottom - layer.sourceCropf.top;
899                if (vidContentWidth < QCIF_WIDTH || vidContentHeight < QCIF_HEIGHT) {
900                    VTRACE("Ingoring layer %d which is too small for extended mode", i);
901                    continue;
902                }
903            }
904            mYuvLayer = i;
905            mProtectedMode = analyzer->isProtectedLayer(layer);
906            break;
907        }
908    }
909#endif
910
911    if (mYuvLayer == -1) {
912        mFirstVideoFrame = true;
913        mDecWidth = 0;
914        mDecHeight = 0;
915    }
916#ifdef INTEL_WIDI
917    if (mCurrentConfig.frameServerActive && mCurrentConfig.extendedModeEnabled && mYuvLayer != -1) {
918        if (handleExtendedMode(display)) {
919            mYuvLayer = -1;
920            mRgbLayer = -1;
921            // Extended mode is successful.
922            // Fences aren't set in prepare, and we don't need them here, but they'll
923            // be set later and we have to close them. Don't log a warning in this case.
924            mExpectAcquireFences = true;
925            for (ssize_t i = 0; i < fbTarget; i++)
926                display->hwLayers[i].compositionType = HWC_OVERLAY;
927            return true;
928        }
929        // if error in playback file , switch to clone mode
930        WTRACE("Error, falling back to clone mode");
931        mIsForceCloneMode = true;
932        mYuvLayer = -1;
933    }
934#endif
935    if (mYuvLayer == 0 && fbTarget == 1) {
936        // No RGB layer, so tell queueCompose to use blank RGB in fbtarget.
937        mRgbLayer = -1;
938    }
939    else if (mYuvLayer == 0 && fbTarget == 2) {
940        if (canUseDirectly(display, 1))
941            mRgbLayer = 1;
942    }
943    else if (mYuvLayer == -1 && fbTarget == 1) {
944        if (canUseDirectly(display, 0))
945            mRgbLayer = 0;
946    }
947
948    for (ssize_t i = 0; i < fbTarget; i++) {
949        hwc_layer_1_t& layer = display->hwLayers[i];
950        if (i == mYuvLayer || i == mRgbLayer || mRgbLayer != fbTarget)
951            layer.compositionType = HWC_OVERLAY;
952        else
953            layer.compositionType = HWC_FRAMEBUFFER;
954    }
955    if (mYuvLayer != -1 && mRgbLayer == fbTarget)
956        // This tells SurfaceFlinger to render this layer by writing transparent pixels
957        // to this layer's target region within the framebuffer. This effectively punches
958        // a hole through any content that is supposed to show below the video, and the
959        // video can be seen through this hole when we composite the YUV and RGBA layers
960        // together. Content above will draw on top of this hole and can cover the video.
961        // This has no effect when the video is the bottommost layer.
962        display->hwLayers[mYuvLayer].hints |= HWC_HINT_CLEAR_FB;
963
964#ifdef INTEL_WIDI
965    // we're streaming fbtarget, so send onFramePrepare and wait for composition to happen
966    if (mCurrentConfig.frameListener != NULL)
967        mCurrentConfig.frameListener->onFramePrepare(mRenderTimestamp, -1);
968#endif
969    return true;
970}
971
972bool VirtualDevice::commit(hwc_display_contents_1_t *display, IDisplayContext *context)
973{
974    RETURN_FALSE_IF_NOT_INIT();
975
976    if (display != NULL && (mRgbLayer != -1 || mYuvLayer != -1))
977        sendToWidi(display);
978
979    if (mVspEnabled && !mVspInUse) {
980        mVaMapCache.clear();
981        sp<DisableVspTask> disableVsp = new DisableVspTask();
982        mMappedBufferCache.clear();
983        Mutex::Autolock _l(mTaskLock);
984        mRgbUpscaleBuffers.clear();
985        mTasks.push(disableVsp);
986        mRequestQueued.signal();
987        mVspEnabled = false;
988    }
989
990    if (display != NULL) {
991        // All acquire fences should be copied somewhere else or closed by now
992        // and set to -1 in these structs except in the case of extended mode.
993        // Make sure the fences are closed and log a warning if not in extended mode.
994        if (display->outbufAcquireFenceFd != -1) {
995            if (!mExpectAcquireFences)
996                WTRACE("outbuf acquire fence (fd=%d) not yet saved or closed", display->outbufAcquireFenceFd);
997            CLOSE_FENCE(display->outbufAcquireFenceFd);
998        }
999        for (size_t i = 0; i < display->numHwLayers; i++) {
1000            hwc_layer_1_t& layer = display->hwLayers[i];
1001            if (layer.acquireFenceFd != -1) {
1002                if (!mExpectAcquireFences && (i < display->numHwLayers-1 || i == (size_t) mRgbLayer))
1003                    WTRACE("layer %zd acquire fence (fd=%zd) not yet saved or closed", i, layer.acquireFenceFd);
1004                CLOSE_FENCE(layer.acquireFenceFd);
1005            }
1006        }
1007    }
1008
1009    return true;
1010}
1011
1012bool VirtualDevice::sendToWidi(hwc_display_contents_1_t *display)
1013{
1014    VTRACE("RGB=%d, YUV=%d", mRgbLayer, mYuvLayer);
1015
1016    if (mYuvLayer == -1 && mRgbLayer == -1)
1017        return true;
1018
1019    if (mYuvLayer != -1) {
1020        mVspInUse = true;
1021        if (queueCompose(display))
1022            return true;
1023    }
1024
1025    return queueColorConvert(display);
1026}
1027
1028bool VirtualDevice::queueCompose(hwc_display_contents_1_t *display)
1029{
1030    hwc_layer_1_t& yuvLayer = display->hwLayers[mYuvLayer];
1031    if (yuvLayer.handle == NULL) {
1032        ETRACE("No video handle");
1033        return false;
1034    }
1035#ifdef INTEL_WIDI
1036    if (!mCurrentConfig.frameServerActive && display->outbuf == NULL) {
1037#else
1038    if (display->outbuf == NULL) {
1039#endif
1040        ETRACE("No outbuf");
1041        return true; // fallback would be pointless
1042    }
1043
1044    sp<ComposeTask> composeTask = new ComposeTask();
1045
1046    sp<RefBase> heldBuffer;
1047    sp<OnFrameReadyTask> frameReadyTask;
1048    Mutex::Autolock _l(mTaskLock);
1049
1050    float upscale_x = 1.0;
1051    float upscale_y = 1.0;
1052    hwc_layer_1_t& fbTarget = display->hwLayers[display->numHwLayers-1];
1053    composeTask->outWidth = fbTarget.sourceCropf.right - fbTarget.sourceCropf.left;
1054    composeTask->outHeight = fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top;
1055
1056    bool scaleRgb = false;
1057#ifdef INTEL_WIDI
1058    if (mCurrentConfig.frameServerActive) {
1059        if (mVspUpscale) {
1060            composeTask->outWidth = mCurrentConfig.policy.scaledWidth;
1061            composeTask->outHeight = mCurrentConfig.policy.scaledHeight;
1062            upscale_x = mCurrentConfig.policy.scaledWidth/(fbTarget.sourceCropf.right - fbTarget.sourceCropf.left);
1063            upscale_y = mCurrentConfig.policy.scaledHeight/(fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top);
1064            scaleRgb = composeTask->outWidth != fbTarget.sourceCropf.right - fbTarget.sourceCropf.left ||
1065                       composeTask->outHeight != fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top;
1066        }
1067
1068        composeTask->outputHandle = mCscBuffers.get(composeTask->outWidth, composeTask->outHeight, &heldBuffer);
1069        if (composeTask->outputHandle == NULL) {
1070            WTRACE("Out of CSC buffers, dropping frame");
1071            return true;
1072        }
1073    } else {
1074        composeTask->outputHandle = display->outbuf;
1075    }
1076#else
1077    composeTask->outputHandle = display->outbuf;
1078#endif
1079
1080    vspPrepare(composeTask->outWidth, composeTask->outHeight);
1081
1082    composeTask->videoCachedBuffer = getMappedBuffer(yuvLayer.handle);
1083    if (composeTask->videoCachedBuffer == NULL) {
1084        ETRACE("Couldn't map video handle %p", yuvLayer.handle);
1085        return false;
1086    }
1087    if (composeTask->videoCachedBuffer->mapper == NULL) {
1088        ETRACE("Src mapper gone");
1089        return false;
1090    }
1091    composeTask->heldVideoBuffer = new HeldDecoderBuffer(this, composeTask->videoCachedBuffer);
1092    IVideoPayloadManager::MetaData videoMetadata;
1093    if (!mPayloadManager->getMetaData(composeTask->videoCachedBuffer->mapper, &videoMetadata)) {
1094        ETRACE("Failed to map video payload info");
1095        return false;
1096    }
1097    if (videoMetadata.normalBuffer.width == 0 || videoMetadata.normalBuffer.height == 0) {
1098        ETRACE("Bad video metadata for handle %p", yuvLayer.handle);
1099        return false;
1100    }
1101    if (videoMetadata.normalBuffer.khandle == 0) {
1102        ETRACE("Bad khandle");
1103        return false;
1104    }
1105
1106    VARectangle& output_region = composeTask->output_region;
1107    output_region.x = static_cast<uint32_t>(yuvLayer.displayFrame.left*upscale_x) & ~1;
1108    output_region.y = static_cast<uint32_t>(yuvLayer.displayFrame.top*upscale_y) & ~1;
1109    output_region.width = (static_cast<uint32_t>(yuvLayer.displayFrame.right*upscale_y+1) & ~1) - output_region.x;
1110    output_region.height = (static_cast<uint32_t>(yuvLayer.displayFrame.bottom*upscale_y+1) & ~1) - output_region.y;
1111
1112    uint32_t videoWidth;
1113    uint32_t videoHeight;
1114    if (videoMetadata.transform == 0 || videoMetadata.transform == HAL_TRANSFORM_ROT_180) {
1115        videoWidth = videoMetadata.normalBuffer.width;
1116        videoHeight = videoMetadata.normalBuffer.height;
1117    } else {
1118        videoWidth = videoMetadata.normalBuffer.height;
1119        videoHeight = videoMetadata.normalBuffer.width;
1120    }
1121
1122    // Layer source crop info is based on an unrotated, unscaled buffer.
1123    // Rotate the rectangle to get the source crop we'd use for a rotated, unscaled buffer.
1124    hwc_frect_t rotatedCrop;
1125    switch (videoMetadata.transform) {
1126    default:
1127        rotatedCrop = yuvLayer.sourceCropf;
1128        break;
1129    case HAL_TRANSFORM_ROT_90:
1130        rotatedCrop.left = yuvLayer.sourceCropf.top;
1131        rotatedCrop.top = videoHeight - yuvLayer.sourceCropf.right;
1132        rotatedCrop.right = yuvLayer.sourceCropf.bottom;
1133        rotatedCrop.bottom = videoHeight - yuvLayer.sourceCropf.left;
1134        break;
1135    case HAL_TRANSFORM_ROT_180:
1136        rotatedCrop.left = videoWidth - yuvLayer.sourceCropf.right;
1137        rotatedCrop.top = videoHeight - yuvLayer.sourceCropf.bottom;
1138        rotatedCrop.right = videoWidth - yuvLayer.sourceCropf.left;
1139        rotatedCrop.bottom = videoHeight - yuvLayer.sourceCropf.top;
1140        break;
1141    case HAL_TRANSFORM_ROT_270:
1142        rotatedCrop.left = videoWidth - yuvLayer.sourceCropf.bottom;
1143        rotatedCrop.top = yuvLayer.sourceCropf.left;
1144        rotatedCrop.right = videoWidth - yuvLayer.sourceCropf.top;
1145        rotatedCrop.bottom = yuvLayer.sourceCropf.right;
1146        break;
1147    }
1148
1149    float factor_x = output_region.width / (rotatedCrop.right - rotatedCrop.left);
1150    float factor_y = output_region.height / (rotatedCrop.bottom - rotatedCrop.top);
1151
1152    uint32_t scaleWidth = videoWidth * factor_x;
1153    uint32_t scaleHeight = videoHeight * factor_y;
1154
1155    scaleWidth &= ~1;
1156    scaleHeight &= ~1;
1157
1158    IVideoPayloadManager::Buffer info;
1159    if (!getFrameOfSize(scaleWidth, scaleHeight, videoMetadata, info)) {
1160        //Returning true as else we fall into the queueColorConvert
1161        //resulting into scrambled frames for protected content.
1162        ITRACE("scaled frame not yet available.");
1163        return true;
1164    }
1165
1166    composeTask->videoKhandle = info.khandle;
1167    composeTask->videoStride = info.lumaStride;
1168    composeTask->videoBufHeight = info.bufHeight;
1169    composeTask->videoTiled = info.tiled;
1170
1171    // rotatedCrop accounts for rotation. Now account for any scaling along each dimension.
1172    hwc_frect_t scaledCrop = rotatedCrop;
1173    if (info.width < videoWidth) {
1174        float factor = static_cast<float>(info.width) / videoWidth;
1175        scaledCrop.left *= factor;
1176        scaledCrop.right *= factor;
1177    }
1178    if (info.height < videoHeight) {
1179        float factor = static_cast<float>(info.height) / videoHeight;
1180        scaledCrop.top *= factor;
1181        scaledCrop.bottom *= factor;
1182    }
1183
1184    VARectangle& surface_region = composeTask->surface_region;
1185    surface_region.x = static_cast<int>(scaledCrop.left) + info.offsetX;
1186    surface_region.y = static_cast<int>(scaledCrop.top) + info.offsetY;
1187    surface_region.width = static_cast<int>(scaledCrop.right - scaledCrop.left);
1188    surface_region.height = static_cast<int>(scaledCrop.bottom - scaledCrop.top);
1189
1190    VTRACE("Want to take (%d,%d)-(%d,%d) region from %dx%d video (in %dx%d buffer) and output to (%d,%d)-(%d,%d)",
1191            surface_region.x, surface_region.y,
1192            surface_region.x + surface_region.width, surface_region.y + surface_region.height,
1193            info.width, info.height,
1194            info.bufWidth, info.bufHeight,
1195            output_region.x, output_region.y,
1196            output_region.x + output_region.width, output_region.y + output_region.height);
1197
1198    if (surface_region.x + surface_region.width > static_cast<int>(info.width + info.offsetX) ||
1199        surface_region.y + surface_region.height > static_cast<int>(info.height + info.offsetY))
1200    {
1201        ETRACE("Source crop exceeds video dimensions: (%d,%d)-(%d,%d) > %ux%u",
1202                surface_region.x, surface_region.y,
1203                surface_region.x + surface_region.width, surface_region.y + surface_region.height,
1204                info.width, info.height);
1205        return false;
1206    }
1207
1208    if (surface_region.width > output_region.width || surface_region.height > output_region.height) {
1209        // VSP can upscale but can't downscale video, so use blank video
1210        // until we start getting downscaled frames.
1211        surface_region.x = 0;
1212        surface_region.y = 0;
1213        surface_region.width = composeTask->outWidth;
1214        surface_region.height = composeTask->outHeight;
1215        output_region = surface_region;
1216        composeTask->videoKhandle = 0;
1217        composeTask->videoStride = composeTask->outWidth;
1218        composeTask->videoBufHeight = composeTask->outHeight;
1219        composeTask->videoTiled = false;
1220    }
1221
1222    composeTask->yuvAcquireFenceFd = yuvLayer.acquireFenceFd;
1223    yuvLayer.acquireFenceFd = -1;
1224
1225    composeTask->outbufAcquireFenceFd = display->outbufAcquireFenceFd;
1226    display->outbufAcquireFenceFd = -1;
1227
1228    int retireFd = sw_sync_fence_create(mSyncTimelineFd, "widi_compose_retire", mNextSyncPoint);
1229    yuvLayer.releaseFenceFd = retireFd;
1230
1231    if (mRgbLayer == -1) {
1232        CLOSE_FENCE(fbTarget.acquireFenceFd);
1233    } else {
1234        hwc_layer_1_t& rgbLayer = display->hwLayers[mRgbLayer];
1235        composeTask->rgbAcquireFenceFd = rgbLayer.acquireFenceFd;
1236        rgbLayer.acquireFenceFd = -1;
1237        rgbLayer.releaseFenceFd = dup(retireFd);
1238    }
1239
1240    mNextSyncPoint++;
1241    composeTask->syncTimelineFd = mSyncTimelineFd;
1242
1243    if (mRgbLayer != -1)
1244    {
1245        hwc_layer_1_t& rgbLayer = display->hwLayers[mRgbLayer];
1246        if (rgbLayer.handle == NULL) {
1247            ETRACE("No RGB handle");
1248            return false;
1249        }
1250
1251        if (scaleRgb) {
1252            buffer_handle_t scalingBuffer;
1253            sp<RefBase> heldUpscaleBuffer;
1254            while ((scalingBuffer = mRgbUpscaleBuffers.get(composeTask->outWidth, composeTask->outHeight, &heldUpscaleBuffer)) == NULL &&
1255                   !mTasks.empty()) {
1256                VTRACE("Waiting for free RGB upscale buffer...");
1257                mRequestDequeued.wait(mTaskLock);
1258            }
1259            if (scalingBuffer == NULL) {
1260                ETRACE("Couldn't get scaling buffer");
1261                return false;
1262            }
1263            BufferManager* mgr = mHwc.getBufferManager();
1264            crop_t destRect;
1265            destRect.x = 0;
1266            destRect.y = 0;
1267            destRect.w = composeTask->outWidth;
1268            destRect.h = composeTask->outHeight;
1269            if (!mgr->blit(rgbLayer.handle, scalingBuffer, destRect, true, true))
1270                return true;
1271            composeTask->rgbHandle = scalingBuffer;
1272            composeTask->heldRgbHandle = heldUpscaleBuffer;
1273        }
1274        else {
1275            unsigned int pixel_format = VA_FOURCC_BGRA;
1276            const IMG_native_handle_t* nativeHandle = reinterpret_cast<const IMG_native_handle_t*>(rgbLayer.handle);
1277            if (nativeHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888)
1278                pixel_format = VA_FOURCC_RGBA;
1279            mRgbUpscaleBuffers.clear();
1280            ssize_t index = mVaMapCache.indexOfKey(rgbLayer.handle);
1281            if (index == NAME_NOT_FOUND) {
1282                composeTask->mappedRgbIn = new VAMappedHandleObject(va_dpy, rgbLayer.handle, composeTask->outWidth, composeTask->outHeight, pixel_format);
1283                mVaMapCache.add(rgbLayer.handle, composeTask->mappedRgbIn);
1284            }
1285            else
1286                composeTask->mappedRgbIn = mVaMapCache[index];
1287            if (composeTask->mappedRgbIn->surface == 0) {
1288                ETRACE("Unable to map RGB surface");
1289                return false;
1290            }
1291        }
1292    }
1293    else
1294        composeTask->mappedRgbIn = NULL;
1295
1296    mTasks.push_back(composeTask);
1297    mRequestQueued.signal();
1298#ifdef INTEL_WIDI
1299    if (mCurrentConfig.frameServerActive) {
1300
1301        FrameInfo inputFrameInfo;
1302        memset(&inputFrameInfo, 0, sizeof(inputFrameInfo));
1303        inputFrameInfo.isProtected = mProtectedMode;
1304        inputFrameInfo.frameType = HWC_FRAMETYPE_FRAME_BUFFER;
1305        if (mVspUpscale) {
1306            float upscale_x = (rotatedCrop.right - rotatedCrop.left) /
1307                              (yuvLayer.displayFrame.right - yuvLayer.displayFrame.left);
1308            float upscale_y = (rotatedCrop.bottom - rotatedCrop.top) /
1309                              (yuvLayer.displayFrame.bottom - yuvLayer.displayFrame.top);
1310            float upscale = upscale_x > upscale_y ? upscale_x : upscale_y;
1311            if (upscale <= 1.0)
1312                upscale = 1.0;
1313            inputFrameInfo.contentWidth = (fbTarget.sourceCropf.right - fbTarget.sourceCropf.left)*upscale;
1314            inputFrameInfo.contentHeight = (fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top)*upscale;
1315        }
1316        else {
1317            inputFrameInfo.contentWidth = composeTask->outWidth;
1318            inputFrameInfo.contentHeight = composeTask->outHeight;
1319        }
1320        inputFrameInfo.contentFrameRateN = 0;
1321        inputFrameInfo.contentFrameRateD = 0;
1322        FrameInfo outputFrameInfo = inputFrameInfo;
1323
1324        BufferManager* mgr = mHwc.getBufferManager();
1325        DataBuffer* dataBuf = mgr->lockDataBuffer(composeTask->outputHandle);
1326        outputFrameInfo.contentWidth = composeTask->outWidth;
1327        outputFrameInfo.contentHeight = composeTask->outHeight;
1328        outputFrameInfo.bufferWidth = dataBuf->getWidth();
1329        outputFrameInfo.bufferHeight = dataBuf->getHeight();
1330        outputFrameInfo.lumaUStride = dataBuf->getWidth();
1331        outputFrameInfo.chromaUStride = dataBuf->getWidth();
1332        outputFrameInfo.chromaVStride = dataBuf->getWidth();
1333        mgr->unlockDataBuffer(dataBuf);
1334
1335        queueFrameTypeInfo(inputFrameInfo);
1336        if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0)
1337            return true; // This isn't a failure, WiDi just doesn't want frames right now.
1338        queueBufferInfo(outputFrameInfo);
1339
1340        if (mCurrentConfig.frameListener != NULL) {
1341            frameReadyTask = new OnFrameReadyTask();
1342            frameReadyTask->renderTask = composeTask;
1343            frameReadyTask->heldBuffer = heldBuffer;
1344            frameReadyTask->frameListener = mCurrentConfig.frameListener;
1345            frameReadyTask->handle = composeTask->outputHandle;
1346            frameReadyTask->handleType = HWC_HANDLE_TYPE_GRALLOC;
1347            frameReadyTask->renderTimestamp = mRenderTimestamp;
1348            frameReadyTask->mediaTimestamp = -1;
1349            mTasks.push_back(frameReadyTask);
1350        }
1351    }
1352    else {
1353        display->retireFenceFd = dup(retireFd);
1354    }
1355#else
1356    display->retireFenceFd = dup(retireFd);
1357#endif
1358
1359    return true;
1360}
1361
// Queue a color-convert/blit of the RGB layer (mRgbLayer) into either a CSC
// buffer (frame-server mode) or the display's outbuf. This is the path taken
// when there is no video layer, or as a fallback when queueCompose() fails.
// Returns true on success (or when WiDi currently wants no frames), false on
// error or CSC-buffer exhaustion.
bool VirtualDevice::queueColorConvert(hwc_display_contents_1_t *display)
{
    if (mRgbLayer == -1) {
        ETRACE("RGB layer not set");
        return false;
    }
    hwc_layer_1_t& layer = display->hwLayers[mRgbLayer];
    if (layer.handle == NULL) {
        ETRACE("RGB layer has no handle set");
        return false;
    }
    if (display->outbuf == NULL) {
        ETRACE("outbuf is not set");
        return false;
    }

    {
        const IMG_native_handle_t* nativeSrcHandle = reinterpret_cast<const IMG_native_handle_t*>(layer.handle);
        const IMG_native_handle_t* nativeDestHandle = reinterpret_cast<const IMG_native_handle_t*>(display->outbuf);

        // Fast path: when source and destination differ only by R/B channel
        // order (RGBA vs BGRA), do a synchronous channel swap in place of an
        // asynchronous blit task.
        if ((nativeSrcHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888 &&
            nativeDestHandle->iFormat == HAL_PIXEL_FORMAT_BGRA_8888) ||
            (nativeSrcHandle->iFormat == HAL_PIXEL_FORMAT_BGRA_8888 &&
            nativeDestHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888))
        {
            // Synchronous path: wait on both acquire fences up front.
            SYNC_WAIT_AND_CLOSE(layer.acquireFenceFd);
            SYNC_WAIT_AND_CLOSE(display->outbufAcquireFenceFd);
            display->retireFenceFd = -1;

            // synchronous in this case
            // Pixel count passed to colorSwap assumes a 32-pixel-aligned row
            // stride — NOTE(review): confirm this matches the gralloc
            // allocation for these formats.
            colorSwap(layer.handle, display->outbuf, ((nativeSrcHandle->iWidth+31)&~31)*nativeSrcHandle->iHeight);
            // Workaround: Don't keep cached buffers. If the VirtualDisplaySurface gets destroyed,
            //             these would be unmapped on the next frame, after the buffers are destroyed,
            //             which is causing heap corruption, probably due to a double-free somewhere.
            mMappedBufferCache.clear();
            return true;
        }
    }

    sp<BlitTask> blitTask = new BlitTask();
    sp<OnFrameReadyTask> frameReadyTask;
    blitTask->destRect.x = 0;
    blitTask->destRect.y = 0;
    blitTask->destRect.w = layer.sourceCropf.right - layer.sourceCropf.left;
    blitTask->destRect.h = layer.sourceCropf.bottom - layer.sourceCropf.top;
    blitTask->srcHandle = layer.handle;

    sp<RefBase> heldBuffer;
    Mutex::Autolock _l(mTaskLock);

    // Hand the source acquire fence over to the blit task; the layer struct
    // must no longer own it.
    blitTask->srcAcquireFenceFd = layer.acquireFenceFd;
    layer.acquireFenceFd = -1;

    blitTask->syncTimelineFd = mSyncTimelineFd;
    // Framebuffer after BlitTask::run() calls sw_sync_timeline_inc().
    layer.releaseFenceFd = sw_sync_fence_create(mSyncTimelineFd, "widi_blit_retire", mNextSyncPoint);
    mNextSyncPoint++;
#ifdef INTEL_WIDI
    if (mCurrentConfig.frameServerActive) {
        // Frame-server mode: blit into one of our own CSC buffers.
        blitTask->destHandle = mCscBuffers.get(blitTask->destRect.w, blitTask->destRect.h, &heldBuffer);
        blitTask->destAcquireFenceFd = -1;

        // we do not use retire fence in frameServerActive path.
        CLOSE_FENCE(display->retireFenceFd);

        // we use our own buffer, so just close this fence without a wait
        CLOSE_FENCE(display->outbufAcquireFenceFd);
    }
    else {
        blitTask->destHandle = display->outbuf;
        blitTask->destAcquireFenceFd = display->outbufAcquireFenceFd;
        // don't let TngDisplayContext::commitEnd() close this
        display->outbufAcquireFenceFd = -1;
        display->retireFenceFd = dup(layer.releaseFenceFd);
    }
#else
    blitTask->destHandle = display->outbuf;
    blitTask->destAcquireFenceFd = display->outbufAcquireFenceFd;
    // don't let TngDisplayContext::commitEnd() close this
    display->outbufAcquireFenceFd = -1;
    display->retireFenceFd = dup(layer.releaseFenceFd);
#endif
    // destHandle can only be NULL in the frame-server branch (outbuf was
    // checked above). NOTE(review): queueCompose() returns true when dropping
    // a frame for lack of CSC buffers; returning false here is inconsistent —
    // confirm whether any caller depends on the distinction (commit() ignores
    // sendToWidi()'s result).
    if (blitTask->destHandle == NULL) {
        WTRACE("Out of CSC buffers, dropping frame");
        return false;
    }

    mTasks.push_back(blitTask);
    mRequestQueued.signal();
#ifdef INTEL_WIDI
    if (mCurrentConfig.frameServerActive) {
        // Describe the input/output frames to the WiDi frame server.
        FrameInfo inputFrameInfo;
        memset(&inputFrameInfo, 0, sizeof(inputFrameInfo));
        inputFrameInfo.isProtected = mProtectedMode;
        FrameInfo outputFrameInfo;

        inputFrameInfo.frameType = HWC_FRAMETYPE_FRAME_BUFFER;
        inputFrameInfo.contentWidth = blitTask->destRect.w;
        inputFrameInfo.contentHeight = blitTask->destRect.h;
        inputFrameInfo.contentFrameRateN = 0;
        inputFrameInfo.contentFrameRateD = 0;
        outputFrameInfo = inputFrameInfo;

        BufferManager* mgr = mHwc.getBufferManager();
        DataBuffer* dataBuf = mgr->lockDataBuffer(blitTask->destHandle);
        outputFrameInfo.bufferWidth = dataBuf->getWidth();
        outputFrameInfo.bufferHeight = dataBuf->getHeight();
        outputFrameInfo.lumaUStride = dataBuf->getWidth();
        outputFrameInfo.chromaUStride = dataBuf->getWidth();
        outputFrameInfo.chromaVStride = dataBuf->getWidth();
        mgr->unlockDataBuffer(dataBuf);

        // In forced clone mode (extended-mode failure) the frame type was
        // already queued; don't re-send it.
        if (!mIsForceCloneMode)
            queueFrameTypeInfo(inputFrameInfo);

        if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0)
            return true; // This isn't a failure, WiDi just doesn't want frames right now.
        queueBufferInfo(outputFrameInfo);

        if (mCurrentConfig.frameListener != NULL) {
            frameReadyTask = new OnFrameReadyTask();
            frameReadyTask->renderTask = blitTask;
            frameReadyTask->heldBuffer = heldBuffer;
            frameReadyTask->frameListener = mCurrentConfig.frameListener;
            frameReadyTask->handle = blitTask->destHandle;
            frameReadyTask->handleType = HWC_HANDLE_TYPE_GRALLOC;
            frameReadyTask->renderTimestamp = mRenderTimestamp;
            frameReadyTask->mediaTimestamp = -1;
            mTasks.push_back(frameReadyTask);
        }
    }
#endif
    return true;
}
1496#ifdef INTEL_WIDI
1497bool VirtualDevice::handleExtendedMode(hwc_display_contents_1_t *display)
1498{
1499    FrameInfo inputFrameInfo;
1500    memset(&inputFrameInfo, 0, sizeof(inputFrameInfo));
1501    inputFrameInfo.isProtected = mProtectedMode;
1502
1503    hwc_layer_1_t& layer = display->hwLayers[mYuvLayer];
1504    if (layer.handle == NULL) {
1505        ETRACE("video layer has no handle set");
1506        return false;
1507    }
1508    sp<CachedBuffer> cachedBuffer;
1509    if ((cachedBuffer = getMappedBuffer(layer.handle)) == NULL) {
1510        ETRACE("Failed to map display buffer");
1511        return false;
1512    }
1513
1514    inputFrameInfo.frameType = HWC_FRAMETYPE_VIDEO;
1515    // for video mode let 30 fps be the default value.
1516    inputFrameInfo.contentFrameRateN = 30;
1517    inputFrameInfo.contentFrameRateD = 1;
1518
1519    IVideoPayloadManager::MetaData metadata;
1520    if (!mPayloadManager->getMetaData(cachedBuffer->mapper, &metadata)) {
1521        ETRACE("Failed to get metadata");
1522        return false;
1523    }
1524
1525    if (metadata.transform == 0 || metadata.transform == HAL_TRANSFORM_ROT_180) {
1526        inputFrameInfo.contentWidth = metadata.normalBuffer.width;
1527        inputFrameInfo.contentHeight = metadata.normalBuffer.height;
1528    } else {
1529        inputFrameInfo.contentWidth = metadata.normalBuffer.height;
1530        inputFrameInfo.contentHeight = metadata.normalBuffer.width;
1531        // 90 and 270 have some issues that appear to be decoder bugs
1532        ITRACE("Skipping extended mode due to rotation of 90 or 270");
1533        return false;
1534    }
1535    // Use the crop size if something changed derive it again..
1536    // Only get video source info if frame rate has not been initialized.
1537    // getVideoSourceInfo() is a fairly expensive operation. This optimization
1538    // will save us a few milliseconds per frame
1539    if (mFirstVideoFrame || (mOrigContentWidth != metadata.normalBuffer.width) ||
1540        (mOrigContentHeight != metadata.normalBuffer.height)) {
1541        mVideoFramerate = inputFrameInfo.contentFrameRateN;
1542        VTRACE("VideoWidth = %d, VideoHeight = %d", metadata.normalBuffer.width, metadata.normalBuffer.height);
1543        mOrigContentWidth = metadata.normalBuffer.width;
1544        mOrigContentHeight = metadata.normalBuffer.height;
1545
1546        // For the first video session by default
1547        int sessionID = Hwcomposer::getInstance().getDisplayAnalyzer()->getFirstVideoInstanceSessionID();
1548        if (sessionID >= 0) {
1549            ITRACE("Session id = %d", sessionID);
1550            VideoSourceInfo videoInfo;
1551            memset(&videoInfo, 0, sizeof(videoInfo));
1552            status_t ret = mHwc.getMultiDisplayObserver()->getVideoSourceInfo(sessionID, &videoInfo);
1553            if (ret == NO_ERROR) {
1554                ITRACE("width = %d, height = %d, fps = %d", videoInfo.width, videoInfo.height,
1555                        videoInfo.frameRate);
1556                if (videoInfo.frameRate > 0) {
1557                    mVideoFramerate = videoInfo.frameRate;
1558                }
1559            }
1560        }
1561        mFirstVideoFrame = false;
1562    }
1563    inputFrameInfo.contentFrameRateN = mVideoFramerate;
1564    inputFrameInfo.contentFrameRateD = 1;
1565
1566    sp<ComposeTask> composeTask;
1567    sp<RefBase> heldBuffer;
1568    Mutex::Autolock _l(mTaskLock);
1569
1570    if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0) {
1571        queueFrameTypeInfo(inputFrameInfo);
1572        return true; // This isn't a failure, WiDi just doesn't want frames right now.
1573    }
1574
1575    IVideoPayloadManager::Buffer info;
1576    if (!getFrameOfSize(mCurrentConfig.policy.scaledWidth, mCurrentConfig.policy.scaledHeight, metadata, info)) {
1577        ITRACE("Extended mode waiting for scaled frame");
1578        return false;
1579    }
1580
1581    queueFrameTypeInfo(inputFrameInfo);
1582
1583    heldBuffer = new HeldDecoderBuffer(this, cachedBuffer);
1584    int64_t mediaTimestamp = metadata.timestamp;
1585
1586    VARectangle surface_region;
1587    surface_region.x = info.offsetX;
1588    surface_region.y = info.offsetY;
1589    surface_region.width = info.width;
1590    surface_region.height = info.height;
1591    FrameInfo outputFrameInfo = inputFrameInfo;
1592    outputFrameInfo.bufferFormat = metadata.format;
1593
1594    outputFrameInfo.contentWidth = info.width;
1595    outputFrameInfo.contentHeight = info.height;
1596    outputFrameInfo.bufferWidth = info.bufWidth;
1597    outputFrameInfo.bufferHeight = info.bufHeight;
1598    outputFrameInfo.lumaUStride = info.lumaStride;
1599    outputFrameInfo.chromaUStride = info.chromaUStride;
1600    outputFrameInfo.chromaVStride = info.chromaVStride;
1601
1602    if (outputFrameInfo.bufferFormat == 0 ||
1603        outputFrameInfo.bufferWidth < outputFrameInfo.contentWidth ||
1604        outputFrameInfo.bufferHeight < outputFrameInfo.contentHeight ||
1605        outputFrameInfo.contentWidth <= 0 || outputFrameInfo.contentHeight <= 0 ||
1606        outputFrameInfo.lumaUStride <= 0 ||
1607        outputFrameInfo.chromaUStride <= 0 || outputFrameInfo.chromaVStride <= 0) {
1608        ITRACE("Payload cleared or inconsistent info, not sending frame");
1609        ITRACE("outputFrameInfo.bufferFormat  = %d ", outputFrameInfo.bufferFormat);
1610        ITRACE("outputFrameInfo.bufferWidth   = %d ", outputFrameInfo.bufferWidth);
1611        ITRACE("outputFrameInfo.contentWidth  = %d ", outputFrameInfo.contentWidth);
1612        ITRACE("outputFrameInfo.bufferHeight  = %d ", outputFrameInfo.bufferHeight);
1613        ITRACE("outputFrameInfo.contentHeight = %d ", outputFrameInfo.contentHeight);
1614        ITRACE("outputFrameInfo.lumaUStride   = %d ", outputFrameInfo.lumaUStride);
1615        ITRACE("outputFrameInfo.chromaUStride = %d ", outputFrameInfo.chromaUStride);
1616        ITRACE("outputFrameInfo.chromaVStride = %d ", outputFrameInfo.chromaVStride);
1617        return false;
1618    }
1619
1620    if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0)
1621        return true; // This isn't a failure, WiDi just doesn't want frames right now.
1622
1623    if (info.khandle == mExtLastKhandle && mediaTimestamp == mExtLastTimestamp) {
1624        // Same frame again. We don't send a frame, but we return true because
1625        // this isn't an error.
1626        if (metadata.transform != 0)
1627            mVspInUse = true; // Don't shut down VSP just to start it again really quick.
1628        return true;
1629    }
1630    mExtLastKhandle = info.khandle;
1631    mExtLastTimestamp = mediaTimestamp;
1632
1633    HWCBufferHandleType handleType = HWC_HANDLE_TYPE_KBUF;
1634
1635    buffer_handle_t handle = info.khandle;
1636
1637    // Ideally we'd check if there's an offset (info.offsetX > 0 || info.offsetY > 0),
1638    // so we use VSP only when cropping is needed. But using the khandle directly when
1639    // both rotation and scaling are involved can encode the frame with the wrong
1640    // tiling status, so use VSP to normalize if any rotation is involved.
1641    if (metadata.transform != 0) {
1642        // Cropping (or above workaround) needed, so use VSP to do it.
1643        mVspInUse = true;
1644        vspPrepare(info.width, info.height);
1645
1646        composeTask = new ComposeTask();
1647        composeTask->heldVideoBuffer = heldBuffer;
1648        heldBuffer = NULL;
1649        composeTask->outWidth = info.width;
1650        composeTask->outHeight = info.height;
1651        composeTask->outputHandle = mCscBuffers.get(composeTask->outWidth, composeTask->outHeight, &heldBuffer);
1652        if (composeTask->outputHandle == NULL) {
1653            ITRACE("Out of CSC buffers, dropping frame");
1654            return true;
1655        }
1656
1657        composeTask->surface_region = surface_region;
1658        composeTask->videoCachedBuffer = cachedBuffer;
1659        VARectangle& output_region = composeTask->output_region;
1660        output_region.x = 0;
1661        output_region.y = 0;
1662        output_region.width = info.width;
1663        output_region.height = info.height;
1664
1665        composeTask->videoKhandle = info.khandle;
1666        composeTask->videoStride = info.lumaStride;
1667        composeTask->videoBufHeight = info.bufHeight;
1668        composeTask->videoTiled = info.tiled;
1669
1670        BufferManager* mgr = mHwc.getBufferManager();
1671        DataBuffer* dataBuf = mgr->lockDataBuffer(composeTask->outputHandle);
1672        outputFrameInfo.contentWidth = composeTask->outWidth;
1673        outputFrameInfo.contentHeight = composeTask->outHeight;
1674        outputFrameInfo.bufferWidth = dataBuf->getWidth();
1675        outputFrameInfo.bufferHeight = dataBuf->getHeight();
1676        outputFrameInfo.lumaUStride = dataBuf->getWidth();
1677        outputFrameInfo.chromaUStride = dataBuf->getWidth();
1678        outputFrameInfo.chromaVStride = dataBuf->getWidth();
1679        mgr->unlockDataBuffer(dataBuf);
1680
1681        handle = composeTask->outputHandle;
1682        handleType = HWC_HANDLE_TYPE_GRALLOC;
1683
1684        mTasks.push_back(composeTask);
1685        mRequestQueued.signal();
1686    }
1687
1688    queueBufferInfo(outputFrameInfo);
1689
1690    if (mCurrentConfig.frameListener != NULL) {
1691        sp<OnFrameReadyTask> frameReadyTask = new OnFrameReadyTask();
1692        frameReadyTask->renderTask = composeTask;
1693        frameReadyTask->heldBuffer = heldBuffer;
1694        frameReadyTask->frameListener = mCurrentConfig.frameListener;
1695        frameReadyTask->handle = handle;
1696        frameReadyTask->handleType = handleType;
1697        frameReadyTask->renderTimestamp = mRenderTimestamp;
1698        frameReadyTask->mediaTimestamp = mediaTimestamp;
1699
1700        mTasks.push_back(frameReadyTask);
1701        mRequestQueued.signal();
1702    }
1703
1704    return true;
1705}
1706
1707void VirtualDevice::queueFrameTypeInfo(const FrameInfo& inputFrameInfo)
1708{
1709    if (mCurrentConfig.forceNotifyFrameType ||
1710        memcmp(&inputFrameInfo, &mLastInputFrameInfo, sizeof(inputFrameInfo)) != 0) {
1711        // something changed, notify type change listener
1712        mNextConfig.forceNotifyFrameType = false;
1713        mLastInputFrameInfo = inputFrameInfo;
1714
1715        sp<FrameTypeChangedTask> notifyTask = new FrameTypeChangedTask;
1716        notifyTask->typeChangeListener = mCurrentConfig.typeChangeListener;
1717        notifyTask->inputFrameInfo = inputFrameInfo;
1718        mTasks.push_back(notifyTask);
1719    }
1720}
1721
1722void VirtualDevice::queueBufferInfo(const FrameInfo& outputFrameInfo)
1723{
1724    if (mCurrentConfig.forceNotifyBufferInfo ||
1725        memcmp(&outputFrameInfo, &mLastOutputFrameInfo, sizeof(outputFrameInfo)) != 0) {
1726        mNextConfig.forceNotifyBufferInfo = false;
1727        mLastOutputFrameInfo = outputFrameInfo;
1728
1729        sp<BufferInfoChangedTask> notifyTask = new BufferInfoChangedTask;
1730        notifyTask->typeChangeListener = mCurrentConfig.typeChangeListener;
1731        notifyTask->outputFrameInfo = outputFrameInfo;
1732
1733        //if (handleType == HWC_HANDLE_TYPE_GRALLOC)
1734        //    mMappedBufferCache.clear(); // !
1735        mTasks.push_back(notifyTask);
1736    }
1737}
1738#endif
1739
1740void VirtualDevice::colorSwap(buffer_handle_t src, buffer_handle_t dest, uint32_t pixelCount)
1741{
1742    sp<CachedBuffer> srcCachedBuffer;
1743    sp<CachedBuffer> destCachedBuffer;
1744
1745    {
1746        srcCachedBuffer = getMappedBuffer(src);
1747        if (srcCachedBuffer == NULL || srcCachedBuffer->mapper == NULL)
1748            return;
1749        destCachedBuffer = getMappedBuffer(dest);
1750        if (destCachedBuffer == NULL || destCachedBuffer->mapper == NULL)
1751            return;
1752    }
1753
1754    uint8_t* srcPtr = static_cast<uint8_t*>(srcCachedBuffer->mapper->getCpuAddress(0));
1755    uint8_t* destPtr = static_cast<uint8_t*>(destCachedBuffer->mapper->getCpuAddress(0));
1756    if (srcPtr == NULL || destPtr == NULL)
1757        return;
1758    while (pixelCount > 0) {
1759        destPtr[0] = srcPtr[2];
1760        destPtr[1] = srcPtr[1];
1761        destPtr[2] = srcPtr[0];
1762        destPtr[3] = srcPtr[3];
1763        srcPtr += 4;
1764        destPtr += 4;
1765        pixelCount--;
1766    }
1767}
1768
void VirtualDevice::vspPrepare(uint32_t width, uint32_t height)
{
    // Ensure the VSP video post-processor is enabled at the requested output
    // resolution. No-op when it is already running at that size.
    // NOTE(review): mRequestDequeued.wait(mTaskLock) below implies the caller
    // must already hold mTaskLock — confirm at all call sites.
    if (mVspEnabled && width == mVspWidth && height == mVspHeight)
        return;

    if (mVspEnabled)
    {
        // Resolution change: queue a teardown of the current VSP session for
        // the blit thread, and drop caches that refer to the old surfaces.
        ITRACE("Going to switch VSP from %ux%u to %ux%u", mVspWidth, mVspHeight, width, height);
        mMappedBufferCache.clear();
        mVaMapCache.clear();
        sp<DisableVspTask> disableVsp = new DisableVspTask();
        mTasks.push_back(disableVsp);
    }
    mVspWidth = width;
    mVspHeight = height;

    sp<EnableVspTask> enableTask = new EnableVspTask();
    enableTask->width = width;
    enableTask->height = height;
    mTasks.push_back(enableTask);
    mRequestQueued.signal();
    // to map a buffer from this thread, we need this task to complete on the other thread
    // Once the blit thread processes the task and drops its reference, the
    // strong count falls back to 1 (only our local sp<>), ending the wait.
    while (enableTask->getStrongCount() > 1) {
        VTRACE("Waiting for WidiBlit thread to enable VSP...");
        mRequestDequeued.wait(mTaskLock);
    }
    mVspEnabled = true;
}
1797
void VirtualDevice::vspEnable(uint32_t width, uint32_t height)
{
    // Bring up a libva video-processing (VSP) session for the given output
    // size and pre-build two "blank" input surfaces:
    //   - va_blank_yuv_in: an encrypted/processed black YUV surface, used in
    //     place of real frame data while waiting for downscaling to kick in;
    //   - va_blank_rgb_in: an RGBA surface, left transparent black at the end,
    //     used when there is no UI layer to compose on top of the video.
    // Errors from each libva call are logged via ETRACE but do not abort the
    // sequence (pre-existing behavior).
    width = align_width(width);
    height = align_height(height);
    ITRACE("Start VSP at %ux%u", width, height);
    VAStatus va_status;

    int display = 0;
    int major_ver, minor_ver;
    va_dpy = vaGetDisplay(&display);
    va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaInitialize returns %08x", va_status);

    // Query RT-format support for the video-processing entrypoint, then
    // create a VAProfileNone/VideoProc config from the returned attribute.
    VAConfigAttrib va_attr;
    va_attr.type = VAConfigAttribRTFormat;
    va_status = vaGetConfigAttributes(va_dpy,
                VAProfileNone,
                VAEntrypointVideoProc,
                &va_attr,
                1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaGetConfigAttributes returns %08x", va_status);

    va_status = vaCreateConfig(
                va_dpy,
                VAProfileNone,
                VAEntrypointVideoProc,
                &(va_attr),
                1,
                &va_config
                );
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateConfig returns %08x", va_status);

    VADisplayAttribute attr;
    attr.type = VADisplayAttribRenderMode;
    attr.value = VA_RENDER_MODE_LOCAL_OVERLAY;
    va_status = vaSetDisplayAttributes(va_dpy, &attr, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaSetDisplayAttributes returns %08x", va_status);


    // Plain YUV420 surface for the blank video input (also handed to
    // vaCreateContext below, which requires a render-target list).
    va_status = vaCreateSurfaces(
                va_dpy,
                VA_RT_FORMAT_YUV420,
                width,
                height,
                &va_blank_yuv_in,
                1,
                NULL,
                0);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateSurfaces (video in) returns %08x", va_status);

    // Describe an externally laid-out RGBA buffer for the blank RGB surface.
    // NOTE(review): num_planes is 3 with identical pitches and offsets[2] ==
    // offsets[1] even though RGBA is single-plane — presumably what this VSP
    // driver expects; confirm against the driver before changing.
    unsigned long buffer;
    VASurfaceAttribExternalBuffers buf;
    int stride = align_width(width);
    int bufHeight = align_height(height);
    buf.pixel_format = VA_FOURCC_RGBA;
    buf.width = width;
    buf.height = height;
    buf.data_size = stride * bufHeight * 4;
    buf.num_planes = 3;
    buf.pitches[0] = stride;
    buf.pitches[1] = stride;
    buf.pitches[2] = stride;
    buf.pitches[3] = 0;
    buf.offsets[0] = 0;
    buf.offsets[1] = stride * bufHeight;
    buf.offsets[2] = buf.offsets[1];
    buf.offsets[3] = 0;
    buf.buffers = &buffer;
    buf.num_buffers = 1;
    buf.flags = 0;
    buf.private_data = NULL;

    // Request VA-allocated memory using the external-buffer descriptor above.
    VASurfaceAttrib attrib_list[2];
    attrib_list[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
    attrib_list[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
    attrib_list[0].value.type = VAGenericValueTypeInteger;
    attrib_list[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA;
    attrib_list[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
    attrib_list[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
    attrib_list[1].value.type = VAGenericValueTypePointer;
    attrib_list[1].value.value.p = (void *)&buf;

    va_status = vaCreateSurfaces(
                va_dpy,
                VA_RT_FORMAT_RGB32,
                stride,
                bufHeight,
                &va_blank_rgb_in,
                1,
                attrib_list,
                2);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateSurfaces (blank rgba in) returns %08x", va_status);

    va_status = vaCreateContext(
                va_dpy,
                va_config,
                stride,
                bufHeight,
                0,
                &va_blank_yuv_in /* not used by VSP, but libva checks for it */,
                1,
                &va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateContext returns %08x", va_status);

    // Temporary YUV surface: zero-filled and composed with the opaque-black
    // RGBA below to produce the blank/encrypted black output in
    // va_blank_yuv_in; destroyed immediately afterwards.
    VASurfaceID tmp_yuv;
    va_status = vaCreateSurfaces(
                va_dpy,
                VA_RT_FORMAT_YUV420,
                stride,
                bufHeight,
                &tmp_yuv,
                1,
                NULL,
                0);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateSurfaces (temp yuv) returns %08x", va_status);
    {
        MappedSurface mappedVideoIn(va_dpy, tmp_yuv);
        if (mappedVideoIn.valid()) {
            // Value doesn't matter, as RGBA will be opaque,
            // but I don't want random data in here.
            memset(mappedVideoIn.getPtr(), 0x0, width*height*3/2);
        }
        else
            ETRACE("Unable to map tmp black surface");
    }

    {
        MappedSurface mappedBlankIn(va_dpy, va_blank_rgb_in);
        if (mappedBlankIn.valid()) {
            // Fill RGBA with opaque black temporarily, in order to generate an
            // encrypted black buffer in va_blank_yuv_in to use in place of the
            // real frame data during the short interval where we're waiting for
            // downscaling to kick in.
            uint32_t* pixels = reinterpret_cast<uint32_t*>(mappedBlankIn.getPtr());
            for (size_t i = 0; i < stride*height; i++)
                pixels[i] = 0xff000000;
        }
        else
            ETRACE("Unable to map blank rgba in");
    }

    // Compose opaque black with temp yuv to produce encrypted black yuv.
    VARectangle region;
    region.x = 0;
    region.y = 0;
    region.width = width;
    region.height = height;
    vspCompose(tmp_yuv, va_blank_rgb_in, va_blank_yuv_in, &region, &region);

    va_status = vaDestroySurfaces(va_dpy, &tmp_yuv, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces (temp yuv) returns %08x", va_status);

    {
        // Fill RGBA with transparent black now, to be used when there is no
        // UI to compose on top of the video.
        MappedSurface mappedBlankIn(va_dpy, va_blank_rgb_in);
        if (mappedBlankIn.valid())
            memset(mappedBlankIn.getPtr(), 0, stride*height*4);
        else
            ETRACE("Unable to map blank rgba in");
    }
}
1960
void VirtualDevice::vspDisable()
{
    // Tear down the VSP session created by vspEnable(): flush the pipeline
    // with a VA_PIPELINE_FLAG_END pass, then destroy context, surfaces,
    // config, and finally the display. Idempotent: returns early if the
    // context and blank surface are already gone.
    ITRACE("Shut down VSP");

    if (va_context == 0 && va_blank_yuv_in == 0) {
        ITRACE("Already shut down");
        return;
    }

    // Submit one last (empty) pipeline parameter buffer flagged END so the
    // VSP knows the stream is finished before the context is destroyed.
    VABufferID pipeline_param_id;
    VAStatus va_status;
    va_status = vaCreateBuffer(va_dpy,
                va_context,
                VAProcPipelineParameterBufferType,
                sizeof(VAProcPipelineParameterBuffer),
                1,
                NULL,
                &pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateBuffer returns %08x", va_status);

    VABlendState blend_state;
    VAProcPipelineParameterBuffer *pipeline_param;
    va_status = vaMapBuffer(va_dpy,
                pipeline_param_id,
                (void **)&pipeline_param);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaMapBuffer returns %08x", va_status);

    memset(pipeline_param, 0, sizeof(VAProcPipelineParameterBuffer));
    pipeline_param->pipeline_flags = VA_PIPELINE_FLAG_END;
    pipeline_param->num_filters = 0;
    pipeline_param->blend_state = &blend_state;

    va_status = vaUnmapBuffer(va_dpy, pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaUnmapBuffer returns %08x", va_status);

    va_status = vaBeginPicture(va_dpy, va_context, va_blank_yuv_in /* just need some valid surface */);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaBeginPicture returns %08x", va_status);

    va_status = vaRenderPicture(va_dpy, va_context, &pipeline_param_id, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaRenderPicture returns %08x", va_status);

    va_status = vaEndPicture(va_dpy, va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaEndPicture returns %08x", va_status);

    // Destroy in reverse order of creation; zero the handles so the
    // early-return guard above recognizes the shut-down state.
    va_status = vaDestroyContext(va_dpy, va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroyContext returns %08x", va_status);
    va_context = 0;

    va_status = vaDestroySurfaces(va_dpy, &va_blank_yuv_in, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces (video in) returns %08x", va_status);
    va_blank_yuv_in = 0;

    va_status = vaDestroySurfaces(va_dpy, &va_blank_rgb_in, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces (blank rgba in) returns %08x", va_status);

    if (va_config) {
        vaDestroyConfig(va_dpy, va_config);
        va_config = 0;
    }
    if (va_dpy) {
        vaTerminate(va_dpy);
        va_dpy = NULL;
    }
}
2025
void VirtualDevice::vspCompose(VASurfaceID videoIn, VASurfaceID rgbIn, VASurfaceID videoOut,
                               const VARectangle* surface_region, const VARectangle* output_region)
{
    // Run one synchronous VSP pass: read surface_region of videoIn and write
    // it (scaled) to output_region of videoOut. rgbIn is handed to the
    // pipeline via additional_outputs — presumably the RGB layer the VSP
    // composes/produces alongside the YUV output on this hardware; confirm
    // against the VSP driver docs. Blocks until the output is ready
    // (vaSyncSurface). libva errors are logged but not propagated.
    VAStatus va_status;

    VABufferID pipeline_param_id;
    va_status = vaCreateBuffer(va_dpy,
                va_context,
                VAProcPipelineParameterBufferType,
                sizeof(VAProcPipelineParameterBuffer),
                1,
                NULL,
                &pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateBuffer returns %08x", va_status);

    VABlendState blend_state;

    VAProcPipelineParameterBuffer *pipeline_param;
    va_status = vaMapBuffer(va_dpy,
                pipeline_param_id,
                (void **)&pipeline_param);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaMapBuffer returns %08x", va_status);

    memset(pipeline_param, 0, sizeof(VAProcPipelineParameterBuffer));
    pipeline_param->surface = videoIn;
    pipeline_param->surface_region = surface_region;
    pipeline_param->output_region = output_region;

    pipeline_param->pipeline_flags = 0;
    pipeline_param->num_filters = 0;
    pipeline_param->blend_state = &blend_state;
    pipeline_param->num_additional_outputs = 1;
    pipeline_param->additional_outputs = &rgbIn;

    va_status = vaUnmapBuffer(va_dpy, pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaUnmapBuffer returns %08x", va_status);

    va_status = vaBeginPicture(va_dpy, va_context, videoOut);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaBeginPicture returns %08x", va_status);

    va_status = vaRenderPicture(va_dpy, va_context, &pipeline_param_id, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaRenderPicture returns %08x", va_status);

    va_status = vaEndPicture(va_dpy, va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaEndPicture returns %08x", va_status);

    // Wait for completion so callers can safely read/encode videoOut.
    va_status = vaSyncSurface(va_dpy, videoOut);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaSyncSurface returns %08x", va_status);
}
2075
// Return the smaller of two unsigned 32-bit values.
static uint32_t min(uint32_t a, uint32_t b)
{
    if (a < b)
        return a;
    return b;
}
2080
2081bool VirtualDevice::getFrameOfSize(uint32_t width, uint32_t height, const IVideoPayloadManager::MetaData& metadata, IVideoPayloadManager::Buffer& info)
2082{
2083    if (metadata.transform == 0 || metadata.transform == HAL_TRANSFORM_ROT_180)
2084        setMaxDecodeResolution(min(width, metadata.normalBuffer.width), min(height, metadata.normalBuffer.height));
2085    else
2086        setMaxDecodeResolution(min(height, metadata.normalBuffer.width), min(width, metadata.normalBuffer.height));
2087
2088    if (metadata.transform == 0) {
2089        if (metadata.normalBuffer.khandle != 0 && metadata.normalBuffer.width <= width && metadata.normalBuffer.height <= height) {
2090            info = metadata.normalBuffer;
2091            return true;
2092        }
2093
2094        if (metadata.scalingBuffer.khandle != 0 && metadata.scalingBuffer.width <= width && metadata.scalingBuffer.height <= height) {
2095            info = metadata.scalingBuffer;
2096            return true;
2097        }
2098    } else {
2099        if (metadata.rotationBuffer.khandle != 0 && metadata.rotationBuffer.width <= width && metadata.rotationBuffer.height <= height) {
2100            info = metadata.rotationBuffer;
2101            return true;
2102        }
2103    }
2104
2105    return false;
2106}
2107
2108void VirtualDevice::setMaxDecodeResolution(uint32_t width, uint32_t height)
2109{
2110    if (mDecWidth == width && mDecHeight == height)
2111        return;
2112
2113    int sessionID = mHwc.getDisplayAnalyzer()->getFirstVideoInstanceSessionID();
2114    if (sessionID < 0) {
2115        ETRACE("Session id is less than 0");
2116        return;
2117    }
2118
2119    MultiDisplayObserver* mds = mHwc.getMultiDisplayObserver();
2120    status_t ret = mds->setDecoderOutputResolution(sessionID, width, height, 0, 0, width, height);
2121    if (ret != NO_ERROR) {
2122        ETRACE("Failed to set scaling to %ux%u: %x", width, height, ret);
2123        return;
2124    }
2125
2126    mDecWidth = width;
2127    mDecHeight = height;
2128    ITRACE("Set scaling to %ux%u",mDecWidth, mDecHeight);
2129}
2130
bool VirtualDevice::vsyncControl(bool enabled)
{
    // Enable/disable the software-generated vsync for the virtual display.
    RETURN_FALSE_IF_NOT_INIT();
    return mVsyncObserver->control(enabled);
}
2136
bool VirtualDevice::blank(bool blank)
{
    // Blanking is a no-op for the virtual display; always report success
    // once initialized.
    RETURN_FALSE_IF_NOT_INIT();
    return true;
}
2142
2143bool VirtualDevice::getDisplaySize(int *width, int *height)
2144{
2145    RETURN_FALSE_IF_NOT_INIT();
2146    if (!width || !height) {
2147        ETRACE("invalid parameters");
2148        return false;
2149    }
2150
2151    // TODO: make this platform specifc
2152    *width = 1280;
2153    *height = 720;
2154    return true;
2155}
2156
2157bool VirtualDevice::getDisplayConfigs(uint32_t *configs,
2158                                         size_t *numConfigs)
2159{
2160    RETURN_FALSE_IF_NOT_INIT();
2161    if (!configs || !numConfigs) {
2162        ETRACE("invalid parameters");
2163        return false;
2164    }
2165
2166    *configs = 0;
2167    *numConfigs = 1;
2168
2169    return true;
2170}
2171
2172bool VirtualDevice::getDisplayAttributes(uint32_t configs,
2173                                            const uint32_t *attributes,
2174                                            int32_t *values)
2175{
2176    RETURN_FALSE_IF_NOT_INIT();
2177
2178    if (!attributes || !values) {
2179        ETRACE("invalid parameters");
2180        return false;
2181    }
2182
2183    int i = 0;
2184    while (attributes[i] != HWC_DISPLAY_NO_ATTRIBUTE) {
2185        switch (attributes[i]) {
2186        case HWC_DISPLAY_VSYNC_PERIOD:
2187            values[i] = 1e9 / 60;
2188            break;
2189        case HWC_DISPLAY_WIDTH:
2190            values[i] = 1280;
2191            break;
2192        case HWC_DISPLAY_HEIGHT:
2193            values[i] = 720;
2194            break;
2195        case HWC_DISPLAY_DPI_X:
2196            values[i] = 0;
2197            break;
2198        case HWC_DISPLAY_DPI_Y:
2199            values[i] = 0;
2200            break;
2201        default:
2202            ETRACE("unknown attribute %d", attributes[i]);
2203            break;
2204        }
2205        i++;
2206    }
2207
2208    return true;
2209}
2210
bool VirtualDevice::compositionComplete()
{
    // Nothing to do for the virtual display; report success once initialized.
    RETURN_FALSE_IF_NOT_INIT();
    return true;
}
2216
bool VirtualDevice::initialize()
{
    // One-time setup for the virtual (WiDi) display device. On failure the
    // DEINIT_AND_RETURN_FALSE paths tear down any partially-created state via
    // deinitialize(). Returns mInitialized.
    mRgbLayer = -1;
    mYuvLayer = -1;
#ifdef INTEL_WIDI
    // Add initialization codes here. If init fails, invoke DEINIT_AND_RETURN_FALSE();
    mNextConfig.typeChangeListener = NULL;
    mNextConfig.policy.scaledWidth = 0;
    mNextConfig.policy.scaledHeight = 0;
    mNextConfig.policy.xdpi = 96;
    mNextConfig.policy.ydpi = 96;
    mNextConfig.policy.refresh = 60;
    mNextConfig.extendedModeEnabled = false;
    mNextConfig.forceNotifyFrameType = false;
    mNextConfig.forceNotifyBufferInfo = false;
    mCurrentConfig = mNextConfig;

    // Zero the cached frame infos so the first queue*Info() memcmp fires.
    memset(&mLastInputFrameInfo, 0, sizeof(mLastInputFrameInfo));
    memset(&mLastOutputFrameInfo, 0, sizeof(mLastOutputFrameInfo));
#endif
    mPayloadManager = mHwc.getPlatFactory()->createVideoPayloadManager();

    if (!mPayloadManager) {
        DEINIT_AND_RETURN_FALSE("Failed to create payload manager");
    }

    mVsyncObserver = new SoftVsyncObserver(*this);
    if (!mVsyncObserver || !mVsyncObserver->initialize()) {
        DEINIT_AND_RETURN_FALSE("Failed to create Soft Vsync Observer");
    }

    // Software sync timeline used for release fences of queued frames.
    // NOTE(review): the fd is not checked for failure here — confirm whether
    // sw_sync_timeline_create() can fail on supported kernels.
    mSyncTimelineFd = sw_sync_timeline_create();
    mNextSyncPoint = 1;
    mExpectAcquireFences = false;

    // Worker thread that drains mTasks (compose/enable/disable/notify tasks).
    mThread = new WidiBlitThread(this);
    mThread->run("WidiBlit", PRIORITY_URGENT_DISPLAY);

#ifdef INTEL_WIDI
    // Publish frame server service with service manager
    status_t ret = defaultServiceManager()->addService(String16("hwc.widi"), this);
    if (ret == NO_ERROR) {
        ProcessState::self()->startThreadPool();
        mInitialized = true;
    } else {
        ETRACE("Could not register hwc.widi with service manager, error = %d", ret);
        deinitialize();
    }
#else
    mInitialized = true;
#endif
    // VSP/libva state starts disabled; it is brought up lazily by vspPrepare().
    mVspEnabled = false;
    mVspInUse = false;
    mVspWidth = 0;
    mVspHeight = 0;
    va_dpy = NULL;
    va_config = 0;
    va_context = 0;
    va_blank_yuv_in = 0;
    va_blank_rgb_in = 0;
    mVspUpscale = false;
    mDebugVspClear = false;
    mDebugVspDump = false;
    mDebugCounter = 0;

    ITRACE("Init done.");

    return mInitialized;
}
2286
bool VirtualDevice::isConnected() const
{
    // The virtual display is always considered connected.
    return true;
}
2291
const char* VirtualDevice::getName() const
{
    // Human-readable device name used in logs/dumps.
    return "Virtual";
}
2296
int VirtualDevice::getType() const
{
    // Device type identifier for the HWC framework.
    return DEVICE_VIRTUAL;
}
2301
void VirtualDevice::onVsync(int64_t timestamp)
{
    // Forward the soft-vsync event to the HWC core for this virtual display.
    mHwc.vsync(DEVICE_VIRTUAL, timestamp);
}
2306
void VirtualDevice::dump(Dump& d)
{
    // No device-specific state is dumped for the virtual display.
}
2310
2311void VirtualDevice::deinitialize()
2312{
2313    VAStatus va_status;
2314
2315    if (mPayloadManager) {
2316        delete mPayloadManager;
2317        mPayloadManager = NULL;
2318    }
2319    DEINIT_AND_DELETE_OBJ(mVsyncObserver);
2320    mInitialized = false;
2321}
2322
bool VirtualDevice::setPowerMode(int /*mode*/)
{
    // Power modes are not applicable to the virtual display; accept all.
    return true;
}
2327
int VirtualDevice::getActiveConfig()
{
    // Only one config exists (see getDisplayConfigs), so it is always active.
    return 0;
}
2332
bool VirtualDevice::setActiveConfig(int /*index*/)
{
    // Switching configs is unsupported on the virtual display.
    return false;
}
2337
2338} // namespace intel
2339} // namespace android
2340