1/*
2// Copyright (c) 2014 Intel Corporation 
3//
4// Licensed under the Apache License, Version 2.0 (the "License");
5// you may not use this file except in compliance with the License.
6// You may obtain a copy of the License at
7//
8//      http://www.apache.org/licenses/LICENSE-2.0
9//
10// Unless required by applicable law or agreed to in writing, software
11// distributed under the License is distributed on an "AS IS" BASIS,
12// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13// See the License for the specific language governing permissions and
14// limitations under the License.
15*/
16#include <HwcTrace.h>
17#include <Hwcomposer.h>
18#include <DisplayPlaneManager.h>
19#include <DisplayQuery.h>
20#include <VirtualDevice.h>
21#include <SoftVsyncObserver.h>
22
23#include <binder/IServiceManager.h>
24#include <binder/ProcessState.h>
25
26#include <hal_public.h>
27#include <libsync/sw_sync.h>
28#include <sync/sync.h>
29
30#include <va/va_android.h>
31#include <va/va_vpp.h>
32#include <va/va_tpi.h>
33
34#include <cutils/properties.h>
35
36#include <sys/types.h>
37#include <sys/stat.h>
38#include <fcntl.h>
39
40#define NUM_CSC_BUFFERS 6
41#define NUM_SCALING_BUFFERS 3
42
43#define QCIF_WIDTH 176
44#define QCIF_HEIGHT 144
45
46namespace android {
47namespace intel {
48
49static inline uint32_t align_width(uint32_t val)
50{
51    return align_to(val, 64);
52}
53
54static inline uint32_t align_height(uint32_t val)
55{
56    return align_to(val, 16);
57}
58
59static void my_close_fence(const char* func, const char* fenceName, int& fenceFd)
60{
61    if (fenceFd != -1) {
62        ALOGV("%s: closing fence %s (fd=%d)", func, fenceName, fenceFd);
63        int err = close(fenceFd);
64        if (err < 0) {
65            ALOGE("%s: fence %s close error %d: %s", func, fenceName, err, strerror(errno));
66        }
67        fenceFd = -1;
68    }
69}
70
71static void my_sync_wait_and_close(const char* func, const char* fenceName, int& fenceFd)
72{
73    if (fenceFd != -1) {
74        ALOGV("%s: waiting on fence %s (fd=%d)", func, fenceName, fenceFd);
75        int err = sync_wait(fenceFd, 300);
76        if (err < 0) {
77            ALOGE("%s: fence %s sync_wait error %d: %s", func, fenceName, err, strerror(errno));
78        }
79        my_close_fence(func, fenceName, fenceFd);
80    }
81}
82
// Signal one step on a sw_sync timeline, releasing any release-fences waiting
// on it, then forget the fd.
// NOTE(review): the fd is only set to -1 here, never close()d — presumably
// the timeline fd is owned/closed elsewhere; verify, otherwise this leaks an
// fd each time.
static void my_timeline_inc(const char* func, const char* timelineName, int& syncTimelineFd)
{
    if (syncTimelineFd != -1) {
        ALOGV("%s: incrementing timeline %s (fd=%d)", func, timelineName, syncTimelineFd);
        int err = sw_sync_timeline_inc(syncTimelineFd, 1);
        if (err < 0)
            ALOGE("%s sync timeline %s increment error %d: %s", func, timelineName, errno, strerror(errno));
        syncTimelineFd = -1;
    }
}
93
94#define CLOSE_FENCE(fenceName)          my_close_fence(__func__, #fenceName, fenceName)
95#define SYNC_WAIT_AND_CLOSE(fenceName)  my_sync_wait_and_close(__func__, #fenceName, fenceName)
96#define TIMELINE_INC(timelineName)      my_timeline_inc(__func__, #timelineName, timelineName)
97
98class MappedSurface {
99public:
100    MappedSurface(VADisplay dpy, VASurfaceID surf)
101        : va_dpy(dpy),
102          ptr(NULL)
103    {
104        VAStatus va_status;
105        va_status = vaDeriveImage(va_dpy, surf, &image);
106        if (va_status != VA_STATUS_SUCCESS) {
107            ETRACE("vaDeriveImage returns %08x", va_status);
108            return;
109        }
110        va_status = vaMapBuffer(va_dpy, image.buf, (void**)&ptr);
111        if (va_status != VA_STATUS_SUCCESS) {
112            ETRACE("vaMapBuffer returns %08x", va_status);
113            vaDestroyImage(va_dpy, image.image_id);
114            return;
115        }
116    }
117    ~MappedSurface() {
118        if (ptr == NULL)
119            return;
120
121        VAStatus va_status;
122
123        va_status = vaUnmapBuffer(va_dpy, image.buf);
124        if (va_status != VA_STATUS_SUCCESS) ETRACE("vaUnmapBuffer returns %08x", va_status);
125
126        va_status = vaDestroyImage(va_dpy, image.image_id);
127        if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroyImage returns %08x", va_status);
128    }
129    bool valid() { return ptr != NULL; }
130    uint8_t* getPtr() { return ptr; }
131private:
132    VADisplay va_dpy;
133    VAImage image;
134    uint8_t* ptr;
135};
136
// Wraps a gralloc buffer or kernel handle as a VASurface so the VSP can read
// or write it.  On any failure `surface` is left 0; callers check that.
class VirtualDevice::VAMappedHandle {
public:
    // Map a gralloc buffer_handle_t.  pixel_format is a VA_FOURCC_* value;
    // RGBA/BGRA are mapped as RGB32, everything else as planar YUV420 (NV12
    // layout: luma plane followed by interleaved chroma at offset stride*height).
    VAMappedHandle(VADisplay dpy, buffer_handle_t handle, uint32_t stride, uint32_t height, unsigned int pixel_format)
        : va_dpy(dpy),
          surface(0)
    {
        VTRACE("Map gralloc %p size=%ux%u", handle, stride, height);

        unsigned int format;
        unsigned long buffer = reinterpret_cast<unsigned long>(handle);
        VASurfaceAttribExternalBuffers buf;
        buf.pixel_format = pixel_format;
        buf.width = stride;
        buf.height = height;
        buf.buffers = &buffer;
        buf.num_buffers = 1;
        buf.flags = 0;
        buf.private_data = NULL;

        if (pixel_format == VA_FOURCC_RGBA || pixel_format == VA_FOURCC_BGRA) {
            format = VA_RT_FORMAT_RGB32;
            buf.data_size = stride * height * 4;
            // NOTE(review): num_planes is 3 with all pitches equal and all
            // offsets 0 for an interleaved RGB32 buffer — presumably what the
            // driver expects; verify against the VA driver's requirements.
            buf.num_planes = 3;
            buf.pitches[0] = stride;
            buf.pitches[1] = stride;
            buf.pitches[2] = stride;
            buf.pitches[3] = 0;
            buf.offsets[0] = 0;
            buf.offsets[1] = 0;
            buf.offsets[2] = 0;
            buf.offsets[3] = 0;
        }
        else {
            // Two-plane YUV420 (NV12-style): chroma plane starts right after luma.
            format = VA_RT_FORMAT_YUV420;
            buf.data_size = stride * height * 3/2;
            buf.num_planes = 2;
            buf.pitches[0] = stride;
            buf.pitches[1] = stride;
            buf.pitches[2] = 0;
            buf.pitches[3] = 0;
            buf.offsets[0] = 0;
            buf.offsets[1] = stride * height;
        }

        // Three attributes: memory type (Android gralloc), the external
        // buffer descriptor above, and the explicit pixel format.
        VASurfaceAttrib attrib_list[3];
        attrib_list[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
        attrib_list[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[0].value.type = VAGenericValueTypeInteger;
        attrib_list[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_ANDROID_GRALLOC;
        attrib_list[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
        attrib_list[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[1].value.type = VAGenericValueTypePointer;
        attrib_list[1].value.value.p = (void *)&buf;
        attrib_list[2].type = (VASurfaceAttribType)VASurfaceAttribPixelFormat;
        attrib_list[2].flags = VA_SURFACE_ATTRIB_SETTABLE;
        attrib_list[2].value.type = VAGenericValueTypeInteger;
        attrib_list[2].value.value.i = pixel_format;

        VAStatus va_status;
        va_status = vaCreateSurfaces(va_dpy,
                    format,
                    stride,
                    height,
                    &surface,
                    1,
                    attrib_list,
                    3);
        if (va_status != VA_STATUS_SUCCESS) {
            ETRACE("vaCreateSurfaces returns %08x, surface = %x", va_status, surface);
            surface = 0;
        }
    }
    // Map a kernel DRM buffer handle as an NV12 surface via the TPI extension.
    VAMappedHandle(VADisplay dpy, buffer_handle_t khandle, uint32_t stride, uint32_t height, bool tiled)
        : va_dpy(dpy),
          surface(0)
    {
        int format;
        VASurfaceAttributeTPI attribTpi;
        memset(&attribTpi, 0, sizeof(attribTpi));
        VTRACE("Map khandle 0x%x size=%ux%u", khandle, stride, height);
        attribTpi.type = VAExternalMemoryKernelDRMBufffer;
        attribTpi.width = stride;
        attribTpi.height = height;
        attribTpi.size = stride*height*3/2;
        attribTpi.pixel_format = VA_FOURCC_NV12;
        attribTpi.tiling = tiled;
        attribTpi.luma_stride = stride;
        attribTpi.chroma_u_stride = stride;
        attribTpi.chroma_v_stride = stride;
        attribTpi.luma_offset = 0;
        // NV12: U/V are interleaved in one plane after the luma; V immediately
        // follows U, hence the +1 byte offset.
        attribTpi.chroma_u_offset = stride*height;
        attribTpi.chroma_v_offset = stride*height+1;
        format = VA_RT_FORMAT_YUV420;
        attribTpi.count = 1;
        attribTpi.buffers = (long unsigned int*) &khandle;

        VAStatus va_status;
        va_status = vaCreateSurfacesWithAttribute(va_dpy,
                    stride,
                    height,
                    format,
                    1,
                    &surface,
                    &attribTpi);
        if (va_status != VA_STATUS_SUCCESS) {
            ETRACE("vaCreateSurfacesWithAttribute returns %08x", va_status);
            surface = 0;
        }
    }
    ~VAMappedHandle()
    {
        // surface == 0 means construction failed; nothing to release.
        if (surface == 0)
            return;
        VAStatus va_status;
        va_status = vaDestroySurfaces(va_dpy, &surface, 1);
        if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces returns %08x", va_status);
    }
private:
    VADisplay va_dpy;
public:
    // The created VA surface, or 0 if mapping failed.
    VASurfaceID surface;
};
259
260// refcounted version of VAMappedHandle, to make caching easier
// refcounted version of VAMappedHandle, to make caching easier
class VirtualDevice::VAMappedHandleObject : public RefBase, public VAMappedHandle {
public:
    // Forwarding constructors; see VAMappedHandle for parameter semantics.
    VAMappedHandleObject(VADisplay dpy, buffer_handle_t handle, uint32_t stride, uint32_t height, unsigned int pixel_format)
        : VAMappedHandle(dpy, handle, stride, height, pixel_format) { }
    VAMappedHandleObject(VADisplay dpy, buffer_handle_t khandle, uint32_t stride, uint32_t height, bool tiled)
        : VAMappedHandle(dpy, khandle, stride, height, tiled) { }
protected:
    // Protected: destruction only via RefBase refcounting.
    ~VAMappedHandleObject() {}
};
270
271VirtualDevice::CachedBuffer::CachedBuffer(BufferManager *mgr, buffer_handle_t handle)
272    : manager(mgr),
273      mapper(NULL),
274      vaMappedHandle(NULL),
275      cachedKhandle(0)
276{
277    DataBuffer *buffer = manager->lockDataBuffer((buffer_handle_t)handle);
278    mapper = manager->map(*buffer);
279    manager->unlockDataBuffer(buffer);
280}
281
282VirtualDevice::CachedBuffer::~CachedBuffer()
283{
284    if (vaMappedHandle != NULL)
285        delete vaMappedHandle;
286    manager->unmap(mapper);
287}
288
// Marks a decoder buffer as "being rendered" for the payload manager so the
// decoder won't recycle it while the encoder still reads from it.  The flag
// is cleared by the destructor (RAII).
VirtualDevice::HeldDecoderBuffer::HeldDecoderBuffer(const sp<VirtualDevice>& vd, const android::sp<CachedBuffer>& cachedBuffer)
    : vd(vd),
      cachedBuffer(cachedBuffer)
{
    if (!vd->mPayloadManager->setRenderStatus(cachedBuffer->mapper, true)) {
        ETRACE("Failed to set render status");
    }
}
297
// Clears the render flag set in the constructor, allowing the decoder to
// reuse the buffer.
VirtualDevice::HeldDecoderBuffer::~HeldDecoderBuffer()
{
    if (!vd->mPayloadManager->setRenderStatus(cachedBuffer->mapper, false)) {
        ETRACE("Failed to set render status");
    }
}
304
// Base unit of work executed on the VirtualDevice worker thread (threadLoop).
// Refcounted so producers can enqueue and forget.
struct VirtualDevice::Task : public RefBase {
    virtual void run(VirtualDevice& vd) = 0;
    virtual ~Task() {}
};
309
// A Task that produces a frame; `successful` records whether run() completed,
// so dependent tasks (e.g. OnFrameReadyTask) can skip notification on failure.
struct VirtualDevice::RenderTask : public VirtualDevice::Task {
    RenderTask() : successful(false) { }
    virtual void run(VirtualDevice& vd) = 0;
    bool successful;
};
315
// Composes a YUV video layer with an (optional) RGB layer into an NV12 output
// buffer using the VSP, synchronized via acquire fences and a sw_sync release
// timeline.  All fence fds are owned by this task and are guaranteed to be
// closed/signaled by run() or, failing that, the destructor.
struct VirtualDevice::ComposeTask : public VirtualDevice::RenderTask {
    ComposeTask()
        : videoKhandle(0),
          rgbHandle(NULL),
          mappedRgbIn(NULL),
          outputHandle(NULL),
          yuvAcquireFenceFd(-1),
          rgbAcquireFenceFd(-1),
          outbufAcquireFenceFd(-1),
          syncTimelineFd(-1) { }

    virtual ~ComposeTask() {
        // If queueCompose() creates this object and sets up fences,
        // but aborts before enqueuing the task, or if the task runs
        // but errors out, make sure our acquire fences get closed
        // and any release fences get signaled.
        CLOSE_FENCE(yuvAcquireFenceFd);
        CLOSE_FENCE(rgbAcquireFenceFd);
        CLOSE_FENCE(outbufAcquireFenceFd);
        TIMELINE_INC(syncTimelineFd);
    }

    virtual void run(VirtualDevice& vd) {
        // Periodic surface dumping for debugging, roughly every 200 frames
        // when the widi.compose.dump property is set.
        bool dump = false;
        if (vd.mDebugVspDump && ++vd.mDebugCounter > 200) {
            dump = true;
            vd.mDebugCounter = 0;
        }

        SYNC_WAIT_AND_CLOSE(yuvAcquireFenceFd);

        // Resolve the video input surface: a blank surface when there is no
        // video layer, otherwise a (cached) VA mapping of the kernel handle.
        VASurfaceID videoInSurface;
        if (videoKhandle == 0) {
            videoInSurface = vd.va_blank_yuv_in;
        } else {
            // Re-map only when the kernel handle changed since last frame.
            if (videoCachedBuffer->cachedKhandle != videoKhandle || videoCachedBuffer->vaMappedHandle == NULL) {
                if (videoCachedBuffer->vaMappedHandle != NULL)
                    delete videoCachedBuffer->vaMappedHandle;
                videoCachedBuffer->vaMappedHandle = new VAMappedHandle(vd.va_dpy, videoKhandle, videoStride, videoBufHeight, videoTiled);
                videoCachedBuffer->cachedKhandle = videoKhandle;
            }
            videoInSurface = videoCachedBuffer->vaMappedHandle->surface;
        }

        if (videoInSurface == 0) {
            ETRACE("Couldn't map video");
            return;
        }
        SYNC_WAIT_AND_CLOSE(rgbAcquireFenceFd);
        SYNC_WAIT_AND_CLOSE(outbufAcquireFenceFd);

        // Output is mapped per-frame (not cached) as NV12.
        VAMappedHandle mappedVideoOut(vd.va_dpy, outputHandle, align_width(outWidth), align_height(outHeight), (unsigned int)VA_FOURCC_NV12);
        if (mappedVideoOut.surface == 0) {
            ETRACE("Unable to map outbuf");
            return;
        }

        if (dump)
            dumpSurface(vd.va_dpy, "/data/misc/vsp_in.yuv", videoInSurface, videoStride*videoBufHeight*3/2);

        // Pick the RGB input: a pre-mapped cached surface, a handle mapped
        // just for this frame, or the blank (fully transparent) RGB surface.
        if (mappedRgbIn != NULL) {
            if (dump)
                dumpSurface(vd.va_dpy, "/data/misc/vsp_in.rgb", mappedRgbIn->surface, align_width(outWidth)*align_height(outHeight)*4);
            vd.vspCompose(videoInSurface, mappedRgbIn->surface, mappedVideoOut.surface, &surface_region, &output_region);
        }
        else if (rgbHandle != NULL) {
            VAMappedHandle localMappedRgbIn(vd.va_dpy, rgbHandle, align_width(outWidth), align_height(outHeight), (unsigned int)VA_FOURCC_BGRA);
            vd.vspCompose(videoInSurface, localMappedRgbIn.surface, mappedVideoOut.surface, &surface_region, &output_region);
        }
        else {
            // No RGBA, so compose with 100% transparent RGBA frame.
            if (dump)
                dumpSurface(vd.va_dpy, "/data/misc/vsp_in.rgb", vd.va_blank_rgb_in, align_width(outWidth)*align_height(outHeight)*4);
            vd.vspCompose(videoInSurface, vd.va_blank_rgb_in, mappedVideoOut.surface, &surface_region, &output_region);
        }
        if (dump)
            dumpSurface(vd.va_dpy, "/data/misc/vsp_out.yuv", mappedVideoOut.surface, align_width(outWidth)*align_height(outHeight)*3/2);
        // Signal the release timeline now that composition finished.
        TIMELINE_INC(syncTimelineFd);
        successful = true;
    }
    // Write `size` bytes of a CPU-mapped surface to `filename` (debug only).
    void dumpSurface(VADisplay va_dpy, const char* filename, VASurfaceID surf, int size) {
        MappedSurface dumpSurface(va_dpy, surf);
        if (dumpSurface.valid()) {
            int fd = open(filename, O_CREAT | O_TRUNC | O_WRONLY, S_IRUSR | S_IWUSR | S_IRGRP | S_IWGRP);
            if (fd > 0) {
                write(fd, dumpSurface.getPtr(), size);
                close(fd);
                ALOGI("Output dumped");
            }
            else
                ALOGE("Error %d opening output file: %s", errno, strerror(errno));
        }
        else
            ALOGE("Failed to map output for dump");
    }
    buffer_handle_t videoKhandle;      // kernel handle of video input (0 = none)
    uint32_t videoStride;
    uint32_t videoBufHeight;
    bool videoTiled;
    buffer_handle_t rgbHandle;         // RGB input handle (NULL = use mappedRgbIn or blank)
    sp<RefBase> heldRgbHandle;         // keeps the RGB buffer alive for this task
    sp<VAMappedHandleObject> mappedRgbIn;
    buffer_handle_t outputHandle;
    VARectangle surface_region;        // source crop
    VARectangle output_region;         // destination rect
    uint32_t outWidth;
    uint32_t outHeight;
    sp<CachedBuffer> videoCachedBuffer;
    sp<RefBase> heldVideoBuffer;       // keeps the decoder buffer alive for this task
    int yuvAcquireFenceFd;
    int rgbAcquireFenceFd;
    int outbufAcquireFenceFd;
    int syncTimelineFd;
};
430
// Enables the VSP at the given output resolution, on the worker thread.
struct VirtualDevice::EnableVspTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        vd.vspEnable(width, height);
    }
    uint32_t width;
    uint32_t height;
};
438
// Disables the VSP, on the worker thread.
struct VirtualDevice::DisableVspTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        vd.vspDisable();
    }
};
444
445struct VirtualDevice::BlitTask : public VirtualDevice::RenderTask {
446    BlitTask()
447        : srcAcquireFenceFd(-1),
448          destAcquireFenceFd(-1),
449          syncTimelineFd(-1) { }
450
451    virtual ~BlitTask()
452    {
453        // If queueColorConvert() creates this object and sets up fences,
454        // but aborts before enqueuing the task, or if the task runs
455        // but errors out, make sure our acquire fences get closed
456        // and any release fences get signaled.
457        CLOSE_FENCE(srcAcquireFenceFd);
458        CLOSE_FENCE(destAcquireFenceFd);
459        TIMELINE_INC(syncTimelineFd);
460    }
461
462    virtual void run(VirtualDevice& vd) {
463        SYNC_WAIT_AND_CLOSE(srcAcquireFenceFd);
464        SYNC_WAIT_AND_CLOSE(destAcquireFenceFd);
465        BufferManager* mgr = vd.mHwc.getBufferManager();
466        if (!(mgr->blit(srcHandle, destHandle, destRect, false, false))) {
467            ETRACE("color space conversion from RGB to NV12 failed");
468        }
469        else
470            successful = true;
471        TIMELINE_INC(syncTimelineFd);
472    }
473    buffer_handle_t srcHandle;
474    buffer_handle_t destHandle;
475    int srcAcquireFenceFd;
476    int destAcquireFenceFd;
477    int syncTimelineFd;
478    crop_t destRect;
479};
480
// Notifies the Widi listener that the incoming frame type changed (Widi
// builds only; a no-op otherwise).
struct VirtualDevice::FrameTypeChangedTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
#ifdef INTEL_WIDI
        typeChangeListener->frameTypeChanged(inputFrameInfo);
        ITRACE("Notify frameTypeChanged: %dx%d in %dx%d @ %d fps",
            inputFrameInfo.contentWidth, inputFrameInfo.contentHeight,
            inputFrameInfo.bufferWidth, inputFrameInfo.bufferHeight,
            inputFrameInfo.contentFrameRateN);
#endif
    }
#ifdef INTEL_WIDI
    sp<IFrameTypeChangeListener> typeChangeListener;
    FrameInfo inputFrameInfo;
#endif
};
496
// Notifies the Widi listener that the outgoing buffer geometry changed (Widi
// builds only; a no-op otherwise).
struct VirtualDevice::BufferInfoChangedTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
#ifdef INTEL_WIDI
        typeChangeListener->bufferInfoChanged(outputFrameInfo);
        ITRACE("Notify bufferInfoChanged: %dx%d in %dx%d @ %d fps",
            outputFrameInfo.contentWidth, outputFrameInfo.contentHeight,
            outputFrameInfo.bufferWidth, outputFrameInfo.bufferHeight,
            outputFrameInfo.contentFrameRateN);
#endif
    }
#ifdef INTEL_WIDI
    sp<IFrameTypeChangeListener> typeChangeListener;
    FrameInfo outputFrameInfo;
#endif
};
512
// Hands a finished frame to the Widi frame listener, keeping the buffer alive
// (via mHeldBuffers) until notifyBufferReturned() is called for it.
struct VirtualDevice::OnFrameReadyTask : public VirtualDevice::Task {
    virtual void run(VirtualDevice& vd) {
        // Skip notification if the producing render task failed.
        if (renderTask != NULL && !renderTask->successful)
            return;

        {
            Mutex::Autolock _l(vd.mHeldBuffersLock);
            //Add the heldbuffer to the vector before calling onFrameReady, so that the buffer will be removed
            //from the vector properly even if the notifyBufferReturned call acquires mHeldBuffersLock first.
            vd.mHeldBuffers.add(handle, heldBuffer);
        }
#ifdef INTEL_WIDI
        // FIXME: we could remove this casting once onFrameReady receives
        // a buffer_handle_t handle
        status_t result = frameListener->onFrameReady((uint32_t)handle, handleType, renderTimestamp, mediaTimestamp);
        if (result != OK) {
            // Listener rejected the frame; release our hold immediately.
            Mutex::Autolock _l(vd.mHeldBuffersLock);
            vd.mHeldBuffers.removeItem(handle);
        }
#else
        // No Widi: nobody will return the buffer, so drop the hold right away.
        Mutex::Autolock _l(vd.mHeldBuffersLock);
        vd.mHeldBuffers.removeItem(handle);
#endif
    }
    sp<RenderTask> renderTask;   // task that produced this frame (may be NULL)
    sp<RefBase> heldBuffer;      // keeps the frame's buffer alive while held
    buffer_handle_t handle;
#ifdef INTEL_WIDI
    sp<IFrameListener> frameListener;
    HWCBufferHandleType handleType;
#endif
    int64_t renderTimestamp;
    int64_t mediaTimestamp;
};
547
// RAII hold on a BufferList buffer: when the last reference drops, the buffer
// is either returned to the list (if the list's geometry still matches) or
// freed (the list has since been resized).
struct VirtualDevice::BufferList::HeldBuffer : public RefBase {
    HeldBuffer(BufferList& list, buffer_handle_t handle, uint32_t w, uint32_t h)
        : mList(list),
          mHandle(handle),
          mWidth(w),
          mHeight(h) { }
    virtual ~HeldBuffer()
    {
        Mutex::Autolock _l(mList.mVd.mTaskLock);
        if (mWidth == mList.mWidth && mHeight == mList.mHeight) {
            VTRACE("Returning %s buffer %p (%ux%u) to list", mList.mName, mHandle, mWidth, mHeight);
            mList.mAvailableBuffers.push_back(mHandle);
        } else {
            // Stale geometry: free it and allow the list to allocate a
            // replacement at the new size (up to its limit).
            VTRACE("Deleting %s buffer %p (%ux%u)", mList.mName, mHandle, mWidth, mHeight);
            BufferManager* mgr = mList.mVd.mHwc.getBufferManager();
            mgr->freeGrallocBuffer((mHandle));
            if (mList.mBuffersToCreate < mList.mLimit)
                mList.mBuffersToCreate++;
        }
    }

    BufferList& mList;
    buffer_handle_t mHandle;
    uint32_t mWidth;   // geometry this buffer was allocated with
    uint32_t mHeight;
};
574
// A bounded pool of gralloc buffers of one format/usage.  `limit` caps how
// many buffers may exist at once; geometry is set lazily by get().
VirtualDevice::BufferList::BufferList(VirtualDevice& vd, const char* name,
                                      uint32_t limit, uint32_t format, uint32_t usage)
    : mVd(vd),
      mName(name),
      mLimit(limit),
      mFormat(format),
      mUsage(usage),
      mBuffersToCreate(0),
      mWidth(0),
      mHeight(0)
{
}
587
588buffer_handle_t VirtualDevice::BufferList::get(uint32_t width, uint32_t height, sp<RefBase>* heldBuffer)
589{
590    width = align_width(width);
591    height = align_height(height);
592    if (mWidth != width || mHeight != height) {
593        ITRACE("%s buffers changing from %dx%d to %dx%d",
594                mName, mWidth, mHeight, width, height);
595        clear();
596        mWidth = width;
597        mHeight = height;
598        mBuffersToCreate = mLimit;
599    }
600
601    buffer_handle_t handle;
602    if (mAvailableBuffers.empty()) {
603        if (mBuffersToCreate <= 0)
604            return NULL;
605        BufferManager* mgr = mVd.mHwc.getBufferManager();
606        handle = reinterpret_cast<buffer_handle_t>(
607            mgr->allocGrallocBuffer(width, height, mFormat, mUsage));
608        if (handle == NULL){
609            ETRACE("failed to allocate %s buffer", mName);
610            return NULL;
611        }
612        mBuffersToCreate--;
613    }
614    else {
615        handle = *mAvailableBuffers.begin();
616        mAvailableBuffers.erase(mAvailableBuffers.begin());
617    }
618    *heldBuffer = new HeldBuffer(*this, handle, width, height);
619    return handle;
620}
621
622void VirtualDevice::BufferList::clear()
623{
624    if (mWidth != 0 || mHeight != 0)
625        ITRACE("Releasing %s buffers (%ux%u)", mName, mWidth, mHeight);
626    if (!mAvailableBuffers.empty()) {
627        // iterate the list and call freeGraphicBuffer
628        for (List<buffer_handle_t>::iterator i = mAvailableBuffers.begin(); i != mAvailableBuffers.end(); ++i) {
629            VTRACE("Deleting the gralloc buffer associated with handle (%p)", (*i));
630            mVd.mHwc.getBufferManager()->freeGrallocBuffer((*i));
631        }
632        mAvailableBuffers.clear();
633    }
634    mWidth = 0;
635    mHeight = 0;
636}
637
// Construct the virtual (Widi) display device.  Sets up the two buffer
// pools (NV12 CSC output for the encoder, BGRA for RGB upscaling); real
// initialization happens in initialize().
VirtualDevice::VirtualDevice(Hwcomposer& hwc)
    : mProtectedMode(false),
      mCscBuffers(*this, "CSC",
                  NUM_CSC_BUFFERS, DisplayQuery::queryNV12Format(),
                  GRALLOC_USAGE_HW_VIDEO_ENCODER | GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_PRIVATE_1),
      mRgbUpscaleBuffers(*this, "RGB upscale",
                         NUM_SCALING_BUFFERS, HAL_PIXEL_FORMAT_BGRA_8888,
                         GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_HW_RENDER),
      mInitialized(false),
      mHwc(hwc),
      mPayloadManager(NULL),
      mVsyncObserver(NULL),
      mOrigContentWidth(0),
      mOrigContentHeight(0),
      mFirstVideoFrame(true),
      mLastConnectionStatus(false),
      mCachedBufferCapcity(16),
      mDecWidth(0),
      mDecHeight(0),
      mFpsDivider(1)
{
    CTRACE();
#ifdef INTEL_WIDI
    mNextConfig.frameServerActive = false;
#endif
}
664
// Destructor only warns if deinitialize() was not called first; actual
// teardown lives in deinitialize().
VirtualDevice::~VirtualDevice()
{
    WARN_IF_NOT_DEINIT();
}
669
670sp<VirtualDevice::CachedBuffer> VirtualDevice::getMappedBuffer(buffer_handle_t handle)
671{
672    ssize_t index = mMappedBufferCache.indexOfKey(handle);
673    sp<CachedBuffer> cachedBuffer;
674    if (index == NAME_NOT_FOUND) {
675        if (mMappedBufferCache.size() > mCachedBufferCapcity)
676            mMappedBufferCache.clear();
677
678        cachedBuffer = new CachedBuffer(mHwc.getBufferManager(), handle);
679        mMappedBufferCache.add(handle, cachedBuffer);
680    } else {
681        cachedBuffer = mMappedBufferCache[index];
682    }
683
684    return cachedBuffer;
685}
686
687bool VirtualDevice::threadLoop()
688{
689    sp<Task> task;
690    {
691        Mutex::Autolock _l(mTaskLock);
692        while (mTasks.empty()) {
693            mRequestQueued.wait(mTaskLock);
694        }
695        task = *mTasks.begin();
696        mTasks.erase(mTasks.begin());
697    }
698    if (task != NULL) {
699        task->run(*this);
700        task = NULL;
701    }
702    mRequestDequeued.signal();
703
704    return true;
705}
706#ifdef INTEL_WIDI
// Activates the Widi frame server: installs the type-change listener and
// resets the frame-processing policy to defaults.  Takes effect on the next
// prepare(), which copies mNextConfig into mCurrentConfig.
status_t VirtualDevice::start(sp<IFrameTypeChangeListener> typeChangeListener)
{
    ITRACE();
    Mutex::Autolock _l(mConfigLock);
    mNextConfig.typeChangeListener = typeChangeListener;
    mNextConfig.frameListener = NULL;
    mNextConfig.policy.scaledWidth = 0;
    mNextConfig.policy.scaledHeight = 0;
    mNextConfig.policy.xdpi = 96;
    mNextConfig.policy.ydpi = 96;
    mNextConfig.policy.refresh = 60;
    mNextConfig.extendedModeEnabled =
        Hwcomposer::getInstance().getDisplayAnalyzer()->isVideoExtModeEnabled();
    mVideoFramerate = 0;
    mFirstVideoFrame = true;
    mNextConfig.frameServerActive = true;
    // Force one-time re-notification of frame type and buffer info to the
    // new listener.
    mNextConfig.forceNotifyFrameType = true;
    mNextConfig.forceNotifyBufferInfo = true;

    return NO_ERROR;
}
728
// Deactivates the Widi frame server and releases the CSC buffer pool.
// isConnected is currently unused.  Takes effect on the next prepare().
status_t VirtualDevice::stop(bool isConnected)
{
    ITRACE();
    Mutex::Autolock _l(mConfigLock);
    mNextConfig.typeChangeListener = NULL;
    mNextConfig.frameListener = NULL;
    mNextConfig.policy.scaledWidth = 0;
    mNextConfig.policy.scaledHeight = 0;
    mNextConfig.policy.xdpi = 96;
    mNextConfig.policy.ydpi = 96;
    mNextConfig.policy.refresh = 60;
    mNextConfig.frameServerActive = false;
    mNextConfig.extendedModeEnabled = false;
    mNextConfig.forceNotifyFrameType = false;
    mNextConfig.forceNotifyBufferInfo = false;
    {
        // CSC buffers are only needed while the frame server runs.
        Mutex::Autolock _l(mTaskLock);
        mCscBuffers.clear();
    }
    return NO_ERROR;
}
750#endif
751
752bool VirtualDevice::isFrameServerActive() const
753{
754#ifdef INTEL_WIDI
755    return  mCurrentConfig.frameServerActive;
756#endif
757    return false;
758}
759
760#ifdef INTEL_WIDI
761/* TODO: 64-bit - this handle of size 32-bit is a problem for 64-bit */
762status_t VirtualDevice::notifyBufferReturned(int handle)
763{
764    CTRACE();
765    Mutex::Autolock _l(mHeldBuffersLock);
766    ssize_t index = mHeldBuffers.indexOfKey((buffer_handle_t)handle);
767    if (index == NAME_NOT_FOUND) {
768        ETRACE("Couldn't find returned khandle %p", handle);
769    } else {
770        VTRACE("Removing heldBuffer associated with handle (%p)", handle);
771        mHeldBuffers.removeItemsAt(index, 1);
772    }
773    return NO_ERROR;
774}
775
// Installs the Widi frame-processing policy (scaling/dpi/refresh) and the
// per-frame listener.  Takes effect on the next prepare().
status_t VirtualDevice::setResolution(const FrameProcessingPolicy& policy, sp<IFrameListener> listener)
{
    ITRACE();
    Mutex::Autolock _l(mConfigLock);
    mNextConfig.frameListener = listener;
    mNextConfig.policy = policy;
    return NO_ERROR;
}
784#endif
785static bool canUseDirectly(const hwc_display_contents_1_t *display, size_t n)
786{
787    const hwc_layer_1_t& fbTarget = display->hwLayers[display->numHwLayers-1];
788    const hwc_layer_1_t& layer = display->hwLayers[n];
789    const IMG_native_handle_t* nativeHandle = reinterpret_cast<const IMG_native_handle_t*>(layer.handle);
790    return !(layer.flags & HWC_SKIP_LAYER) && layer.transform == 0 &&
791            layer.blending == HWC_BLENDING_PREMULT &&
792            layer.sourceCropf.left == 0 && layer.sourceCropf.top == 0 &&
793            layer.displayFrame.left == 0 && layer.displayFrame.top == 0 &&
794            layer.sourceCropf.right == fbTarget.sourceCropf.right &&
795            layer.sourceCropf.bottom == fbTarget.sourceCropf.bottom &&
796            layer.displayFrame.right == fbTarget.displayFrame.right &&
797            layer.displayFrame.bottom == fbTarget.displayFrame.bottom &&
798            layer.planeAlpha == 255 && layer.handle != NULL &&
799            (nativeHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888 ||
800             nativeHandle->iFormat == HAL_PIXEL_FORMAT_BGRA_8888);
801}
802
// Early prepare hook; nothing to do for the virtual display beyond the
// init check.
bool VirtualDevice::prePrepare(hwc_display_contents_1_t *display)
{
    RETURN_FALSE_IF_NOT_INIT();
    return true;
}
808
809bool VirtualDevice::prepare(hwc_display_contents_1_t *display)
810{
811    RETURN_FALSE_IF_NOT_INIT();
812
813    mRenderTimestamp = systemTime();
814    mVspInUse = false;
815    mExpectAcquireFences = false;
816    mIsForceCloneMode = false;
817#ifdef INTEL_WIDI
818    {
819        Mutex::Autolock _l(mConfigLock);
820        mCurrentConfig = mNextConfig;
821    }
822#endif
823
824    bool shouldBeConnected = (display != NULL);
825    if (shouldBeConnected != mLastConnectionStatus) {
826        // calling this will reload the property 'hwc.video.extmode.enable'
827        Hwcomposer::getInstance().getDisplayAnalyzer()->isVideoExtModeEnabled();
828        char propertyVal[PROPERTY_VALUE_MAX];
829        if (property_get("widi.compose.rgb_upscale", propertyVal, NULL) > 0)
830            mVspUpscale = atoi(propertyVal);
831        if (property_get("widi.compose.all_video", propertyVal, NULL) > 0)
832            mDebugVspClear = atoi(propertyVal);
833        if (property_get("widi.compose.dump", propertyVal, NULL) > 0)
834            mDebugVspDump = atoi(propertyVal);
835
836        Hwcomposer::getInstance().getMultiDisplayObserver()->notifyWidiConnectionStatus(shouldBeConnected);
837        mLastConnectionStatus = shouldBeConnected;
838    }
839
840    if (!display) {
841        // No image. We're done with any mappings and CSC buffers.
842        mMappedBufferCache.clear();
843        Mutex::Autolock _l(mTaskLock);
844        mCscBuffers.clear();
845        return true;
846    }
847
848#ifdef INTEL_WIDI
849    if (!mCurrentConfig.frameServerActive) {
850        // We're done with CSC buffers, since we blit to outbuf in this mode.
851        // We want to keep mappings cached, so we don't clear mMappedBufferCache.
852        Mutex::Autolock _l(mTaskLock);
853        mCscBuffers.clear();
854    }
855#else
856    Mutex::Autolock _l(mTaskLock);
857    mCscBuffers.clear();
858#endif
859
860    // by default send the FRAMEBUFFER_TARGET layer (composited image)
861    const ssize_t fbTarget = display->numHwLayers-1;
862    mRgbLayer = fbTarget;
863    mYuvLayer = -1;
864
865    DisplayAnalyzer *analyzer = mHwc.getDisplayAnalyzer();
866
867    mProtectedMode = false;
868#ifdef INTEL_WIDI
869    if (mCurrentConfig.typeChangeListener != NULL &&
870        !analyzer->isOverlayAllowed() &&
871        analyzer->getVideoInstances() <= 1) {
872        if (mCurrentConfig.typeChangeListener->shutdownVideo() != OK) {
873            ITRACE("Waiting for prior encoder session to shut down...");
874        }
875        /* Setting following flag to true will enable us to call bufferInfoChanged() in clone mode. */
876        mNextConfig.forceNotifyBufferInfo = true;
877        mYuvLayer = -1;
878        mRgbLayer = -1;
879        // Skipping frames.
880        // Fences aren't set in prepare, and we don't need them here, but they'll
881        // be set later and we have to close them. Don't log a warning in this case.
882        mExpectAcquireFences = true;
883        for (ssize_t i = 0; i < fbTarget; i++)
884            display->hwLayers[i].compositionType = HWC_OVERLAY;
885        return true;
886    }
887
888    for (ssize_t i = 0; i < fbTarget; i++) {
889        hwc_layer_1_t& layer = display->hwLayers[i];
890        if (analyzer->isVideoLayer(layer) && (mCurrentConfig.extendedModeEnabled || mDebugVspClear || analyzer->isProtectedLayer(layer))) {
891            if (mCurrentConfig.frameServerActive && mCurrentConfig.extendedModeEnabled) {
892                // If composed in surface flinger, then stream fbtarget.
893                if ((layer.flags & HWC_SKIP_LAYER) && !analyzer->ignoreVideoSkipFlag()) {
894                    continue;
895                }
896
897                /* If the resolution of the video layer is less than QCIF, then we are going to play it in clone mode only.*/
898                uint32_t vidContentWidth = layer.sourceCropf.right - layer.sourceCropf.left;
899                uint32_t vidContentHeight = layer.sourceCropf.bottom - layer.sourceCropf.top;
900                if (vidContentWidth < QCIF_WIDTH || vidContentHeight < QCIF_HEIGHT) {
901                    VTRACE("Ingoring layer %d which is too small for extended mode", i);
902                    continue;
903                }
904            }
905            mYuvLayer = i;
906            mProtectedMode = analyzer->isProtectedLayer(layer);
907            break;
908        }
909    }
910#endif
911
912    if (mYuvLayer == -1) {
913        mFirstVideoFrame = true;
914        mDecWidth = 0;
915        mDecHeight = 0;
916    }
917#ifdef INTEL_WIDI
918    if (mCurrentConfig.frameServerActive && mCurrentConfig.extendedModeEnabled && mYuvLayer != -1) {
919        if (handleExtendedMode(display)) {
920            mYuvLayer = -1;
921            mRgbLayer = -1;
922            // Extended mode is successful.
923            // Fences aren't set in prepare, and we don't need them here, but they'll
924            // be set later and we have to close them. Don't log a warning in this case.
925            mExpectAcquireFences = true;
926            for (ssize_t i = 0; i < fbTarget; i++)
927                display->hwLayers[i].compositionType = HWC_OVERLAY;
928            return true;
929        }
930        // if error in playback file , switch to clone mode
931        WTRACE("Error, falling back to clone mode");
932        mIsForceCloneMode = true;
933        mYuvLayer = -1;
934    }
935#endif
936    if (mYuvLayer == 0 && fbTarget == 1) {
937        // No RGB layer, so tell queueCompose to use blank RGB in fbtarget.
938        mRgbLayer = -1;
939    }
940    else if (mYuvLayer == 0 && fbTarget == 2) {
941        if (canUseDirectly(display, 1))
942            mRgbLayer = 1;
943    }
944    else if (mYuvLayer == -1 && fbTarget == 1) {
945        if (canUseDirectly(display, 0))
946            mRgbLayer = 0;
947    }
948
949    for (ssize_t i = 0; i < fbTarget; i++) {
950        hwc_layer_1_t& layer = display->hwLayers[i];
951        if (i == mYuvLayer || i == mRgbLayer || mRgbLayer != fbTarget)
952            layer.compositionType = HWC_OVERLAY;
953        else
954            layer.compositionType = HWC_FRAMEBUFFER;
955    }
956    if (mYuvLayer != -1 && mRgbLayer == fbTarget)
957        // This tells SurfaceFlinger to render this layer by writing transparent pixels
958        // to this layer's target region within the framebuffer. This effectively punches
959        // a hole through any content that is supposed to show below the video, and the
960        // video can be seen through this hole when we composite the YUV and RGBA layers
961        // together. Content above will draw on top of this hole and can cover the video.
962        // This has no effect when the video is the bottommost layer.
963        display->hwLayers[mYuvLayer].hints |= HWC_HINT_CLEAR_FB;
964
965#ifdef INTEL_WIDI
966    // we're streaming fbtarget, so send onFramePrepare and wait for composition to happen
967    if (mCurrentConfig.frameListener != NULL)
968        mCurrentConfig.frameListener->onFramePrepare(mRenderTimestamp, -1);
969#endif
970    return true;
971}
972
973bool VirtualDevice::commit(hwc_display_contents_1_t *display, IDisplayContext *context)
974{
975    RETURN_FALSE_IF_NOT_INIT();
976
977    if (display != NULL && (mRgbLayer != -1 || mYuvLayer != -1))
978        sendToWidi(display);
979
980    if (mVspEnabled && !mVspInUse) {
981        mVaMapCache.clear();
982        sp<DisableVspTask> disableVsp = new DisableVspTask();
983        mMappedBufferCache.clear();
984        Mutex::Autolock _l(mTaskLock);
985        mRgbUpscaleBuffers.clear();
986        mTasks.push(disableVsp);
987        mRequestQueued.signal();
988        mVspEnabled = false;
989    }
990
991    if (display != NULL) {
992        // All acquire fences should be copied somewhere else or closed by now
993        // and set to -1 in these structs except in the case of extended mode.
994        // Make sure the fences are closed and log a warning if not in extended mode.
995        if (display->outbufAcquireFenceFd != -1) {
996            if (!mExpectAcquireFences)
997                WTRACE("outbuf acquire fence (fd=%d) not yet saved or closed", display->outbufAcquireFenceFd);
998            CLOSE_FENCE(display->outbufAcquireFenceFd);
999        }
1000        for (size_t i = 0; i < display->numHwLayers; i++) {
1001            hwc_layer_1_t& layer = display->hwLayers[i];
1002            if (layer.acquireFenceFd != -1) {
1003                if (!mExpectAcquireFences && (i < display->numHwLayers-1 || i == (size_t) mRgbLayer))
1004                    WTRACE("layer %zd acquire fence (fd=%zd) not yet saved or closed", i, layer.acquireFenceFd);
1005                CLOSE_FENCE(layer.acquireFenceFd);
1006            }
1007        }
1008    }
1009
1010    return true;
1011}
1012
1013bool VirtualDevice::sendToWidi(hwc_display_contents_1_t *display)
1014{
1015    VTRACE("RGB=%d, YUV=%d", mRgbLayer, mYuvLayer);
1016
1017    if (mYuvLayer == -1 && mRgbLayer == -1)
1018        return true;
1019
1020    if (mYuvLayer != -1) {
1021        mVspInUse = true;
1022        if (queueCompose(display))
1023            return true;
1024    }
1025
1026    return queueColorConvert(display);
1027}
1028
1029bool VirtualDevice::queueCompose(hwc_display_contents_1_t *display)
1030{
1031    hwc_layer_1_t& yuvLayer = display->hwLayers[mYuvLayer];
1032    if (yuvLayer.handle == NULL) {
1033        ETRACE("No video handle");
1034        return false;
1035    }
1036#ifdef INTEL_WIDI
1037    if (!mCurrentConfig.frameServerActive && display->outbuf == NULL) {
1038#else
1039    if (display->outbuf == NULL) {
1040#endif
1041        ETRACE("No outbuf");
1042        return true; // fallback would be pointless
1043    }
1044
1045    sp<ComposeTask> composeTask = new ComposeTask();
1046
1047    sp<RefBase> heldBuffer;
1048    sp<OnFrameReadyTask> frameReadyTask;
1049    Mutex::Autolock _l(mTaskLock);
1050
1051    float upscale_x = 1.0;
1052    float upscale_y = 1.0;
1053    hwc_layer_1_t& fbTarget = display->hwLayers[display->numHwLayers-1];
1054    composeTask->outWidth = fbTarget.sourceCropf.right - fbTarget.sourceCropf.left;
1055    composeTask->outHeight = fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top;
1056
1057    bool scaleRgb = false;
1058#ifdef INTEL_WIDI
1059    if (mCurrentConfig.frameServerActive) {
1060        if (mVspUpscale) {
1061            composeTask->outWidth = mCurrentConfig.policy.scaledWidth;
1062            composeTask->outHeight = mCurrentConfig.policy.scaledHeight;
1063            upscale_x = mCurrentConfig.policy.scaledWidth/(fbTarget.sourceCropf.right - fbTarget.sourceCropf.left);
1064            upscale_y = mCurrentConfig.policy.scaledHeight/(fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top);
1065            scaleRgb = composeTask->outWidth != fbTarget.sourceCropf.right - fbTarget.sourceCropf.left ||
1066                       composeTask->outHeight != fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top;
1067        }
1068
1069        composeTask->outputHandle = mCscBuffers.get(composeTask->outWidth, composeTask->outHeight, &heldBuffer);
1070        if (composeTask->outputHandle == NULL) {
1071            WTRACE("Out of CSC buffers, dropping frame");
1072            return true;
1073        }
1074    } else {
1075        composeTask->outputHandle = display->outbuf;
1076    }
1077#else
1078    composeTask->outputHandle = display->outbuf;
1079#endif
1080
1081    vspPrepare(composeTask->outWidth, composeTask->outHeight);
1082
1083    composeTask->videoCachedBuffer = getMappedBuffer(yuvLayer.handle);
1084    if (composeTask->videoCachedBuffer == NULL) {
1085        ETRACE("Couldn't map video handle %p", yuvLayer.handle);
1086        return false;
1087    }
1088    if (composeTask->videoCachedBuffer->mapper == NULL) {
1089        ETRACE("Src mapper gone");
1090        return false;
1091    }
1092    composeTask->heldVideoBuffer = new HeldDecoderBuffer(this, composeTask->videoCachedBuffer);
1093    IVideoPayloadManager::MetaData videoMetadata;
1094    if (!mPayloadManager->getMetaData(composeTask->videoCachedBuffer->mapper, &videoMetadata)) {
1095        ETRACE("Failed to map video payload info");
1096        return false;
1097    }
1098    if (videoMetadata.normalBuffer.width == 0 || videoMetadata.normalBuffer.height == 0) {
1099        ETRACE("Bad video metadata for handle %p", yuvLayer.handle);
1100        return false;
1101    }
1102    if (videoMetadata.normalBuffer.khandle == 0) {
1103        ETRACE("Bad khandle");
1104        return false;
1105    }
1106
1107    VARectangle& output_region = composeTask->output_region;
1108    output_region.x = static_cast<uint32_t>(yuvLayer.displayFrame.left*upscale_x) & ~1;
1109    output_region.y = static_cast<uint32_t>(yuvLayer.displayFrame.top*upscale_y) & ~1;
1110    output_region.width = (static_cast<uint32_t>(yuvLayer.displayFrame.right*upscale_y+1) & ~1) - output_region.x;
1111    output_region.height = (static_cast<uint32_t>(yuvLayer.displayFrame.bottom*upscale_y+1) & ~1) - output_region.y;
1112
1113    uint32_t videoWidth;
1114    uint32_t videoHeight;
1115    if (videoMetadata.transform == 0 || videoMetadata.transform == HAL_TRANSFORM_ROT_180) {
1116        videoWidth = videoMetadata.normalBuffer.width;
1117        videoHeight = videoMetadata.normalBuffer.height;
1118    } else {
1119        videoWidth = videoMetadata.normalBuffer.height;
1120        videoHeight = videoMetadata.normalBuffer.width;
1121    }
1122
1123    // Layer source crop info is based on an unrotated, unscaled buffer.
1124    // Rotate the rectangle to get the source crop we'd use for a rotated, unscaled buffer.
1125    hwc_frect_t rotatedCrop;
1126    switch (videoMetadata.transform) {
1127    default:
1128        rotatedCrop = yuvLayer.sourceCropf;
1129        break;
1130    case HAL_TRANSFORM_ROT_90:
1131        rotatedCrop.left = yuvLayer.sourceCropf.top;
1132        rotatedCrop.top = videoHeight - yuvLayer.sourceCropf.right;
1133        rotatedCrop.right = yuvLayer.sourceCropf.bottom;
1134        rotatedCrop.bottom = videoHeight - yuvLayer.sourceCropf.left;
1135        break;
1136    case HAL_TRANSFORM_ROT_180:
1137        rotatedCrop.left = videoWidth - yuvLayer.sourceCropf.right;
1138        rotatedCrop.top = videoHeight - yuvLayer.sourceCropf.bottom;
1139        rotatedCrop.right = videoWidth - yuvLayer.sourceCropf.left;
1140        rotatedCrop.bottom = videoHeight - yuvLayer.sourceCropf.top;
1141        break;
1142    case HAL_TRANSFORM_ROT_270:
1143        rotatedCrop.left = videoWidth - yuvLayer.sourceCropf.bottom;
1144        rotatedCrop.top = yuvLayer.sourceCropf.left;
1145        rotatedCrop.right = videoWidth - yuvLayer.sourceCropf.top;
1146        rotatedCrop.bottom = yuvLayer.sourceCropf.right;
1147        break;
1148    }
1149
1150    float factor_x = output_region.width / (rotatedCrop.right - rotatedCrop.left);
1151    float factor_y = output_region.height / (rotatedCrop.bottom - rotatedCrop.top);
1152
1153    uint32_t scaleWidth = videoWidth * factor_x;
1154    uint32_t scaleHeight = videoHeight * factor_y;
1155
1156    scaleWidth &= ~1;
1157    scaleHeight &= ~1;
1158
1159    IVideoPayloadManager::Buffer info;
1160    if (!getFrameOfSize(scaleWidth, scaleHeight, videoMetadata, info)) {
1161        //Returning true as else we fall into the queueColorConvert
1162        //resulting into scrambled frames for protected content.
1163        ITRACE("scaled frame not yet available.");
1164        return true;
1165    }
1166
1167    composeTask->videoKhandle = info.khandle;
1168    composeTask->videoStride = info.lumaStride;
1169    composeTask->videoBufHeight = info.bufHeight;
1170    composeTask->videoTiled = info.tiled;
1171
1172    // rotatedCrop accounts for rotation. Now account for any scaling along each dimension.
1173    hwc_frect_t scaledCrop = rotatedCrop;
1174    if (info.width < videoWidth) {
1175        float factor = static_cast<float>(info.width) / videoWidth;
1176        scaledCrop.left *= factor;
1177        scaledCrop.right *= factor;
1178    }
1179    if (info.height < videoHeight) {
1180        float factor = static_cast<float>(info.height) / videoHeight;
1181        scaledCrop.top *= factor;
1182        scaledCrop.bottom *= factor;
1183    }
1184
1185    VARectangle& surface_region = composeTask->surface_region;
1186    surface_region.x = static_cast<int>(scaledCrop.left) + info.offsetX;
1187    surface_region.y = static_cast<int>(scaledCrop.top) + info.offsetY;
1188    surface_region.width = static_cast<int>(scaledCrop.right - scaledCrop.left);
1189    surface_region.height = static_cast<int>(scaledCrop.bottom - scaledCrop.top);
1190
1191    VTRACE("Want to take (%d,%d)-(%d,%d) region from %dx%d video (in %dx%d buffer) and output to (%d,%d)-(%d,%d)",
1192            surface_region.x, surface_region.y,
1193            surface_region.x + surface_region.width, surface_region.y + surface_region.height,
1194            info.width, info.height,
1195            info.bufWidth, info.bufHeight,
1196            output_region.x, output_region.y,
1197            output_region.x + output_region.width, output_region.y + output_region.height);
1198
1199    if (surface_region.x + surface_region.width > static_cast<int>(info.width + info.offsetX) ||
1200        surface_region.y + surface_region.height > static_cast<int>(info.height + info.offsetY))
1201    {
1202        ETRACE("Source crop exceeds video dimensions: (%d,%d)-(%d,%d) > %ux%u",
1203                surface_region.x, surface_region.y,
1204                surface_region.x + surface_region.width, surface_region.y + surface_region.height,
1205                info.width, info.height);
1206        return false;
1207    }
1208
1209    if (surface_region.width > output_region.width || surface_region.height > output_region.height) {
1210        // VSP can upscale but can't downscale video, so use blank video
1211        // until we start getting downscaled frames.
1212        surface_region.x = 0;
1213        surface_region.y = 0;
1214        surface_region.width = composeTask->outWidth;
1215        surface_region.height = composeTask->outHeight;
1216        output_region = surface_region;
1217        composeTask->videoKhandle = 0;
1218        composeTask->videoStride = composeTask->outWidth;
1219        composeTask->videoBufHeight = composeTask->outHeight;
1220        composeTask->videoTiled = false;
1221    }
1222
1223    composeTask->yuvAcquireFenceFd = yuvLayer.acquireFenceFd;
1224    yuvLayer.acquireFenceFd = -1;
1225
1226    composeTask->outbufAcquireFenceFd = display->outbufAcquireFenceFd;
1227    display->outbufAcquireFenceFd = -1;
1228
1229    int retireFd = sw_sync_fence_create(mSyncTimelineFd, "widi_compose_retire", mNextSyncPoint);
1230    yuvLayer.releaseFenceFd = retireFd;
1231
1232    if (mRgbLayer == -1) {
1233        CLOSE_FENCE(fbTarget.acquireFenceFd);
1234    } else {
1235        hwc_layer_1_t& rgbLayer = display->hwLayers[mRgbLayer];
1236        composeTask->rgbAcquireFenceFd = rgbLayer.acquireFenceFd;
1237        rgbLayer.acquireFenceFd = -1;
1238        rgbLayer.releaseFenceFd = dup(retireFd);
1239    }
1240
1241    mNextSyncPoint++;
1242    composeTask->syncTimelineFd = mSyncTimelineFd;
1243
1244    if (mRgbLayer != -1)
1245    {
1246        hwc_layer_1_t& rgbLayer = display->hwLayers[mRgbLayer];
1247        if (rgbLayer.handle == NULL) {
1248            ETRACE("No RGB handle");
1249            return false;
1250        }
1251
1252        if (scaleRgb) {
1253            buffer_handle_t scalingBuffer;
1254            sp<RefBase> heldUpscaleBuffer;
1255            while ((scalingBuffer = mRgbUpscaleBuffers.get(composeTask->outWidth, composeTask->outHeight, &heldUpscaleBuffer)) == NULL &&
1256                   !mTasks.empty()) {
1257                VTRACE("Waiting for free RGB upscale buffer...");
1258                mRequestDequeued.wait(mTaskLock);
1259            }
1260            if (scalingBuffer == NULL) {
1261                ETRACE("Couldn't get scaling buffer");
1262                return false;
1263            }
1264            BufferManager* mgr = mHwc.getBufferManager();
1265            crop_t destRect;
1266            destRect.x = 0;
1267            destRect.y = 0;
1268            destRect.w = composeTask->outWidth;
1269            destRect.h = composeTask->outHeight;
1270            if (!mgr->blit(rgbLayer.handle, scalingBuffer, destRect, true, true))
1271                return true;
1272            composeTask->rgbHandle = scalingBuffer;
1273            composeTask->heldRgbHandle = heldUpscaleBuffer;
1274        }
1275        else {
1276            unsigned int pixel_format = VA_FOURCC_BGRA;
1277            const IMG_native_handle_t* nativeHandle = reinterpret_cast<const IMG_native_handle_t*>(rgbLayer.handle);
1278            if (nativeHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888)
1279                pixel_format = VA_FOURCC_RGBA;
1280            mRgbUpscaleBuffers.clear();
1281            ssize_t index = mVaMapCache.indexOfKey(rgbLayer.handle);
1282            if (index == NAME_NOT_FOUND) {
1283                composeTask->mappedRgbIn = new VAMappedHandleObject(va_dpy, rgbLayer.handle, composeTask->outWidth, composeTask->outHeight, pixel_format);
1284                mVaMapCache.add(rgbLayer.handle, composeTask->mappedRgbIn);
1285            }
1286            else
1287                composeTask->mappedRgbIn = mVaMapCache[index];
1288            if (composeTask->mappedRgbIn->surface == 0) {
1289                ETRACE("Unable to map RGB surface");
1290                return false;
1291            }
1292        }
1293    }
1294    else
1295        composeTask->mappedRgbIn = NULL;
1296
1297    mTasks.push_back(composeTask);
1298    mRequestQueued.signal();
1299#ifdef INTEL_WIDI
1300    if (mCurrentConfig.frameServerActive) {
1301
1302        FrameInfo inputFrameInfo;
1303        memset(&inputFrameInfo, 0, sizeof(inputFrameInfo));
1304        inputFrameInfo.isProtected = mProtectedMode;
1305        inputFrameInfo.frameType = HWC_FRAMETYPE_FRAME_BUFFER;
1306        if (mVspUpscale) {
1307            float upscale_x = (rotatedCrop.right - rotatedCrop.left) /
1308                              (yuvLayer.displayFrame.right - yuvLayer.displayFrame.left);
1309            float upscale_y = (rotatedCrop.bottom - rotatedCrop.top) /
1310                              (yuvLayer.displayFrame.bottom - yuvLayer.displayFrame.top);
1311            float upscale = upscale_x > upscale_y ? upscale_x : upscale_y;
1312            if (upscale <= 1.0)
1313                upscale = 1.0;
1314            inputFrameInfo.contentWidth = (fbTarget.sourceCropf.right - fbTarget.sourceCropf.left)*upscale;
1315            inputFrameInfo.contentHeight = (fbTarget.sourceCropf.bottom - fbTarget.sourceCropf.top)*upscale;
1316        }
1317        else {
1318            inputFrameInfo.contentWidth = composeTask->outWidth;
1319            inputFrameInfo.contentHeight = composeTask->outHeight;
1320        }
1321        inputFrameInfo.contentFrameRateN = 0;
1322        inputFrameInfo.contentFrameRateD = 0;
1323        FrameInfo outputFrameInfo = inputFrameInfo;
1324
1325        BufferManager* mgr = mHwc.getBufferManager();
1326        DataBuffer* dataBuf = mgr->lockDataBuffer(composeTask->outputHandle);
1327        outputFrameInfo.contentWidth = composeTask->outWidth;
1328        outputFrameInfo.contentHeight = composeTask->outHeight;
1329        outputFrameInfo.bufferWidth = dataBuf->getWidth();
1330        outputFrameInfo.bufferHeight = dataBuf->getHeight();
1331        outputFrameInfo.lumaUStride = dataBuf->getWidth();
1332        outputFrameInfo.chromaUStride = dataBuf->getWidth();
1333        outputFrameInfo.chromaVStride = dataBuf->getWidth();
1334        mgr->unlockDataBuffer(dataBuf);
1335
1336        queueFrameTypeInfo(inputFrameInfo);
1337        if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0)
1338            return true; // This isn't a failure, WiDi just doesn't want frames right now.
1339        queueBufferInfo(outputFrameInfo);
1340
1341        if (mCurrentConfig.frameListener != NULL) {
1342            frameReadyTask = new OnFrameReadyTask();
1343            frameReadyTask->renderTask = composeTask;
1344            frameReadyTask->heldBuffer = heldBuffer;
1345            frameReadyTask->frameListener = mCurrentConfig.frameListener;
1346            frameReadyTask->handle = composeTask->outputHandle;
1347            frameReadyTask->handleType = HWC_HANDLE_TYPE_GRALLOC;
1348            frameReadyTask->renderTimestamp = mRenderTimestamp;
1349            frameReadyTask->mediaTimestamp = -1;
1350            mTasks.push_back(frameReadyTask);
1351        }
1352    }
1353    else {
1354        display->retireFenceFd = dup(retireFd);
1355    }
1356#else
1357    display->retireFenceFd = dup(retireFd);
1358#endif
1359
1360    return true;
1361}
1362
// Fallback output path: copy/convert the selected RGB layer into outbuf (or a
// CSC buffer in WiDi frame-server mode) when no VSP compose was queued.
// Fast path: if source and destination differ only in R/B channel order
// (RGBA<->BGRA), do a synchronous CPU color swap. Otherwise queue an
// asynchronous BlitTask and wire up the sw_sync release/retire fences.
// @return true when a frame was handled or queued; false on error.
bool VirtualDevice::queueColorConvert(hwc_display_contents_1_t *display)
{
    if (mRgbLayer == -1) {
        ETRACE("RGB layer not set");
        return false;
    }
    hwc_layer_1_t& layer = display->hwLayers[mRgbLayer];
    if (layer.handle == NULL) {
        ETRACE("RGB layer has no handle set");
        return false;
    }
    if (display->outbuf == NULL) {
        ETRACE("outbuf is not set");
        return false;
    }

    {
        const IMG_native_handle_t* nativeSrcHandle = reinterpret_cast<const IMG_native_handle_t*>(layer.handle);
        const IMG_native_handle_t* nativeDestHandle = reinterpret_cast<const IMG_native_handle_t*>(display->outbuf);

        // Pure channel-swap case (RGBA <-> BGRA, same layout otherwise):
        // handled synchronously on the CPU, no task queued.
        if ((nativeSrcHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888 &&
            nativeDestHandle->iFormat == HAL_PIXEL_FORMAT_BGRA_8888) ||
            (nativeSrcHandle->iFormat == HAL_PIXEL_FORMAT_BGRA_8888 &&
            nativeDestHandle->iFormat == HAL_PIXEL_FORMAT_RGBA_8888))
        {
            // Must wait for producers before touching the pixels directly.
            SYNC_WAIT_AND_CLOSE(layer.acquireFenceFd);
            SYNC_WAIT_AND_CLOSE(display->outbufAcquireFenceFd);
            display->retireFenceFd = -1;

            // synchronous in this case
            // Pixel count uses the stride rounded up to 32 — presumably the
            // gralloc row alignment; TODO confirm against gralloc config.
            colorSwap(layer.handle, display->outbuf, ((nativeSrcHandle->iWidth+31)&~31)*nativeSrcHandle->iHeight);
            // Workaround: Don't keep cached buffers. If the VirtualDisplaySurface gets destroyed,
            //             these would be unmapped on the next frame, after the buffers are destroyed,
            //             which is causing heap corruption, probably due to a double-free somewhere.
            mMappedBufferCache.clear();
            return true;
        }
    }

    sp<BlitTask> blitTask = new BlitTask();
    sp<OnFrameReadyTask> frameReadyTask;
    blitTask->destRect.x = 0;
    blitTask->destRect.y = 0;
    blitTask->destRect.w = layer.sourceCropf.right - layer.sourceCropf.left;
    blitTask->destRect.h = layer.sourceCropf.bottom - layer.sourceCropf.top;
    blitTask->srcHandle = layer.handle;

    sp<RefBase> heldBuffer;
    Mutex::Autolock _l(mTaskLock);

    // Hand the acquire fence to the task; it becomes responsible for it.
    blitTask->srcAcquireFenceFd = layer.acquireFenceFd;
    layer.acquireFenceFd = -1;

    blitTask->syncTimelineFd = mSyncTimelineFd;
    // Framebuffer after BlitTask::run() calls sw_sync_timeline_inc().
    layer.releaseFenceFd = sw_sync_fence_create(mSyncTimelineFd, "widi_blit_retire", mNextSyncPoint);
    mNextSyncPoint++;
#ifdef INTEL_WIDI
    if (mCurrentConfig.frameServerActive) {
        // Frame-server mode: blit into one of our own CSC buffers.
        blitTask->destHandle = mCscBuffers.get(blitTask->destRect.w, blitTask->destRect.h, &heldBuffer);
        blitTask->destAcquireFenceFd = -1;

        // we do not use retire fence in frameServerActive path.
        CLOSE_FENCE(display->retireFenceFd);

        // we use our own buffer, so just close this fence without a wait
        CLOSE_FENCE(display->outbufAcquireFenceFd);
    }
    else {
        blitTask->destHandle = display->outbuf;
        blitTask->destAcquireFenceFd = display->outbufAcquireFenceFd;
        // don't let TngDisplayContext::commitEnd() close this
        display->outbufAcquireFenceFd = -1;
        display->retireFenceFd = dup(layer.releaseFenceFd);
    }
#else
    blitTask->destHandle = display->outbuf;
    blitTask->destAcquireFenceFd = display->outbufAcquireFenceFd;
    // don't let TngDisplayContext::commitEnd() close this
    display->outbufAcquireFenceFd = -1;
    display->retireFenceFd = dup(layer.releaseFenceFd);
#endif
    if (blitTask->destHandle == NULL) {
        WTRACE("Out of CSC buffers, dropping frame");
        return false;
    }

    mTasks.push_back(blitTask);
    mRequestQueued.signal();
#ifdef INTEL_WIDI
    if (mCurrentConfig.frameServerActive) {
        // Describe the frame to WiDi and schedule the frame-ready callback.
        FrameInfo inputFrameInfo;
        memset(&inputFrameInfo, 0, sizeof(inputFrameInfo));
        inputFrameInfo.isProtected = mProtectedMode;
        FrameInfo outputFrameInfo;

        inputFrameInfo.frameType = HWC_FRAMETYPE_FRAME_BUFFER;
        inputFrameInfo.contentWidth = blitTask->destRect.w;
        inputFrameInfo.contentHeight = blitTask->destRect.h;
        inputFrameInfo.contentFrameRateN = 0;
        inputFrameInfo.contentFrameRateD = 0;
        outputFrameInfo = inputFrameInfo;

        BufferManager* mgr = mHwc.getBufferManager();
        DataBuffer* dataBuf = mgr->lockDataBuffer(blitTask->destHandle);
        outputFrameInfo.bufferWidth = dataBuf->getWidth();
        outputFrameInfo.bufferHeight = dataBuf->getHeight();
        outputFrameInfo.lumaUStride = dataBuf->getWidth();
        outputFrameInfo.chromaUStride = dataBuf->getWidth();
        outputFrameInfo.chromaVStride = dataBuf->getWidth();
        mgr->unlockDataBuffer(dataBuf);

        // In forced clone mode the type info was already sent by the caller.
        if (!mIsForceCloneMode)
            queueFrameTypeInfo(inputFrameInfo);

        if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0)
            return true; // This isn't a failure, WiDi just doesn't want frames right now.
        queueBufferInfo(outputFrameInfo);

        if (mCurrentConfig.frameListener != NULL) {
            frameReadyTask = new OnFrameReadyTask();
            frameReadyTask->renderTask = blitTask;
            frameReadyTask->heldBuffer = heldBuffer;
            frameReadyTask->frameListener = mCurrentConfig.frameListener;
            frameReadyTask->handle = blitTask->destHandle;
            frameReadyTask->handleType = HWC_HANDLE_TYPE_GRALLOC;
            frameReadyTask->renderTimestamp = mRenderTimestamp;
            frameReadyTask->mediaTimestamp = -1;
            mTasks.push_back(frameReadyTask);
        }
    }
#endif
    return true;
}
1497#ifdef INTEL_WIDI
1498bool VirtualDevice::handleExtendedMode(hwc_display_contents_1_t *display)
1499{
1500    FrameInfo inputFrameInfo;
1501    memset(&inputFrameInfo, 0, sizeof(inputFrameInfo));
1502    inputFrameInfo.isProtected = mProtectedMode;
1503
1504    hwc_layer_1_t& layer = display->hwLayers[mYuvLayer];
1505    if (layer.handle == NULL) {
1506        ETRACE("video layer has no handle set");
1507        return false;
1508    }
1509    sp<CachedBuffer> cachedBuffer;
1510    if ((cachedBuffer = getMappedBuffer(layer.handle)) == NULL) {
1511        ETRACE("Failed to map display buffer");
1512        return false;
1513    }
1514
1515    inputFrameInfo.frameType = HWC_FRAMETYPE_VIDEO;
1516    // for video mode let 30 fps be the default value.
1517    inputFrameInfo.contentFrameRateN = 30;
1518    inputFrameInfo.contentFrameRateD = 1;
1519
1520    IVideoPayloadManager::MetaData metadata;
1521    if (!mPayloadManager->getMetaData(cachedBuffer->mapper, &metadata)) {
1522        ETRACE("Failed to get metadata");
1523        return false;
1524    }
1525
1526    if (metadata.transform == 0 || metadata.transform == HAL_TRANSFORM_ROT_180) {
1527        inputFrameInfo.contentWidth = metadata.normalBuffer.width;
1528        inputFrameInfo.contentHeight = metadata.normalBuffer.height;
1529    } else {
1530        inputFrameInfo.contentWidth = metadata.normalBuffer.height;
1531        inputFrameInfo.contentHeight = metadata.normalBuffer.width;
1532        // 90 and 270 have some issues that appear to be decoder bugs
1533        ITRACE("Skipping extended mode due to rotation of 90 or 270");
1534        return false;
1535    }
1536    // Use the crop size if something changed derive it again..
1537    // Only get video source info if frame rate has not been initialized.
1538    // getVideoSourceInfo() is a fairly expensive operation. This optimization
1539    // will save us a few milliseconds per frame
1540    if (mFirstVideoFrame || (mOrigContentWidth != metadata.normalBuffer.width) ||
1541        (mOrigContentHeight != metadata.normalBuffer.height)) {
1542        mVideoFramerate = inputFrameInfo.contentFrameRateN;
1543        VTRACE("VideoWidth = %d, VideoHeight = %d", metadata.normalBuffer.width, metadata.normalBuffer.height);
1544        mOrigContentWidth = metadata.normalBuffer.width;
1545        mOrigContentHeight = metadata.normalBuffer.height;
1546
1547        // For the first video session by default
1548        int sessionID = Hwcomposer::getInstance().getDisplayAnalyzer()->getFirstVideoInstanceSessionID();
1549        if (sessionID >= 0) {
1550            ITRACE("Session id = %d", sessionID);
1551            VideoSourceInfo videoInfo;
1552            memset(&videoInfo, 0, sizeof(videoInfo));
1553            status_t ret = mHwc.getMultiDisplayObserver()->getVideoSourceInfo(sessionID, &videoInfo);
1554            if (ret == NO_ERROR) {
1555                ITRACE("width = %d, height = %d, fps = %d", videoInfo.width, videoInfo.height,
1556                        videoInfo.frameRate);
1557                if (videoInfo.frameRate > 0) {
1558                    mVideoFramerate = videoInfo.frameRate;
1559                }
1560            }
1561        }
1562        mFirstVideoFrame = false;
1563    }
1564    inputFrameInfo.contentFrameRateN = mVideoFramerate;
1565    inputFrameInfo.contentFrameRateD = 1;
1566
1567    sp<ComposeTask> composeTask;
1568    sp<RefBase> heldBuffer;
1569    Mutex::Autolock _l(mTaskLock);
1570
1571    if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0) {
1572        queueFrameTypeInfo(inputFrameInfo);
1573        return true; // This isn't a failure, WiDi just doesn't want frames right now.
1574    }
1575
1576    IVideoPayloadManager::Buffer info;
1577    if (!getFrameOfSize(mCurrentConfig.policy.scaledWidth, mCurrentConfig.policy.scaledHeight, metadata, info)) {
1578        ITRACE("Extended mode waiting for scaled frame");
1579        return false;
1580    }
1581
1582    queueFrameTypeInfo(inputFrameInfo);
1583
1584    heldBuffer = new HeldDecoderBuffer(this, cachedBuffer);
1585    int64_t mediaTimestamp = metadata.timestamp;
1586
1587    VARectangle surface_region;
1588    surface_region.x = info.offsetX;
1589    surface_region.y = info.offsetY;
1590    surface_region.width = info.width;
1591    surface_region.height = info.height;
1592    FrameInfo outputFrameInfo = inputFrameInfo;
1593    outputFrameInfo.bufferFormat = metadata.format;
1594
1595    outputFrameInfo.contentWidth = info.width;
1596    outputFrameInfo.contentHeight = info.height;
1597    outputFrameInfo.bufferWidth = info.bufWidth;
1598    outputFrameInfo.bufferHeight = info.bufHeight;
1599    outputFrameInfo.lumaUStride = info.lumaStride;
1600    outputFrameInfo.chromaUStride = info.chromaUStride;
1601    outputFrameInfo.chromaVStride = info.chromaVStride;
1602
1603    if (outputFrameInfo.bufferFormat == 0 ||
1604        outputFrameInfo.bufferWidth < outputFrameInfo.contentWidth ||
1605        outputFrameInfo.bufferHeight < outputFrameInfo.contentHeight ||
1606        outputFrameInfo.contentWidth <= 0 || outputFrameInfo.contentHeight <= 0 ||
1607        outputFrameInfo.lumaUStride <= 0 ||
1608        outputFrameInfo.chromaUStride <= 0 || outputFrameInfo.chromaVStride <= 0) {
1609        ITRACE("Payload cleared or inconsistent info, not sending frame");
1610        ITRACE("outputFrameInfo.bufferFormat  = %d ", outputFrameInfo.bufferFormat);
1611        ITRACE("outputFrameInfo.bufferWidth   = %d ", outputFrameInfo.bufferWidth);
1612        ITRACE("outputFrameInfo.contentWidth  = %d ", outputFrameInfo.contentWidth);
1613        ITRACE("outputFrameInfo.bufferHeight  = %d ", outputFrameInfo.bufferHeight);
1614        ITRACE("outputFrameInfo.contentHeight = %d ", outputFrameInfo.contentHeight);
1615        ITRACE("outputFrameInfo.lumaUStride   = %d ", outputFrameInfo.lumaUStride);
1616        ITRACE("outputFrameInfo.chromaUStride = %d ", outputFrameInfo.chromaUStride);
1617        ITRACE("outputFrameInfo.chromaVStride = %d ", outputFrameInfo.chromaVStride);
1618        return false;
1619    }
1620
1621    if (mCurrentConfig.policy.scaledWidth == 0 || mCurrentConfig.policy.scaledHeight == 0)
1622        return true; // This isn't a failure, WiDi just doesn't want frames right now.
1623
1624    if (info.khandle == mExtLastKhandle && mediaTimestamp == mExtLastTimestamp) {
1625        // Same frame again. We don't send a frame, but we return true because
1626        // this isn't an error.
1627        if (metadata.transform != 0)
1628            mVspInUse = true; // Don't shut down VSP just to start it again really quick.
1629        return true;
1630    }
1631    mExtLastKhandle = info.khandle;
1632    mExtLastTimestamp = mediaTimestamp;
1633
1634    HWCBufferHandleType handleType = HWC_HANDLE_TYPE_KBUF;
1635
1636    buffer_handle_t handle = info.khandle;
1637
1638    // Ideally we'd check if there's an offset (info.offsetX > 0 || info.offsetY > 0),
1639    // so we use VSP only when cropping is needed. But using the khandle directly when
1640    // both rotation and scaling are involved can encode the frame with the wrong
1641    // tiling status, so use VSP to normalize if any rotation is involved.
1642    if (metadata.transform != 0) {
1643        // Cropping (or above workaround) needed, so use VSP to do it.
1644        mVspInUse = true;
1645        vspPrepare(info.width, info.height);
1646
1647        composeTask = new ComposeTask();
1648        composeTask->heldVideoBuffer = heldBuffer;
1649        heldBuffer = NULL;
1650        composeTask->outWidth = info.width;
1651        composeTask->outHeight = info.height;
1652        composeTask->outputHandle = mCscBuffers.get(composeTask->outWidth, composeTask->outHeight, &heldBuffer);
1653        if (composeTask->outputHandle == NULL) {
1654            ITRACE("Out of CSC buffers, dropping frame");
1655            return true;
1656        }
1657
1658        composeTask->surface_region = surface_region;
1659        composeTask->videoCachedBuffer = cachedBuffer;
1660        VARectangle& output_region = composeTask->output_region;
1661        output_region.x = 0;
1662        output_region.y = 0;
1663        output_region.width = info.width;
1664        output_region.height = info.height;
1665
1666        composeTask->videoKhandle = info.khandle;
1667        composeTask->videoStride = info.lumaStride;
1668        composeTask->videoBufHeight = info.bufHeight;
1669        composeTask->videoTiled = info.tiled;
1670
1671        BufferManager* mgr = mHwc.getBufferManager();
1672        DataBuffer* dataBuf = mgr->lockDataBuffer(composeTask->outputHandle);
1673        outputFrameInfo.contentWidth = composeTask->outWidth;
1674        outputFrameInfo.contentHeight = composeTask->outHeight;
1675        outputFrameInfo.bufferWidth = dataBuf->getWidth();
1676        outputFrameInfo.bufferHeight = dataBuf->getHeight();
1677        outputFrameInfo.lumaUStride = dataBuf->getWidth();
1678        outputFrameInfo.chromaUStride = dataBuf->getWidth();
1679        outputFrameInfo.chromaVStride = dataBuf->getWidth();
1680        mgr->unlockDataBuffer(dataBuf);
1681
1682        handle = composeTask->outputHandle;
1683        handleType = HWC_HANDLE_TYPE_GRALLOC;
1684
1685        mTasks.push_back(composeTask);
1686        mRequestQueued.signal();
1687    }
1688
1689    queueBufferInfo(outputFrameInfo);
1690
1691    if (mCurrentConfig.frameListener != NULL) {
1692        sp<OnFrameReadyTask> frameReadyTask = new OnFrameReadyTask();
1693        frameReadyTask->renderTask = composeTask;
1694        frameReadyTask->heldBuffer = heldBuffer;
1695        frameReadyTask->frameListener = mCurrentConfig.frameListener;
1696        frameReadyTask->handle = handle;
1697        frameReadyTask->handleType = handleType;
1698        frameReadyTask->renderTimestamp = mRenderTimestamp;
1699        frameReadyTask->mediaTimestamp = mediaTimestamp;
1700
1701        mTasks.push_back(frameReadyTask);
1702        mRequestQueued.signal();
1703    }
1704
1705    return true;
1706}
1707
1708void VirtualDevice::queueFrameTypeInfo(const FrameInfo& inputFrameInfo)
1709{
1710    if (mCurrentConfig.forceNotifyFrameType ||
1711        memcmp(&inputFrameInfo, &mLastInputFrameInfo, sizeof(inputFrameInfo)) != 0) {
1712        // something changed, notify type change listener
1713        mNextConfig.forceNotifyFrameType = false;
1714        mLastInputFrameInfo = inputFrameInfo;
1715
1716        sp<FrameTypeChangedTask> notifyTask = new FrameTypeChangedTask;
1717        notifyTask->typeChangeListener = mCurrentConfig.typeChangeListener;
1718        notifyTask->inputFrameInfo = inputFrameInfo;
1719        mTasks.push_back(notifyTask);
1720    }
1721}
1722
1723void VirtualDevice::queueBufferInfo(const FrameInfo& outputFrameInfo)
1724{
1725    if (mCurrentConfig.forceNotifyBufferInfo ||
1726        memcmp(&outputFrameInfo, &mLastOutputFrameInfo, sizeof(outputFrameInfo)) != 0) {
1727        mNextConfig.forceNotifyBufferInfo = false;
1728        mLastOutputFrameInfo = outputFrameInfo;
1729
1730        sp<BufferInfoChangedTask> notifyTask = new BufferInfoChangedTask;
1731        notifyTask->typeChangeListener = mCurrentConfig.typeChangeListener;
1732        notifyTask->outputFrameInfo = outputFrameInfo;
1733
1734        //if (handleType == HWC_HANDLE_TYPE_GRALLOC)
1735        //    mMappedBufferCache.clear(); // !
1736        mTasks.push_back(notifyTask);
1737    }
1738}
1739#endif
1740
1741void VirtualDevice::colorSwap(buffer_handle_t src, buffer_handle_t dest, uint32_t pixelCount)
1742{
1743    sp<CachedBuffer> srcCachedBuffer;
1744    sp<CachedBuffer> destCachedBuffer;
1745
1746    {
1747        srcCachedBuffer = getMappedBuffer(src);
1748        if (srcCachedBuffer == NULL || srcCachedBuffer->mapper == NULL)
1749            return;
1750        destCachedBuffer = getMappedBuffer(dest);
1751        if (destCachedBuffer == NULL || destCachedBuffer->mapper == NULL)
1752            return;
1753    }
1754
1755    uint8_t* srcPtr = static_cast<uint8_t*>(srcCachedBuffer->mapper->getCpuAddress(0));
1756    uint8_t* destPtr = static_cast<uint8_t*>(destCachedBuffer->mapper->getCpuAddress(0));
1757    if (srcPtr == NULL || destPtr == NULL)
1758        return;
1759    while (pixelCount > 0) {
1760        destPtr[0] = srcPtr[2];
1761        destPtr[1] = srcPtr[1];
1762        destPtr[2] = srcPtr[0];
1763        destPtr[3] = srcPtr[3];
1764        srcPtr += 4;
1765        destPtr += 4;
1766        pixelCount--;
1767    }
1768}
1769
// Ensure the VSP pipeline is enabled at the given output size, switching it
// if it is currently running at a different size. Must be called with
// mTaskLock held: it queues tasks onto mTasks and waits on mRequestDequeued
// using that lock.
void VirtualDevice::vspPrepare(uint32_t width, uint32_t height)
{
    // Fast path: already enabled at the requested size, nothing to do.
    if (mVspEnabled && width == mVspWidth && height == mVspHeight)
        return;

    if (mVspEnabled)
    {
        ITRACE("Going to switch VSP from %ux%u to %ux%u", mVspWidth, mVspHeight, width, height);
        // Drop cached buffer/VA mappings tied to the old size before asking
        // the blit thread to tear the pipeline down.
        mMappedBufferCache.clear();
        mVaMapCache.clear();
        sp<DisableVspTask> disableVsp = new DisableVspTask();
        mTasks.push_back(disableVsp);
    }
    mVspWidth = width;
    mVspHeight = height;

    sp<EnableVspTask> enableTask = new EnableVspTask();
    enableTask->width = width;
    enableTask->height = height;
    mTasks.push_back(enableTask);
    mRequestQueued.signal();
    // to map a buffer from this thread, we need this task to complete on the other thread
    // The blit thread drops its reference once the task has run, so a strong
    // count of 1 (only our local sp) means the enable has completed.
    while (enableTask->getStrongCount() > 1) {
        VTRACE("Waiting for WidiBlit thread to enable VSP...");
        mRequestDequeued.wait(mTaskLock);
    }
    mVspEnabled = true;
}
1798
// Bring up the VSP video-processing pipeline at the given output size:
// initialize libva, create a VAProfileNone/VideoProc config and context,
// and prepare two persistent "blank" input surfaces:
//   - va_blank_yuv_in: an encrypted-black YUV frame used while waiting for
//     downscaling to kick in, and
//   - va_blank_rgb_in: a transparent-black RGBA surface used when there is
//     no UI to compose on top of the video.
// Runs on the blit thread (via EnableVspTask).
void VirtualDevice::vspEnable(uint32_t width, uint32_t height)
{
    // Align to HW requirements (width to 64, height to 16; see align_width/
    // align_height at the top of this file). Because of this, stride and
    // bufHeight computed below are equal to width and height respectively.
    width = align_width(width);
    height = align_height(height);
    ITRACE("Start VSP at %ux%u", width, height);
    VAStatus va_status;

    int display = 0;
    int major_ver, minor_ver;
    va_dpy = vaGetDisplay(&display);
    va_status = vaInitialize(va_dpy, &major_ver, &minor_ver);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaInitialize returns %08x", va_status);

    // Query/validate the RT format attribute for the video-processing
    // entrypoint, then create the config with it.
    VAConfigAttrib va_attr;
    va_attr.type = VAConfigAttribRTFormat;
    va_status = vaGetConfigAttributes(va_dpy,
                VAProfileNone,
                VAEntrypointVideoProc,
                &va_attr,
                1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaGetConfigAttributes returns %08x", va_status);

    va_status = vaCreateConfig(
                va_dpy,
                VAProfileNone,
                VAEntrypointVideoProc,
                &(va_attr),
                1,
                &va_config
                );
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateConfig returns %08x", va_status);

    VADisplayAttribute attr;
    attr.type = VADisplayAttribRenderMode;
    attr.value = VA_RENDER_MODE_LOCAL_OVERLAY;
    va_status = vaSetDisplayAttributes(va_dpy, &attr, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaSetDisplayAttributes returns %08x", va_status);


    // Blank YUV input surface (filled with encrypted black further below).
    va_status = vaCreateSurfaces(
                va_dpy,
                VA_RT_FORMAT_YUV420,
                width,
                height,
                &va_blank_yuv_in,
                1,
                NULL,
                0);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateSurfaces (video in) returns %08x", va_status);

    // External-buffer descriptor for the blank RGBA surface. With
    // VA_SURFACE_ATTRIB_MEM_TYPE_VA the driver allocates the storage itself;
    // 'buffer' is only a placeholder handle slot.
    // NOTE(review): num_planes = 3 with a VA_FOURCC_RGBA pixel format and
    // offsets[2] == offsets[1] looks VSP-driver-specific — confirm against
    // the driver's expectations before changing.
    unsigned long buffer;
    VASurfaceAttribExternalBuffers buf;
    int stride = align_width(width);
    int bufHeight = align_height(height);
    buf.pixel_format = VA_FOURCC_RGBA;
    buf.width = width;
    buf.height = height;
    buf.data_size = stride * bufHeight * 4;
    buf.num_planes = 3;
    buf.pitches[0] = stride;
    buf.pitches[1] = stride;
    buf.pitches[2] = stride;
    buf.pitches[3] = 0;
    buf.offsets[0] = 0;
    buf.offsets[1] = stride * bufHeight;
    buf.offsets[2] = buf.offsets[1];
    buf.offsets[3] = 0;
    buf.buffers = &buffer;
    buf.num_buffers = 1;
    buf.flags = 0;
    buf.private_data = NULL;

    VASurfaceAttrib attrib_list[2];
    attrib_list[0].type = (VASurfaceAttribType)VASurfaceAttribMemoryType;
    attrib_list[0].flags = VA_SURFACE_ATTRIB_SETTABLE;
    attrib_list[0].value.type = VAGenericValueTypeInteger;
    attrib_list[0].value.value.i = VA_SURFACE_ATTRIB_MEM_TYPE_VA;
    attrib_list[1].type = (VASurfaceAttribType)VASurfaceAttribExternalBufferDescriptor;
    attrib_list[1].flags = VA_SURFACE_ATTRIB_SETTABLE;
    attrib_list[1].value.type = VAGenericValueTypePointer;
    attrib_list[1].value.value.p = (void *)&buf;

    va_status = vaCreateSurfaces(
                va_dpy,
                VA_RT_FORMAT_RGB32,
                stride,
                bufHeight,
                &va_blank_rgb_in,
                1,
                attrib_list,
                2);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateSurfaces (blank rgba in) returns %08x", va_status);

    va_status = vaCreateContext(
                va_dpy,
                va_config,
                stride,
                bufHeight,
                0,
                &va_blank_yuv_in /* not used by VSP, but libva checks for it */,
                1,
                &va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateContext returns %08x", va_status);

    // Temporary YUV surface used only to generate the encrypted black frame.
    VASurfaceID tmp_yuv;
    va_status = vaCreateSurfaces(
                va_dpy,
                VA_RT_FORMAT_YUV420,
                stride,
                bufHeight,
                &tmp_yuv,
                1,
                NULL,
                0);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateSurfaces (temp yuv) returns %08x", va_status);
    {
        MappedSurface mappedVideoIn(va_dpy, tmp_yuv);
        if (mappedVideoIn.valid()) {
            // Value doesn't matter, as RGBA will be opaque,
            // but I don't want random data in here.
            // (width*height*3/2 is the NV12/YUV420 payload size; stride ==
            // width and bufHeight == height here due to the alignment above.)
            memset(mappedVideoIn.getPtr(), 0x0, width*height*3/2);
        }
        else
            ETRACE("Unable to map tmp black surface");
    }

    {
        MappedSurface mappedBlankIn(va_dpy, va_blank_rgb_in);
        if (mappedBlankIn.valid()) {
            // Fill RGBA with opaque black temporarily, in order to generate an
            // encrypted black buffer in va_blank_yuv_in to use in place of the
            // real frame data during the short interval where we're waiting for
            // downscaling to kick in.
            uint32_t* pixels = reinterpret_cast<uint32_t*>(mappedBlankIn.getPtr());
            for (size_t i = 0; i < stride*height; i++)
                pixels[i] = 0xff000000;
        }
        else
            ETRACE("Unable to map blank rgba in");
    }

    // Compose opaque black with temp yuv to produce encrypted black yuv.
    VARectangle region;
    region.x = 0;
    region.y = 0;
    region.width = width;
    region.height = height;
    vspCompose(tmp_yuv, va_blank_rgb_in, va_blank_yuv_in, &region, &region);

    va_status = vaDestroySurfaces(va_dpy, &tmp_yuv, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces (temp yuv) returns %08x", va_status);

    {
        // Fill RGBA with transparent black now, to be used when there is no
        // UI to compose on top of the video.
        MappedSurface mappedBlankIn(va_dpy, va_blank_rgb_in);
        if (mappedBlankIn.valid())
            memset(mappedBlankIn.getPtr(), 0, stride*height*4);
        else
            ETRACE("Unable to map blank rgba in");
    }
}
1961
// Tear down the VSP pipeline: submit one final pipeline-parameter buffer with
// VA_PIPELINE_FLAG_END so the VSP can flush, then destroy the context,
// surfaces, config, and VA display created by vspEnable(). Runs on the blit
// thread (via DisableVspTask). Safe to call when already shut down.
void VirtualDevice::vspDisable()
{
    ITRACE("Shut down VSP");

    // vspEnable() leaves both of these non-zero; both being zero means
    // teardown already happened (or enable never ran).
    if (va_context == 0 && va_blank_yuv_in == 0) {
        ITRACE("Already shut down");
        return;
    }

    // Build an "end of pipeline" parameter buffer and submit it through a
    // begin/render/end picture sequence so the VSP sees the END flag.
    VABufferID pipeline_param_id;
    VAStatus va_status;
    va_status = vaCreateBuffer(va_dpy,
                va_context,
                VAProcPipelineParameterBufferType,
                sizeof(VAProcPipelineParameterBuffer),
                1,
                NULL,
                &pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateBuffer returns %08x", va_status);

    VABlendState blend_state;
    VAProcPipelineParameterBuffer *pipeline_param;
    va_status = vaMapBuffer(va_dpy,
                pipeline_param_id,
                (void **)&pipeline_param);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaMapBuffer returns %08x", va_status);

    memset(pipeline_param, 0, sizeof(VAProcPipelineParameterBuffer));
    pipeline_param->pipeline_flags = VA_PIPELINE_FLAG_END;
    pipeline_param->num_filters = 0;
    // blend_state is a stack local; it only needs to outlive the render
    // calls below, which complete before this function returns.
    pipeline_param->blend_state = &blend_state;

    va_status = vaUnmapBuffer(va_dpy, pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaUnmapBuffer returns %08x", va_status);

    va_status = vaBeginPicture(va_dpy, va_context, va_blank_yuv_in /* just need some valid surface */);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaBeginPicture returns %08x", va_status);

    va_status = vaRenderPicture(va_dpy, va_context, &pipeline_param_id, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaRenderPicture returns %08x", va_status);

    va_status = vaEndPicture(va_dpy, va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaEndPicture returns %08x", va_status);

    // Destroy in reverse order of creation; zero each handle so the
    // early-return check above recognizes the shut-down state.
    va_status = vaDestroyContext(va_dpy, va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroyContext returns %08x", va_status);
    va_context = 0;

    va_status = vaDestroySurfaces(va_dpy, &va_blank_yuv_in, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces (video in) returns %08x", va_status);
    va_blank_yuv_in = 0;

    va_status = vaDestroySurfaces(va_dpy, &va_blank_rgb_in, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaDestroySurfaces (blank rgba in) returns %08x", va_status);

    if (va_config) {
        vaDestroyConfig(va_dpy, va_config);
        va_config = 0;
    }
    if (va_dpy) {
        vaTerminate(va_dpy);
        va_dpy = NULL;
    }
}
2026
// Run one VSP composition pass: scale/crop videoIn per surface_region into
// output_region of videoOut, with rgbIn attached as an additional output.
// Synchronous: vaSyncSurface at the end blocks until the VSP is done, so all
// stack-local parameter storage is safe. Runs on the blit thread.
void VirtualDevice::vspCompose(VASurfaceID videoIn, VASurfaceID rgbIn, VASurfaceID videoOut,
                               const VARectangle* surface_region, const VARectangle* output_region)
{
    VAStatus va_status;

    VABufferID pipeline_param_id;
    va_status = vaCreateBuffer(va_dpy,
                va_context,
                VAProcPipelineParameterBufferType,
                sizeof(VAProcPipelineParameterBuffer),
                1,
                NULL,
                &pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaCreateBuffer returns %08x", va_status);

    VABlendState blend_state;

    VAProcPipelineParameterBuffer *pipeline_param;
    va_status = vaMapBuffer(va_dpy,
                pipeline_param_id,
                (void **)&pipeline_param);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaMapBuffer returns %08x", va_status);

    // Zero the whole parameter block, then fill in only what this pass uses:
    // input surface, crop/output rectangles, and the extra RGB output.
    memset(pipeline_param, 0, sizeof(VAProcPipelineParameterBuffer));
    pipeline_param->surface = videoIn;
    pipeline_param->surface_region = surface_region;
    pipeline_param->output_region = output_region;

    pipeline_param->pipeline_flags = 0;
    pipeline_param->num_filters = 0;
    pipeline_param->blend_state = &blend_state;
    pipeline_param->num_additional_outputs = 1;
    pipeline_param->additional_outputs = &rgbIn;

    va_status = vaUnmapBuffer(va_dpy, pipeline_param_id);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaUnmapBuffer returns %08x", va_status);

    // Standard libva submit sequence: begin -> render -> end.
    va_status = vaBeginPicture(va_dpy, va_context, videoOut);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaBeginPicture returns %08x", va_status);

    va_status = vaRenderPicture(va_dpy, va_context, &pipeline_param_id, 1);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaRenderPicture returns %08x", va_status);

    va_status = vaEndPicture(va_dpy, va_context);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaEndPicture returns %08x", va_status);

    // Wait for completion so callers may immediately consume videoOut.
    va_status = vaSyncSurface(va_dpy, videoOut);
    if (va_status != VA_STATUS_SUCCESS) ETRACE("vaSyncSurface returns %08x", va_status);
}
2076
2077static uint32_t min(uint32_t a, uint32_t b)
2078{
2079    return (a < b) ? a : b;
2080}
2081
2082bool VirtualDevice::getFrameOfSize(uint32_t width, uint32_t height, const IVideoPayloadManager::MetaData& metadata, IVideoPayloadManager::Buffer& info)
2083{
2084    if (metadata.transform == 0 || metadata.transform == HAL_TRANSFORM_ROT_180)
2085        setMaxDecodeResolution(min(width, metadata.normalBuffer.width), min(height, metadata.normalBuffer.height));
2086    else
2087        setMaxDecodeResolution(min(height, metadata.normalBuffer.width), min(width, metadata.normalBuffer.height));
2088
2089    if (metadata.transform == 0) {
2090        if (metadata.normalBuffer.khandle != 0 && metadata.normalBuffer.width <= width && metadata.normalBuffer.height <= height) {
2091            info = metadata.normalBuffer;
2092            return true;
2093        }
2094
2095        if (metadata.scalingBuffer.khandle != 0 && metadata.scalingBuffer.width <= width && metadata.scalingBuffer.height <= height) {
2096            info = metadata.scalingBuffer;
2097            return true;
2098        }
2099    } else {
2100        if (metadata.rotationBuffer.khandle != 0 && metadata.rotationBuffer.width <= width && metadata.rotationBuffer.height <= height) {
2101            info = metadata.rotationBuffer;
2102            return true;
2103        }
2104    }
2105
2106    return false;
2107}
2108
2109void VirtualDevice::setMaxDecodeResolution(uint32_t width, uint32_t height)
2110{
2111    if (mDecWidth == width && mDecHeight == height)
2112        return;
2113
2114    int sessionID = mHwc.getDisplayAnalyzer()->getFirstVideoInstanceSessionID();
2115    if (sessionID < 0) {
2116        ETRACE("Session id is less than 0");
2117        return;
2118    }
2119
2120    MultiDisplayObserver* mds = mHwc.getMultiDisplayObserver();
2121    status_t ret = mds->setDecoderOutputResolution(sessionID, width, height, 0, 0, width, height);
2122    if (ret != NO_ERROR) {
2123        ETRACE("Failed to set scaling to %ux%u: %x", width, height, ret);
2124        return;
2125    }
2126
2127    mDecWidth = width;
2128    mDecHeight = height;
2129    ITRACE("Set scaling to %ux%u",mDecWidth, mDecHeight);
2130}
2131
// Enable or disable software vsync event delivery for the virtual display.
// Returns false if the device was never initialized.
bool VirtualDevice::vsyncControl(bool enabled)
{
    RETURN_FALSE_IF_NOT_INIT();
    return mVsyncObserver->control(enabled);
}
2137
// Blanking is a no-op for the virtual display; only validates init state.
bool VirtualDevice::blank(bool blank)
{
    RETURN_FALSE_IF_NOT_INIT();
    return true;
}
2143
2144bool VirtualDevice::getDisplaySize(int *width, int *height)
2145{
2146    RETURN_FALSE_IF_NOT_INIT();
2147    if (!width || !height) {
2148        ETRACE("invalid parameters");
2149        return false;
2150    }
2151
2152    // TODO: make this platform specifc
2153    *width = 1280;
2154    *height = 720;
2155    return true;
2156}
2157
2158bool VirtualDevice::getDisplayConfigs(uint32_t *configs,
2159                                         size_t *numConfigs)
2160{
2161    RETURN_FALSE_IF_NOT_INIT();
2162    if (!configs || !numConfigs) {
2163        ETRACE("invalid parameters");
2164        return false;
2165    }
2166
2167    *configs = 0;
2168    *numConfigs = 1;
2169
2170    return true;
2171}
2172
2173bool VirtualDevice::getDisplayAttributes(uint32_t configs,
2174                                            const uint32_t *attributes,
2175                                            int32_t *values)
2176{
2177    RETURN_FALSE_IF_NOT_INIT();
2178
2179    if (!attributes || !values) {
2180        ETRACE("invalid parameters");
2181        return false;
2182    }
2183
2184    int i = 0;
2185    while (attributes[i] != HWC_DISPLAY_NO_ATTRIBUTE) {
2186        switch (attributes[i]) {
2187        case HWC_DISPLAY_VSYNC_PERIOD:
2188            values[i] = 1e9 / 60;
2189            break;
2190        case HWC_DISPLAY_WIDTH:
2191            values[i] = 1280;
2192            break;
2193        case HWC_DISPLAY_HEIGHT:
2194            values[i] = 720;
2195            break;
2196        case HWC_DISPLAY_DPI_X:
2197            values[i] = 0;
2198            break;
2199        case HWC_DISPLAY_DPI_Y:
2200            values[i] = 0;
2201            break;
2202        default:
2203            ETRACE("unknown attribute %d", attributes[i]);
2204            break;
2205        }
2206        i++;
2207    }
2208
2209    return true;
2210}
2211
// Nothing to do at composition-complete time for the virtual display.
bool VirtualDevice::compositionComplete()
{
    RETURN_FALSE_IF_NOT_INIT();
    return true;
}
2217
// One-time device setup: read the FPS-divider property, reset widi config
// state, create the payload manager / vsync observer / sync timeline / blit
// thread, and (when built with INTEL_WIDI) publish the "hwc.widi" binder
// service. Returns true on success; on failure some paths call
// deinitialize() and return false.
bool VirtualDevice::initialize()
{
    mRgbLayer = -1;
    mYuvLayer = -1;
    char prop[PROPERTY_VALUE_MAX];
    char *retptr;

    // Optional frame-rate throttle: "hwc.fps_divider" must parse fully as an
    // integer strictly between 1 and 60 to take effect.
    if (property_get("hwc.fps_divider", prop, "1") > 0) {
        uint32_t divider = strtoul(prop, &retptr, 10);
        if (*retptr == '\0' && divider > 1 && divider < 60) {
            mFpsDivider = divider;
            ALOGI("Virtual display, setting HWC FPS divider to %d", mFpsDivider);
        }
    }

#ifdef INTEL_WIDI
    // Add initialization codes here. If init fails, invoke DEINIT_AND_RETURN_FALSE();
    // scaledWidth/Height of 0 means "WiDi does not want frames yet".
    mNextConfig.typeChangeListener = NULL;
    mNextConfig.policy.scaledWidth = 0;
    mNextConfig.policy.scaledHeight = 0;
    mNextConfig.policy.xdpi = 96;
    mNextConfig.policy.ydpi = 96;
    mNextConfig.policy.refresh = 60;
    mNextConfig.extendedModeEnabled = false;
    mNextConfig.forceNotifyFrameType = false;
    mNextConfig.forceNotifyBufferInfo = false;
    mCurrentConfig = mNextConfig;

    // Zeroed so the first queueFrameTypeInfo/queueBufferInfo memcmp always
    // detects a change and notifies the listener.
    memset(&mLastInputFrameInfo, 0, sizeof(mLastInputFrameInfo));
    memset(&mLastOutputFrameInfo, 0, sizeof(mLastOutputFrameInfo));
#endif
    mPayloadManager = mHwc.getPlatFactory()->createVideoPayloadManager();

    if (!mPayloadManager) {
        DEINIT_AND_RETURN_FALSE("Failed to create payload manager");
    }

    mVsyncObserver = new SoftVsyncObserver(*this);
    if (!mVsyncObserver || !mVsyncObserver->initialize()) {
        DEINIT_AND_RETURN_FALSE("Failed to create Soft Vsync Observer");
    }

    // Software sync timeline used to signal release fences for composed frames.
    mSyncTimelineFd = sw_sync_timeline_create();
    mNextSyncPoint = 1;
    mExpectAcquireFences = false;

    mThread = new WidiBlitThread(this);
    mThread->run("WidiBlit", PRIORITY_URGENT_DISPLAY);

#ifdef INTEL_WIDI
    // Publish frame server service with service manager
    status_t ret = defaultServiceManager()->addService(String16("hwc.widi"), this);
    if (ret == NO_ERROR) {
        ProcessState::self()->startThreadPool();
        mInitialized = true;
    } else {
        ETRACE("Could not register hwc.widi with service manager, error = %d", ret);
        deinitialize();
    }
#else
    mInitialized = true;
#endif
    // VSP / libva state starts disabled; vspEnable() populates the handles
    // on first use from the blit thread.
    mVspEnabled = false;
    mVspInUse = false;
    mVspWidth = 0;
    mVspHeight = 0;
    va_dpy = NULL;
    va_config = 0;
    va_context = 0;
    va_blank_yuv_in = 0;
    va_blank_rgb_in = 0;
    mVspUpscale = false;
    mDebugVspClear = false;
    mDebugVspDump = false;
    mDebugCounter = 0;

    ITRACE("Init done.");

    return mInitialized;
}
2298
// The virtual display is always considered connected.
bool VirtualDevice::isConnected() const
{
    return true;
}
2303
// Human-readable device name used in logs and dumps.
const char* VirtualDevice::getName() const
{
    return "Virtual";
}
2308
// Device type identifier for the HWC core.
int VirtualDevice::getType() const
{
    return DEVICE_VIRTUAL;
}
2313
// SoftVsyncObserver callback: forward the software vsync event to the HWC
// core, tagged with this device's id.
void VirtualDevice::onVsync(int64_t timestamp)
{
    mHwc.vsync(DEVICE_VIRTUAL, timestamp);
}
2318
// Nothing to report in dumpsys output for the virtual display yet.
void VirtualDevice::dump(Dump& d)
{
}
2322
// Frame-rate divider configured via the "hwc.fps_divider" property in
// initialize(); 1 means no throttling.
uint32_t VirtualDevice::getFpsDivider()
{
    return mFpsDivider;
}
2327
2328void VirtualDevice::deinitialize()
2329{
2330    VAStatus va_status;
2331
2332    if (mPayloadManager) {
2333        delete mPayloadManager;
2334        mPayloadManager = NULL;
2335    }
2336    DEINIT_AND_DELETE_OBJ(mVsyncObserver);
2337    mInitialized = false;
2338}
2339
// Power modes are not applicable to the virtual display; accept any mode.
bool VirtualDevice::setPowerMode(int /*mode*/)
{
    return true;
}
2344
// The single exposed config (index 0) is always active.
int VirtualDevice::getActiveConfig()
{
    return 0;
}
2349
// Config switching is not supported on the virtual display.
bool VirtualDevice::setActiveConfig(int /*index*/)
{
    return false;
}
2354
2355} // namespace intel
2356} // namespace android
2357