alloc_controller.cpp revision 89a6e5a7179ee29ca2e94ea53ee3f41badb2f2e2
/*
 * Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.

 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *   * Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *   * Redistributions in binary form must reproduce the above
 *     copyright notice, this list of conditions and the following
 *     disclaimer in the documentation and/or other materials provided
 *     with the distribution.
 *   * Neither the name of The Linux Foundation nor the names of its
 *     contributors may be used to endorse or promote products derived
 *     from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include <cutils/log.h>
#include <cutils/properties.h> // for property_get()/PROPERTY_VALUE_MAX
#include <fcntl.h>
#include <dlfcn.h>
#include "gralloc_priv.h"
#include "alloc_controller.h"
#include "memalloc.h"
#include "ionalloc.h"
#include "gr.h"
#include "comptype.h"

#ifdef VENUS_COLOR_FORMAT
#include <media/msm_media_info.h>
#else
#define VENUS_Y_STRIDE(args...) 0
#define VENUS_Y_SCANLINES(args...) 0
#define VENUS_BUFFER_SIZE(args...) 0
#endif

using namespace gralloc;
using namespace qdutils;

ANDROID_SINGLETON_STATIC_INSTANCE(AdrenoMemInfo);

//Common functions
static bool canFallback(int usage, bool triedSystem)
{
    // Fall back to the system heap when an alloc fails, unless
    // 1. Composition type is MDP
    // 2. Alloc from system heap was already tried
    // 3. The heap type is requested explicitly
    // 4. The heap type is protected
    // 5. The buffer is meant for external display only

    if(QCCompositionType::getInstance().getCompositionType() &
       COMPOSITION_TYPE_MDP)
        return false;
    if(triedSystem)
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PROTECTED))
        return false;
    if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PRIVATE_EXTERNAL_ONLY))
        return false;
    //Return true by default
    return true;
}
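
// For example, a request carrying GRALLOC_USAGE_PROTECTED (or any explicit
// heap bit) never falls back, whereas an ordinary allocation with no heap
// bits set may fall back to the system heap, provided MDP composition is
// not in use and the system heap has not already been tried.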

static bool useUncached(int usage)
{
    if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED)
        return true;
    if(((usage & GRALLOC_USAGE_SW_WRITE_MASK) == GRALLOC_USAGE_SW_WRITE_RARELY)
       ||((usage & GRALLOC_USAGE_SW_READ_MASK) == GRALLOC_USAGE_SW_READ_RARELY))
        return true;
    return false;
}

//-------------- AdrenoMemInfo-----------------------//
AdrenoMemInfo::AdrenoMemInfo()
{
    libadreno_utils = ::dlopen("libadreno_utils.so", RTLD_NOW);
    if (libadreno_utils) {
        *(void **)&LINK_adreno_compute_padding = ::dlsym(libadreno_utils,
                                           "compute_surface_padding");
    }
}

AdrenoMemInfo::~AdrenoMemInfo()
{
    if (libadreno_utils) {
        ::dlclose(libadreno_utils);
    }
}

int AdrenoMemInfo::getStride(int width, int format)
{
    int stride = ALIGN(width, 32);
    // Currently surface padding is only computed for RGB* surfaces.
    if (format < 0x7) {
        // Don't add any additional padding if debug.gralloc.map_fb_memory
        // is enabled
        char property[PROPERTY_VALUE_MAX];
        if((property_get("debug.gralloc.map_fb_memory", property, NULL) > 0) &&
           (!strncmp(property, "1", PROPERTY_VALUE_MAX ) ||
           (!strncasecmp(property,"true", PROPERTY_VALUE_MAX )))) {
              return stride;
        }

        int bpp = 4;
        switch(format)
        {
            case HAL_PIXEL_FORMAT_RGB_888:
                bpp = 3;
                break;
            case HAL_PIXEL_FORMAT_RGB_565:
            case HAL_PIXEL_FORMAT_RGBA_5551:
            case HAL_PIXEL_FORMAT_RGBA_4444:
                bpp = 2;
                break;
            default: break;
        }
        if ((libadreno_utils) && (LINK_adreno_compute_padding)) {
            int surface_tile_height = 1;   // Linear surface
            int raster_mode         = 0;   // Adreno unknown raster mode.
            int padding_threshold   = 512; // Threshold for padding surfaces.
            // The function below expects the width to be a multiple of
            // 32 pixels, hence we pass stride instead of width.
            stride = LINK_adreno_compute_padding(stride, bpp,
                                      surface_tile_height, raster_mode,
                                      padding_threshold);
        }
    } else {
        switch (format)
        {
            case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:
            case HAL_PIXEL_FORMAT_RAW_SENSOR:
                stride = ALIGN(width, 32);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:
                stride = ALIGN(width, 128);
                break;
            case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
            case HAL_PIXEL_FORMAT_YCbCr_420_SP:
            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            case HAL_PIXEL_FORMAT_YV12:
            case HAL_PIXEL_FORMAT_YCbCr_422_SP:
            case HAL_PIXEL_FORMAT_YCrCb_422_SP:
                stride = ALIGN(width, 16);
                break;
            case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
                stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
                break;
            case HAL_PIXEL_FORMAT_BLOB:
                stride = width;
                break;
            default: break;
        }
    }
    return stride;
}
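
// For example, with width == 100: HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED gives
// ALIGN(100, 128) == 128, HAL_PIXEL_FORMAT_YV12 gives ALIGN(100, 16) == 112,
// and RGB formats start from ALIGN(100, 32) == 128 before any further
// padding that LINK_adreno_compute_padding() may add.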

//-------------- IAllocController-----------------------//
IAllocController* IAllocController::sController = NULL;
IAllocController* IAllocController::getInstance(void)
{
    if(sController == NULL) {
        sController = new IonController();
    }
    return sController;
}


//-------------- IonController-----------------------//
IonController::IonController()
{
    mIonAlloc = new IonAlloc();
    mUseTZProtection = false;
    char property[PROPERTY_VALUE_MAX];
    if ((property_get("persist.gralloc.cp.level3", property, NULL) <= 0) ||
                            (atoi(property) != 1)) {
        mUseTZProtection = true;
    }
}

int IonController::allocate(alloc_data& data, int usage)
{
    int ionFlags = 0;
    int ret;

    data.uncached = useUncached(usage);
    data.allocType = 0;

    if(usage & GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP)
        ionFlags |= ION_HEAP(ION_SF_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP)
        ionFlags |= ION_HEAP(ION_SYSTEM_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_IOMMU_HEAP)
        ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);

    if(usage & GRALLOC_USAGE_PROTECTED) {
        if ((mUseTZProtection) && (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP)) {
            ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
            ionFlags |= ION_SECURE;
        } else {
            // For targets/OEMs which do not need HW level protection,
            // do not set the ION secure flag & MM heap. Fall back to the
            // IOMMU heap.
            ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
        }
    } else if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
        // MM heap is exclusively a secure heap.
        // If it is requested for non-secure use, fall back to the IOMMU heap.
        ALOGW("GRALLOC_USAGE_PRIVATE_MM_HEAP cannot be used as an insecure "
              "heap! Trying to use IOMMU instead!");
        ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
    }

    if(usage & GRALLOC_USAGE_PRIVATE_CAMERA_HEAP)
        ionFlags |= ION_HEAP(ION_CAMERA_HEAP_ID);

    if(usage & GRALLOC_USAGE_PRIVATE_ADSP_HEAP)
        ionFlags |= ION_HEAP(ION_ADSP_HEAP_ID);

    if(ionFlags & ION_SECURE)
        data.allocType |= private_handle_t::PRIV_FLAGS_SECURE_BUFFER;

    // If no flags are set, default to the SF + IOMMU heaps so that
    // bypass can work; we can fall back to the system heap if we run out.
    if(!ionFlags)
        ionFlags = ION_HEAP(ION_SF_HEAP_ID) | ION_HEAP(ION_IOMMU_HEAP_ID);

    data.flags = ionFlags;
    ret = mIonAlloc->alloc_buffer(data);

    // Fallback
    if(ret < 0 && canFallback(usage,
                              (ionFlags & ION_HEAP(ION_SYSTEM_HEAP_ID))))
    {
        ALOGW("Falling back to system heap");
        data.flags = ION_HEAP(ION_SYSTEM_HEAP_ID);
        ret = mIonAlloc->alloc_buffer(data);
    }

    if(ret >= 0) {
        data.allocType |= private_handle_t::PRIV_FLAGS_USES_ION;
    }

    return ret;
}

IMemAlloc* IonController::getAllocator(int flags)
{
    IMemAlloc* memalloc = NULL;
    if (flags & private_handle_t::PRIV_FLAGS_USES_ION) {
        memalloc = mIonAlloc;
    } else {
        ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags);
    }

    return memalloc;
}

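// Compute the allocation size (in bytes) for a buffer of the given width,
// height and pixel format; alignedw/alignedh are set to the padded width
// and height used for the allocation. On invalid input the error code
// -EINVAL is returned (wrapped into the size_t return type).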
size_t getBufferSizeAndDimensions(int width, int height, int format,
                                  int& alignedw, int &alignedh)
{
    size_t size;

    alignedw = AdrenoMemInfo::getInstance().getStride(width, format);
    alignedh = ALIGN(height, 32);
    switch (format) {
        case HAL_PIXEL_FORMAT_RGBA_8888:
        case HAL_PIXEL_FORMAT_RGBX_8888:
        case HAL_PIXEL_FORMAT_BGRA_8888:
            size = alignedw * alignedh * 4;
            break;
        case HAL_PIXEL_FORMAT_RGB_888:
            size = alignedw * alignedh * 3;
            break;
        case HAL_PIXEL_FORMAT_RGB_565:
        case HAL_PIXEL_FORMAT_RGBA_5551:
        case HAL_PIXEL_FORMAT_RGBA_4444:
        case HAL_PIXEL_FORMAT_RAW_SENSOR:
            size = alignedw * alignedh * 2;
            break;

        // adreno formats
        case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO:  // NV21
            size  = ALIGN(alignedw * alignedh, 4096);
            size += ALIGN(2 * ALIGN(width/2, 32) * ALIGN(height/2, 32), 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED:   // NV12
            // The chroma plane is subsampled,
            // but the pitch in bytes is unchanged.
            // The GPU needs 4K alignment, but the video decoder needs 8K.
            size  = ALIGN(alignedw * alignedh, 8192);
            size += ALIGN(alignedw * ALIGN(height/2, 32), 8192);
            break;
        case HAL_PIXEL_FORMAT_NV12_ENCODEABLE:
        case HAL_PIXEL_FORMAT_YV12:
            if ((format == HAL_PIXEL_FORMAT_YV12) && ((width&1) || (height&1))) {
                ALOGE("w or h is odd for the YV12 format");
                return -EINVAL;
            }
            alignedh = height;
            if (HAL_PIXEL_FORMAT_NV12_ENCODEABLE == format) {
                // The encoder requires a 2K aligned chroma offset.
                size = ALIGN(alignedw*alignedh, 2048) +
                    (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
            } else {
                size = alignedw*alignedh +
                    (ALIGN(alignedw/2, 16) * (alignedh/2))*2;
            }
            size = ALIGN(size, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP:
        case HAL_PIXEL_FORMAT_YCrCb_420_SP:
            alignedh = height;
            size = ALIGN((alignedw * alignedh) + (alignedw * alignedh)/2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_422_SP:
        case HAL_PIXEL_FORMAT_YCrCb_422_SP:
            if(width & 1) {
                ALOGE("width is odd for the YUV422_SP format");
                return -EINVAL;
            }
            alignedh = height;
            size = ALIGN(alignedw * alignedh * 2, 4096);
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS:
            alignedh = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
            size = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, width, height);
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if(height != 1) {
                ALOGE("%s: Buffers with format HAL_PIXEL_FORMAT_BLOB "
                      "must have height == 1", __FUNCTION__);
                return -EINVAL;
            }
            alignedh = height;
            alignedw = width;
            size = width;
            break;
        default:
            ALOGE("unrecognized pixel format: 0x%x", format);
            return -EINVAL;
    }

    return size;
}
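
// Worked example: a 640x480 HAL_PIXEL_FORMAT_RGBA_8888 buffer (assuming the
// Adreno library adds no padding beyond the 32-pixel alignment) gives
// alignedw == 640, alignedh == ALIGN(480, 32) == 480, and
// size == 640 * 480 * 4 == 1228800 bytes.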

// Allocate a buffer of the given width, height and format into a
// private_handle_t. It is the caller's responsibility to free the
// buffer using the free_buffer function.
int alloc_buffer(private_handle_t **pHnd, int w, int h, int format, int usage)
{
    alloc_data data;
    int alignedw, alignedh;
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    data.base = 0;
    data.fd = -1;
    data.offset = 0;
    data.size = getBufferSizeAndDimensions(w, h, format, alignedw, alignedh);
    data.align = getpagesize();
    data.uncached = useUncached(usage);
    int allocFlags = usage;

    int err = sAlloc->allocate(data, allocFlags);
    if (0 != err) {
        ALOGE("%s: allocate failed", __FUNCTION__);
        return -ENOMEM;
    }

    private_handle_t* hnd = new private_handle_t(data.fd, data.size,
                                                 data.allocType, 0, format,
                                                 alignedw, alignedh);
    hnd->base = (int) data.base;
    hnd->offset = data.offset;
    hnd->gpuaddr = 0;
    *pHnd = hnd;
    return 0;
}

void free_buffer(private_handle_t *hnd)
{
    gralloc::IAllocController* sAlloc =
        gralloc::IAllocController::getInstance();
    if (hnd && hnd->fd > 0) {
        IMemAlloc* memalloc = sAlloc->getAllocator(hnd->flags);
        if (memalloc)
            memalloc->free_buffer((void*)hnd->base, hnd->size,
                                  hnd->offset, hnd->fd);
    }
    if(hnd)
        delete hnd;
}
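
// Illustrative usage sketch (not part of the allocator itself): how a client
// is expected to pair alloc_buffer() with free_buffer(), per the comment
// above alloc_buffer(). The dimensions, format and usage flags are arbitrary
// example values; the block is guarded by #if 0 so it is never compiled.
#if 0
static int example_alloc_and_free()
{
    private_handle_t *hnd = NULL;
    int err = alloc_buffer(&hnd, 1280, 720, HAL_PIXEL_FORMAT_RGBA_8888,
                           GRALLOC_USAGE_HW_TEXTURE);
    if (err != 0)
        return err;        // allocation failed, nothing to free
    // ... use hnd->fd / hnd->base ...
    free_buffer(hnd);      // caller must release the buffer when done
    return 0;
}
#endif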
408