// alloc_controller.cpp revision 32c2c1b1490e949a15dca9351f213d91be2b79d5
1/* 2 * Copyright (c) 2011-2012, The Linux Foundation. All rights reserved. 3 4 * Redistribution and use in source and binary forms, with or without 5 * modification, are permitted provided that the following conditions are 6 * met: 7 * * Redistributions of source code must retain the above copyright 8 * notice, this list of conditions and the following disclaimer. 9 * * Redistributions in binary form must reproduce the above 10 * copyright notice, this list of conditions and the following 11 * disclaimer in the documentation and/or other materials provided 12 * with the distribution. 13 * * Neither the name of The Linux Foundation nor the names of its 14 * contributors may be used to endorse or promote products derived 15 * from this software without specific prior written permission. 16 * 17 * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED 18 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 19 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT 20 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 21 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 22 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 23 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 24 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 25 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 26 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 27 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 */ 29 30#include <cutils/log.h> 31#include <fcntl.h> 32#include <dlfcn.h> 33#include "gralloc_priv.h" 34#include "alloc_controller.h" 35#include "memalloc.h" 36#include "ionalloc.h" 37#include "gr.h" 38#include "comptype.h" 39 40#ifdef VENUS_COLOR_FORMAT 41#include <media/msm_media_info.h> 42#else 43#define VENUS_Y_STRIDE(args...) 0 44#define VENUS_Y_SCANLINES(args...) 
0 45#define VENUS_BUFFER_SIZE(args...) 0 46#endif 47 48using namespace gralloc; 49using namespace qdutils; 50 51ANDROID_SINGLETON_STATIC_INSTANCE(AdrenoMemInfo); 52 53//Common functions 54static bool canFallback(int usage, bool triedSystem) 55{ 56 // Fallback to system heap when alloc fails unless 57 // 1. Composition type is MDP 58 // 2. Alloc from system heap was already tried 59 // 3. The heap type is requsted explicitly 60 // 4. The heap type is protected 61 // 5. The buffer is meant for external display only 62 63 if(QCCompositionType::getInstance().getCompositionType() & 64 COMPOSITION_TYPE_MDP) 65 return false; 66 if(triedSystem) 67 return false; 68 if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PROTECTED)) 69 return false; 70 if(usage & (GRALLOC_HEAP_MASK | GRALLOC_USAGE_PRIVATE_EXTERNAL_ONLY)) 71 return false; 72 //Return true by default 73 return true; 74} 75 76static bool useUncached(int usage) 77{ 78 // System heaps cannot be uncached 79 if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP) 80 return false; 81 if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED) 82 return true; 83 return false; 84} 85 86//-------------- AdrenoMemInfo-----------------------// 87AdrenoMemInfo::AdrenoMemInfo() 88{ 89 libadreno_utils = ::dlopen("libadreno_utils.so", RTLD_NOW); 90 if (libadreno_utils) { 91 *(void **)&LINK_adreno_compute_padding = ::dlsym(libadreno_utils, 92 "compute_surface_padding"); 93 } 94} 95 96AdrenoMemInfo::~AdrenoMemInfo() 97{ 98 if (libadreno_utils) { 99 ::dlclose(libadreno_utils); 100 } 101} 102 103int AdrenoMemInfo::getStride(int width, int format) 104{ 105 int stride = ALIGN(width, 32); 106 // Currently surface padding is only computed for RGB* surfaces. 
107 if (format <= HAL_PIXEL_FORMAT_sRGB_888) { 108 int bpp = 4; 109 switch(format) 110 { 111 case HAL_PIXEL_FORMAT_RGB_888: 112 case HAL_PIXEL_FORMAT_sRGB_888: 113 bpp = 3; 114 break; 115 case HAL_PIXEL_FORMAT_RGB_565: 116 bpp = 2; 117 break; 118 default: break; 119 } 120 if ((libadreno_utils) && (LINK_adreno_compute_padding)) { 121 int surface_tile_height = 1; // Linear surface 122 int raster_mode = 0; // Adreno unknown raster mode. 123 int padding_threshold = 512; // Threshold for padding surfaces. 124 // the function below expects the width to be a multiple of 125 // 32 pixels, hence we pass stride instead of width. 126 stride = LINK_adreno_compute_padding(stride, bpp, 127 surface_tile_height, raster_mode, 128 padding_threshold); 129 } 130 } else { 131 switch (format) 132 { 133 case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO: 134 case HAL_PIXEL_FORMAT_RAW_SENSOR: 135 stride = ALIGN(width, 32); 136 break; 137 case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED: 138 stride = ALIGN(width, 128); 139 break; 140 case HAL_PIXEL_FORMAT_NV12_ENCODEABLE: 141 case HAL_PIXEL_FORMAT_YCbCr_420_SP: 142 case HAL_PIXEL_FORMAT_YCrCb_420_SP: 143 case HAL_PIXEL_FORMAT_YV12: 144 case HAL_PIXEL_FORMAT_YCbCr_422_SP: 145 case HAL_PIXEL_FORMAT_YCrCb_422_SP: 146 stride = ALIGN(width, 16); 147 break; 148 case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS: 149 stride = VENUS_Y_STRIDE(COLOR_FMT_NV12, width); 150 break; 151 case HAL_PIXEL_FORMAT_BLOB: 152 stride = width; 153 break; 154 default: break; 155 } 156 } 157 return stride; 158} 159 160//-------------- IAllocController-----------------------// 161IAllocController* IAllocController::sController = NULL; 162IAllocController* IAllocController::getInstance(void) 163{ 164 if(sController == NULL) { 165 sController = new IonController(); 166 } 167 return sController; 168} 169 170 171//-------------- IonController-----------------------// 172IonController::IonController() 173{ 174 mIonAlloc = new IonAlloc(); 175} 176 177int IonController::allocate(alloc_data& data, int 
usage) 178{ 179 int ionFlags = 0; 180 int ret; 181 182 data.uncached = useUncached(usage); 183 data.allocType = 0; 184 185 if(usage & GRALLOC_USAGE_PRIVATE_UI_CONTIG_HEAP) 186 ionFlags |= ION_HEAP(ION_SF_HEAP_ID); 187 188 if(usage & GRALLOC_USAGE_PRIVATE_SYSTEM_HEAP) 189 ionFlags |= ION_HEAP(ION_SYSTEM_HEAP_ID); 190 191 if(usage & GRALLOC_USAGE_PRIVATE_IOMMU_HEAP) 192 ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID); 193 194 //MM Heap is exclusively a secure heap. 195 if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) { 196 //XXX: Right now the MM heap is the only secure heap we have. When we 197 //have other secure heaps, we can change this. 198 if(usage & GRALLOC_USAGE_PROTECTED) { 199 ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID); 200 ionFlags |= ION_SECURE; 201 } 202 else { 203 ALOGW("GRALLOC_USAGE_PRIVATE_MM_HEAP \ 204 cannot be used as an insecure heap!\ 205 trying to use IOMMU instead !!"); 206 ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID); 207 } 208 } 209 210 if(usage & GRALLOC_USAGE_PRIVATE_CAMERA_HEAP) 211 ionFlags |= ION_HEAP(ION_CAMERA_HEAP_ID); 212 213 if(usage & GRALLOC_USAGE_PROTECTED) 214 data.allocType |= private_handle_t::PRIV_FLAGS_SECURE_BUFFER; 215 216 // if no flags are set, default to 217 // SF + IOMMU heaps, so that bypass can work 218 // we can fall back to system heap if 219 // we run out. 
220 if(!ionFlags) 221 ionFlags = ION_HEAP(ION_SF_HEAP_ID) | ION_HEAP(ION_IOMMU_HEAP_ID); 222 223 data.flags = ionFlags; 224 ret = mIonAlloc->alloc_buffer(data); 225 226 // Fallback 227 if(ret < 0 && canFallback(usage, 228 (ionFlags & ION_SYSTEM_HEAP_ID))) 229 { 230 ALOGW("Falling back to system heap"); 231 data.flags = ION_HEAP(ION_SYSTEM_HEAP_ID); 232 ret = mIonAlloc->alloc_buffer(data); 233 } 234 235 if(ret >= 0 ) { 236 data.allocType |= private_handle_t::PRIV_FLAGS_USES_ION; 237 } 238 239 return ret; 240} 241 242IMemAlloc* IonController::getAllocator(int flags) 243{ 244 IMemAlloc* memalloc = NULL; 245 if (flags & private_handle_t::PRIV_FLAGS_USES_ION) { 246 memalloc = mIonAlloc; 247 } else { 248 ALOGE("%s: Invalid flags passed: 0x%x", __FUNCTION__, flags); 249 } 250 251 return memalloc; 252} 253 254size_t getBufferSizeAndDimensions(int width, int height, int format, 255 int& alignedw, int &alignedh) 256{ 257 size_t size; 258 259 alignedw = AdrenoMemInfo::getInstance().getStride(width, format); 260 alignedh = ALIGN(height, 32); 261 switch (format) { 262 case HAL_PIXEL_FORMAT_RGBA_8888: 263 case HAL_PIXEL_FORMAT_RGBX_8888: 264 case HAL_PIXEL_FORMAT_BGRA_8888: 265 case HAL_PIXEL_FORMAT_sRGB_A_8888: 266 size = alignedw * alignedh * 4; 267 break; 268 case HAL_PIXEL_FORMAT_RGB_888: 269 case HAL_PIXEL_FORMAT_sRGB_888: 270 size = alignedw * alignedh * 3; 271 break; 272 case HAL_PIXEL_FORMAT_RGB_565: 273 case HAL_PIXEL_FORMAT_RAW_SENSOR: 274 size = alignedw * alignedh * 2; 275 break; 276 277 // adreno formats 278 case HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO: // NV21 279 size = ALIGN(alignedw*alignedh, 4096); 280 size += ALIGN(2 * ALIGN(width/2, 32) * ALIGN(height/2, 32), 4096); 281 break; 282 case HAL_PIXEL_FORMAT_YCbCr_420_SP_TILED: // NV12 283 // The chroma plane is subsampled, 284 // but the pitch in bytes is unchanged 285 // The GPU needs 4K alignment, but the video decoder needs 8K 286 size = ALIGN( alignedw * alignedh, 8192); 287 size += ALIGN( alignedw * 
ALIGN(height/2, 32), 8192); 288 break; 289 case HAL_PIXEL_FORMAT_NV12_ENCODEABLE: 290 case HAL_PIXEL_FORMAT_YV12: 291 if ((format == HAL_PIXEL_FORMAT_YV12) && ((width&1) || (height&1))) { 292 ALOGE("w or h is odd for the YV12 format"); 293 return -EINVAL; 294 } 295 alignedh = height; 296 if (HAL_PIXEL_FORMAT_NV12_ENCODEABLE == format) { 297 // The encoder requires a 2K aligned chroma offset. 298 size = ALIGN(alignedw*alignedh, 2048) + 299 (ALIGN(alignedw/2, 16) * (alignedh/2))*2; 300 } else { 301 size = alignedw*alignedh + 302 (ALIGN(alignedw/2, 16) * (alignedh/2))*2; 303 } 304 size = ALIGN(size, 4096); 305 break; 306 case HAL_PIXEL_FORMAT_YCbCr_420_SP: 307 case HAL_PIXEL_FORMAT_YCrCb_420_SP: 308 alignedh = height; 309 size = ALIGN((alignedw*alignedh) + (alignedw* alignedh)/2, 4096); 310 break; 311 case HAL_PIXEL_FORMAT_YCbCr_422_SP: 312 case HAL_PIXEL_FORMAT_YCrCb_422_SP: 313 if(width & 1) { 314 ALOGE("width is odd for the YUV422_SP format"); 315 return -EINVAL; 316 } 317 alignedh = height; 318 size = ALIGN(alignedw * alignedh * 2, 4096); 319 break; 320 case HAL_PIXEL_FORMAT_YCbCr_420_SP_VENUS: 321 alignedh = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height); 322 size = VENUS_BUFFER_SIZE(COLOR_FMT_NV12, width, height); 323 break; 324 case HAL_PIXEL_FORMAT_BLOB: 325 if(height != 1) { 326 ALOGE("%s: Buffers with format HAL_PIXEL_FORMAT_BLOB \ 327 must have height==1 ", __FUNCTION__); 328 return -EINVAL; 329 } 330 alignedh = height; 331 alignedw = width; 332 size = width; 333 break; 334 default: 335 ALOGE("unrecognized pixel format: 0x%x", format); 336 return -EINVAL; 337 } 338 339 return size; 340} 341 342// Allocate buffer from width, height and format into a 343// private_handle_t. 
It is the responsibility of the caller 344// to free the buffer using the free_buffer function 345int alloc_buffer(private_handle_t **pHnd, int w, int h, int format, int usage) 346{ 347 alloc_data data; 348 int alignedw, alignedh; 349 gralloc::IAllocController* sAlloc = 350 gralloc::IAllocController::getInstance(); 351 data.base = 0; 352 data.fd = -1; 353 data.offset = 0; 354 data.size = getBufferSizeAndDimensions(w, h, format, alignedw, alignedh); 355 data.align = getpagesize(); 356 data.uncached = useUncached(usage); 357 int allocFlags = usage; 358 359 int err = sAlloc->allocate(data, allocFlags); 360 if (0 != err) { 361 ALOGE("%s: allocate failed", __FUNCTION__); 362 return -ENOMEM; 363 } 364 365 private_handle_t* hnd = new private_handle_t(data.fd, data.size, 366 data.allocType, 0, format, 367 alignedw, alignedh); 368 hnd->base = (int) data.base; 369 hnd->offset = data.offset; 370 hnd->gpuaddr = 0; 371 *pHnd = hnd; 372 return 0; 373} 374 375void free_buffer(private_handle_t *hnd) 376{ 377 gralloc::IAllocController* sAlloc = 378 gralloc::IAllocController::getInstance(); 379 if (hnd && hnd->fd > 0) { 380 IMemAlloc* memalloc = sAlloc->getAllocator(hnd->flags); 381 memalloc->free_buffer((void*)hnd->base, hnd->size, hnd->offset, hnd->fd); 382 } 383 if(hnd) 384 delete hnd; 385 386} 387