// hwc.cpp — revision 04af919f7d16572b16a91d8b681afe42386fb4e1
1/* 2 * Copyright (C) 2010 The Android Open Source Project 3 * Copyright (C) 2012-2013, The Linux Foundation. All rights reserved. 4 * 5 * Not a Contribution, Apache license notifications and license are retained 6 * for attribution purposes only. 7 * 8 * Licensed under the Apache License, Version 2.0 (the "License"); 9 * you may not use this file except in compliance with the License. 10 * You may obtain a copy of the License at 11 * 12 * http://www.apache.org/licenses/LICENSE-2.0 13 * 14 * Unless required by applicable law or agreed to in writing, software 15 * distributed under the License is distributed on an "AS IS" BASIS, 16 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 17 * See the License for the specific language governing permissions and 18 * limitations under the License. 19 */ 20#define ATRACE_TAG (ATRACE_TAG_GRAPHICS | ATRACE_TAG_HAL) 21#include <fcntl.h> 22#include <errno.h> 23 24#include <cutils/log.h> 25#include <cutils/atomic.h> 26#include <EGL/egl.h> 27#include <utils/Trace.h> 28#include <sys/ioctl.h> 29#include <overlay.h> 30#include <overlayRotator.h> 31#include <mdp_version.h> 32#include "hwc_utils.h" 33#include "hwc_fbupdate.h" 34#include "hwc_mdpcomp.h" 35#include "external.h" 36#include "hwc_copybit.h" 37#include "profiler.h" 38 39using namespace qhwc; 40using namespace overlay; 41 42#define VSYNC_DEBUG 0 43#define BLANK_DEBUG 1 44 45static int hwc_device_open(const struct hw_module_t* module, 46 const char* name, 47 struct hw_device_t** device); 48 49static struct hw_module_methods_t hwc_module_methods = { 50 open: hwc_device_open 51}; 52 53hwc_module_t HAL_MODULE_INFO_SYM = { 54 common: { 55 tag: HARDWARE_MODULE_TAG, 56 version_major: 2, 57 version_minor: 0, 58 id: HWC_HARDWARE_MODULE_ID, 59 name: "Qualcomm Hardware Composer Module", 60 author: "CodeAurora Forum", 61 methods: &hwc_module_methods, 62 dso: 0, 63 reserved: {0}, 64 } 65}; 66 67/* 68 * Save callback functions registered to HWC 69 */ 70static void 
hwc_registerProcs(struct hwc_composer_device_1* dev, 71 hwc_procs_t const* procs) 72{ 73 ALOGI("%s", __FUNCTION__); 74 hwc_context_t* ctx = (hwc_context_t*)(dev); 75 if(!ctx) { 76 ALOGE("%s: Invalid context", __FUNCTION__); 77 return; 78 } 79 ctx->proc = procs; 80 81 // Now that we have the functions needed, kick off 82 // the uevent & vsync threads 83 init_uevent_thread(ctx); 84 init_vsync_thread(ctx); 85} 86 87//Helper 88static void reset(hwc_context_t *ctx, int numDisplays, 89 hwc_display_contents_1_t** displays) { 90 for(int i = 0; i < MAX_DISPLAYS; i++) { 91 hwc_display_contents_1_t *list = displays[i]; 92 // XXX:SurfaceFlinger no longer guarantees that this 93 // value is reset on every prepare. However, for the layer 94 // cache we need to reset it. 95 // We can probably rethink that later on 96 if (LIKELY(list && list->numHwLayers > 1)) { 97 for(uint32_t j = 0; j < list->numHwLayers; j++) { 98 if(list->hwLayers[j].compositionType != HWC_FRAMEBUFFER_TARGET) 99 list->hwLayers[j].compositionType = HWC_FRAMEBUFFER; 100 } 101 } 102 103 if(ctx->mFBUpdate[i]) 104 ctx->mFBUpdate[i]->reset(); 105 if(ctx->mCopyBit[i]) 106 ctx->mCopyBit[i]->reset(); 107 if(ctx->mLayerRotMap[i]) 108 ctx->mLayerRotMap[i]->reset(); 109 } 110} 111 112//clear prev layer prop flags and realloc for current frame 113static void reset_layer_prop(hwc_context_t* ctx, int dpy, int numAppLayers) { 114 if(ctx->layerProp[dpy]) { 115 delete[] ctx->layerProp[dpy]; 116 ctx->layerProp[dpy] = NULL; 117 } 118 ctx->layerProp[dpy] = new LayerProp[numAppLayers]; 119} 120 121static int display_commit(hwc_context_t *ctx, int dpy) { 122 int fbFd = ctx->dpyAttr[dpy].fd; 123 if(fbFd == -1) { 124 ALOGE("%s: Invalid FB fd for display: %d", __FUNCTION__, dpy); 125 return -1; 126 } 127 128 struct mdp_display_commit commit_info; 129 memset(&commit_info, 0, sizeof(struct mdp_display_commit)); 130 commit_info.flags = MDP_DISPLAY_COMMIT_OVERLAY; 131 if(ioctl(fbFd, MSMFB_DISPLAY_COMMIT, &commit_info) == -1) { 132 
ALOGE("%s: MSMFB_DISPLAY_COMMIT for primary failed", __FUNCTION__); 133 return -errno; 134 } 135 return 0; 136} 137 138static int hwc_prepare_primary(hwc_composer_device_1 *dev, 139 hwc_display_contents_1_t *list) { 140 hwc_context_t* ctx = (hwc_context_t*)(dev); 141 const int dpy = HWC_DISPLAY_PRIMARY; 142 if(UNLIKELY(!ctx->mBasePipeSetup)) 143 setupBasePipe(ctx); 144 if (LIKELY(list && list->numHwLayers > 1) && 145 ctx->dpyAttr[dpy].isActive) { 146 reset_layer_prop(ctx, dpy, list->numHwLayers - 1); 147 uint32_t last = list->numHwLayers - 1; 148 hwc_layer_1_t *fbLayer = &list->hwLayers[last]; 149 if(fbLayer->handle) { 150 setListStats(ctx, list, dpy); 151 int fbZOrder = ctx->mMDPComp[dpy]->prepare(ctx, list); 152 if(fbZOrder >= 0) 153 ctx->mFBUpdate[dpy]->prepare(ctx, list, fbZOrder); 154 155 if (ctx->mMDP.version < qdutils::MDP_V4_0) { 156 if((fbZOrder >= 0) && ctx->mCopyBit[dpy]) 157 ctx->mCopyBit[dpy]->prepare(ctx, list, dpy); 158 } 159 } 160 } 161 return 0; 162} 163 164static int hwc_prepare_external(hwc_composer_device_1 *dev, 165 hwc_display_contents_1_t *list, int dpy) { 166 167 hwc_context_t* ctx = (hwc_context_t*)(dev); 168 Locker::Autolock _l(ctx->mExtLock); 169 170 if (LIKELY(list && list->numHwLayers > 1) && 171 ctx->dpyAttr[dpy].isActive && 172 ctx->dpyAttr[dpy].connected) { 173 reset_layer_prop(ctx, dpy, list->numHwLayers - 1); 174 uint32_t last = list->numHwLayers - 1; 175 hwc_layer_1_t *fbLayer = &list->hwLayers[last]; 176 if(!ctx->dpyAttr[dpy].isPause) { 177 if(fbLayer->handle) { 178 ctx->mExtDispConfiguring = false; 179 setListStats(ctx, list, dpy); 180 int fbZOrder = ctx->mMDPComp[dpy]->prepare(ctx, list); 181 if(fbZOrder >= 0) 182 ctx->mFBUpdate[dpy]->prepare(ctx, list, fbZOrder); 183 184 /* Temporarily commenting out C2D until we support partial 185 copybit composition for mixed mode MDP 186 187 if((fbZOrder >= 0) && ctx->mCopyBit[dpy]) 188 ctx->mCopyBit[dpy]->prepare(ctx, list, dpy); 189 */ 190 } 191 } else { 192 // External Display is in 
Pause state. 193 // ToDo: 194 // Mark all application layers as OVERLAY so that 195 // GPU will not compose. This is done for power 196 // optimization 197 } 198 } 199 return 0; 200} 201 202static int hwc_prepare_virtual(hwc_composer_device_1 *dev, 203 hwc_display_contents_1_t *list, int dpy) { 204 //XXX: Fix when framework support is added 205 return 0; 206} 207 208static int hwc_prepare(hwc_composer_device_1 *dev, size_t numDisplays, 209 hwc_display_contents_1_t** displays) 210{ 211 int ret = 0; 212 hwc_context_t* ctx = (hwc_context_t*)(dev); 213 Locker::Autolock _l(ctx->mBlankLock); 214 reset(ctx, numDisplays, displays); 215 216 ctx->mOverlay->configBegin(); 217 ctx->mRotMgr->configBegin(); 218 Overlay::setDMAMode(Overlay::DMA_LINE_MODE); 219 220 for (int32_t i = numDisplays - 1; i >= 0; i--) { 221 hwc_display_contents_1_t *list = displays[i]; 222 switch(i) { 223 case HWC_DISPLAY_PRIMARY: 224 ret = hwc_prepare_primary(dev, list); 225 break; 226 case HWC_DISPLAY_EXTERNAL: 227 ret = hwc_prepare_external(dev, list, i); 228 break; 229 case HWC_DISPLAY_VIRTUAL: 230 ret = hwc_prepare_virtual(dev, list, i); 231 break; 232 default: 233 ret = -EINVAL; 234 } 235 } 236 237 ctx->mOverlay->configDone(); 238 ctx->mRotMgr->configDone(); 239 240 return ret; 241} 242 243static int hwc_eventControl(struct hwc_composer_device_1* dev, int dpy, 244 int event, int enable) 245{ 246 int ret = 0; 247 hwc_context_t* ctx = (hwc_context_t*)(dev); 248 Locker::Autolock _l(ctx->mBlankLock); 249 if(!ctx->dpyAttr[dpy].isActive) { 250 ALOGE("Display is blanked - Cannot %s vsync", 251 enable ? 
"enable" : "disable"); 252 return -EINVAL; 253 } 254 255 switch(event) { 256 case HWC_EVENT_VSYNC: 257 if (ctx->vstate.enable == enable) 258 break; 259 ret = hwc_vsync_control(ctx, dpy, enable); 260 if(ret == 0) 261 ctx->vstate.enable = !!enable; 262 ALOGD_IF (VSYNC_DEBUG, "VSYNC state changed to %s", 263 (enable)?"ENABLED":"DISABLED"); 264 break; 265 default: 266 ret = -EINVAL; 267 } 268 return ret; 269} 270 271static int hwc_blank(struct hwc_composer_device_1* dev, int dpy, int blank) 272{ 273 ATRACE_CALL(); 274 hwc_context_t* ctx = (hwc_context_t*)(dev); 275 276 Locker::Autolock _l(ctx->mBlankLock); 277 int ret = 0; 278 ALOGD_IF(BLANK_DEBUG, "%s: %s display: %d", __FUNCTION__, 279 blank==1 ? "Blanking":"Unblanking", dpy); 280 if(blank) { 281 // free up all the overlay pipes in use 282 // when we get a blank for either display 283 // makes sure that all pipes are freed 284 ctx->mOverlay->configBegin(); 285 ctx->mOverlay->configDone(); 286 ctx->mRotMgr->clear(); 287 } 288 switch(dpy) { 289 case HWC_DISPLAY_PRIMARY: 290 if(blank) { 291 ret = ioctl(ctx->dpyAttr[dpy].fd, FBIOBLANK, 292 FB_BLANK_POWERDOWN); 293 } else { 294 ret = ioctl(ctx->dpyAttr[dpy].fd, FBIOBLANK,FB_BLANK_UNBLANK); 295 } 296 break; 297 case HWC_DISPLAY_EXTERNAL: 298 case HWC_DISPLAY_VIRTUAL: 299 if(blank) { 300 // call external framebuffer commit on blank, 301 // so that any pipe unsets gets committed 302 if (display_commit(ctx, dpy) < 0) { 303 ret = -1; 304 ALOGE("%s:post failed for external display !! ", 305 __FUNCTION__); 306 } 307 } else { 308 } 309 break; 310 default: 311 return -EINVAL; 312 } 313 // Enable HPD here, as during bootup unblank is called 314 // when SF is completely initialized 315 ctx->mExtDisplay->setHPD(1); 316 if(ret == 0){ 317 ctx->dpyAttr[dpy].isActive = !blank; 318 } else { 319 ALOGE("%s: Failed in %s display: %d error:%s", __FUNCTION__, 320 blank==1 ? 
"blanking":"unblanking", dpy, strerror(errno)); 321 return ret; 322 } 323 324 ALOGD_IF(BLANK_DEBUG, "%s: Done %s display: %d", __FUNCTION__, 325 blank==1 ? "blanking":"unblanking", dpy); 326 return 0; 327} 328 329static int hwc_query(struct hwc_composer_device_1* dev, 330 int param, int* value) 331{ 332 hwc_context_t* ctx = (hwc_context_t*)(dev); 333 int supported = HWC_DISPLAY_PRIMARY_BIT; 334 335 switch (param) { 336 case HWC_BACKGROUND_LAYER_SUPPORTED: 337 // Not supported for now 338 value[0] = 0; 339 break; 340 case HWC_DISPLAY_TYPES_SUPPORTED: 341 if(ctx->mMDP.hasOverlay) 342 supported |= HWC_DISPLAY_EXTERNAL_BIT; 343 value[0] = supported; 344 break; 345 default: 346 return -EINVAL; 347 } 348 return 0; 349 350} 351 352 353static int hwc_set_primary(hwc_context_t *ctx, hwc_display_contents_1_t* list) { 354 ATRACE_CALL(); 355 int ret = 0; 356 const int dpy = HWC_DISPLAY_PRIMARY; 357 358 if (LIKELY(list) && ctx->dpyAttr[dpy].isActive) { 359 uint32_t last = list->numHwLayers - 1; 360 hwc_layer_1_t *fbLayer = &list->hwLayers[last]; 361 int fd = -1; //FenceFD from the Copybit(valid in async mode) 362 bool copybitDone = false; 363 if(ctx->mCopyBit[dpy]) 364 copybitDone = ctx->mCopyBit[dpy]->draw(ctx, list, dpy, &fd); 365 if(list->numHwLayers > 1) 366 hwc_sync(ctx, list, dpy, fd); 367 368 if (!ctx->mMDPComp[dpy]->draw(ctx, list)) { 369 ALOGE("%s: MDPComp draw failed", __FUNCTION__); 370 ret = -1; 371 } 372 373 //TODO We dont check for SKIP flag on this layer because we need PAN 374 //always. 
Last layer is always FB 375 private_handle_t *hnd = (private_handle_t *)fbLayer->handle; 376 if(copybitDone) { 377 hnd = ctx->mCopyBit[dpy]->getCurrentRenderBuffer(); 378 } 379 380 if(hnd) { 381 if (!ctx->mFBUpdate[dpy]->draw(ctx, hnd)) { 382 ALOGE("%s: FBUpdate draw failed", __FUNCTION__); 383 ret = -1; 384 } 385 } 386 387 if (display_commit(ctx, dpy) < 0) { 388 ALOGE("%s: display commit fail!", __FUNCTION__); 389 ret = -1; 390 } 391 } 392 393 closeAcquireFds(list); 394 return ret; 395} 396 397static int hwc_set_external(hwc_context_t *ctx, 398 hwc_display_contents_1_t* list, int dpy) 399{ 400 ATRACE_CALL(); 401 int ret = 0; 402 Locker::Autolock _l(ctx->mExtLock); 403 404 if (LIKELY(list) && ctx->dpyAttr[dpy].isActive && 405 !ctx->dpyAttr[dpy].isPause && 406 ctx->dpyAttr[dpy].connected) { 407 uint32_t last = list->numHwLayers - 1; 408 hwc_layer_1_t *fbLayer = &list->hwLayers[last]; 409 int fd = -1; //FenceFD from the Copybit(valid in async mode) 410 bool copybitDone = false; 411 if(ctx->mCopyBit[dpy]) 412 copybitDone = ctx->mCopyBit[dpy]->draw(ctx, list, dpy, &fd); 413 414 if(list->numHwLayers > 1) 415 hwc_sync(ctx, list, dpy, fd); 416 417 if (!ctx->mMDPComp[dpy]->draw(ctx, list)) { 418 ALOGE("%s: MDPComp draw failed", __FUNCTION__); 419 ret = -1; 420 } 421 422 private_handle_t *hnd = (private_handle_t *)fbLayer->handle; 423 if(copybitDone) { 424 hnd = ctx->mCopyBit[dpy]->getCurrentRenderBuffer(); 425 } 426 427 if(hnd) { 428 if (!ctx->mFBUpdate[dpy]->draw(ctx, hnd)) { 429 ALOGE("%s: FBUpdate::draw fail!", __FUNCTION__); 430 ret = -1; 431 } 432 } 433 434 if (display_commit(ctx, dpy) < 0) { 435 ALOGE("%s: display commit fail!", __FUNCTION__); 436 ret = -1; 437 } 438 } 439 440 closeAcquireFds(list); 441 return ret; 442} 443 444static int hwc_set_virtual(hwc_context_t *ctx, 445 hwc_display_contents_1_t* list, int dpy) 446{ 447 //XXX: Implement set. 
448 closeAcquireFds(list); 449 if (list) { 450 // SF assumes HWC waits for the acquire fence and returns a new fence 451 // that signals when we're done. Since we don't wait, and also don't 452 // touch the buffer, we can just handle the acquire fence back to SF 453 // as the retire fence. 454 list->retireFenceFd = list->outbufAcquireFenceFd; 455 } 456 return 0; 457} 458 459 460static int hwc_set(hwc_composer_device_1 *dev, 461 size_t numDisplays, 462 hwc_display_contents_1_t** displays) 463{ 464 int ret = 0; 465 hwc_context_t* ctx = (hwc_context_t*)(dev); 466 Locker::Autolock _l(ctx->mBlankLock); 467 for (uint32_t i = 0; i < numDisplays; i++) { 468 hwc_display_contents_1_t* list = displays[i]; 469 switch(i) { 470 case HWC_DISPLAY_PRIMARY: 471 ret = hwc_set_primary(ctx, list); 472 break; 473 case HWC_DISPLAY_EXTERNAL: 474 ret = hwc_set_external(ctx, list, i); 475 break; 476 case HWC_DISPLAY_VIRTUAL: 477 ret = hwc_set_virtual(ctx, list, i); 478 break; 479 default: 480 ret = -EINVAL; 481 } 482 } 483 // This is only indicative of how many times SurfaceFlinger posts 484 // frames to the display. 485 CALC_FPS(); 486 MDPComp::resetIdleFallBack(); 487 ctx->mVideoTransFlag = false; 488 return ret; 489} 490 491int hwc_getDisplayConfigs(struct hwc_composer_device_1* dev, int disp, 492 uint32_t* configs, size_t* numConfigs) { 493 int ret = 0; 494 hwc_context_t* ctx = (hwc_context_t*)(dev); 495 //in 1.1 there is no way to choose a config, report as config id # 0 496 //This config is passed to getDisplayAttributes. Ignore for now. 
497 switch(disp) { 498 case HWC_DISPLAY_PRIMARY: 499 if(*numConfigs > 0) { 500 configs[0] = 0; 501 *numConfigs = 1; 502 } 503 ret = 0; //NO_ERROR 504 break; 505 case HWC_DISPLAY_EXTERNAL: 506 ret = -1; //Not connected 507 if(ctx->dpyAttr[HWC_DISPLAY_EXTERNAL].connected) { 508 ret = 0; //NO_ERROR 509 if(*numConfigs > 0) { 510 configs[0] = 0; 511 *numConfigs = 1; 512 } 513 } 514 break; 515 } 516 return ret; 517} 518 519int hwc_getDisplayAttributes(struct hwc_composer_device_1* dev, int disp, 520 uint32_t config, const uint32_t* attributes, int32_t* values) { 521 522 hwc_context_t* ctx = (hwc_context_t*)(dev); 523 //If hotpluggable displays are inactive return error 524 if(disp == HWC_DISPLAY_EXTERNAL && !ctx->dpyAttr[disp].connected) { 525 return -1; 526 } 527 528 //From HWComposer 529 static const uint32_t DISPLAY_ATTRIBUTES[] = { 530 HWC_DISPLAY_VSYNC_PERIOD, 531 HWC_DISPLAY_WIDTH, 532 HWC_DISPLAY_HEIGHT, 533 HWC_DISPLAY_DPI_X, 534 HWC_DISPLAY_DPI_Y, 535 HWC_DISPLAY_NO_ATTRIBUTE, 536 }; 537 538 const int NUM_DISPLAY_ATTRIBUTES = (sizeof(DISPLAY_ATTRIBUTES) / 539 sizeof(DISPLAY_ATTRIBUTES)[0]); 540 541 for (size_t i = 0; i < NUM_DISPLAY_ATTRIBUTES - 1; i++) { 542 switch (attributes[i]) { 543 case HWC_DISPLAY_VSYNC_PERIOD: 544 values[i] = ctx->dpyAttr[disp].vsync_period; 545 break; 546 case HWC_DISPLAY_WIDTH: 547 values[i] = ctx->dpyAttr[disp].xres; 548 ALOGD("%s disp = %d, width = %d",__FUNCTION__, disp, 549 ctx->dpyAttr[disp].xres); 550 break; 551 case HWC_DISPLAY_HEIGHT: 552 values[i] = ctx->dpyAttr[disp].yres; 553 ALOGD("%s disp = %d, height = %d",__FUNCTION__, disp, 554 ctx->dpyAttr[disp].yres); 555 break; 556 case HWC_DISPLAY_DPI_X: 557 values[i] = (int32_t) (ctx->dpyAttr[disp].xdpi*1000.0); 558 break; 559 case HWC_DISPLAY_DPI_Y: 560 values[i] = (int32_t) (ctx->dpyAttr[disp].ydpi*1000.0); 561 break; 562 default: 563 ALOGE("Unknown display attribute %d", 564 attributes[i]); 565 return -EINVAL; 566 } 567 } 568 return 0; 569} 570 571void hwc_dump(struct 
hwc_composer_device_1* dev, char *buff, int buff_len) 572{ 573 hwc_context_t* ctx = (hwc_context_t*)(dev); 574 android::String8 aBuf(""); 575 dumpsys_log(aBuf, "Qualcomm HWC state:\n"); 576 dumpsys_log(aBuf, " MDPVersion=%d\n", ctx->mMDP.version); 577 dumpsys_log(aBuf, " DisplayPanel=%c\n", ctx->mMDP.panel); 578 for(int dpy = 0; dpy < MAX_DISPLAYS; dpy++) { 579 if(ctx->mMDPComp[dpy]) 580 ctx->mMDPComp[dpy]->dump(aBuf); 581 } 582 char ovDump[2048] = {'\0'}; 583 ctx->mOverlay->getDump(ovDump, 2048); 584 dumpsys_log(aBuf, ovDump); 585 ovDump[0] = '\0'; 586 ctx->mRotMgr->getDump(ovDump, 1024); 587 dumpsys_log(aBuf, ovDump); 588 strlcpy(buff, aBuf.string(), buff_len); 589} 590 591static int hwc_device_close(struct hw_device_t *dev) 592{ 593 if(!dev) { 594 ALOGE("%s: NULL device pointer", __FUNCTION__); 595 return -1; 596 } 597 closeContext((hwc_context_t*)dev); 598 free(dev); 599 600 return 0; 601} 602 603static int hwc_device_open(const struct hw_module_t* module, const char* name, 604 struct hw_device_t** device) 605{ 606 int status = -EINVAL; 607 608 if (!strcmp(name, HWC_HARDWARE_COMPOSER)) { 609 struct hwc_context_t *dev; 610 dev = (hwc_context_t*)malloc(sizeof(*dev)); 611 memset(dev, 0, sizeof(*dev)); 612 613 //Initialize hwc context 614 initContext(dev); 615 616 //Setup HWC methods 617 dev->device.common.tag = HARDWARE_DEVICE_TAG; 618 dev->device.common.version = HWC_DEVICE_API_VERSION_1_2; 619 dev->device.common.module = const_cast<hw_module_t*>(module); 620 dev->device.common.close = hwc_device_close; 621 dev->device.prepare = hwc_prepare; 622 dev->device.set = hwc_set; 623 dev->device.eventControl = hwc_eventControl; 624 dev->device.blank = hwc_blank; 625 dev->device.query = hwc_query; 626 dev->device.registerProcs = hwc_registerProcs; 627 dev->device.dump = hwc_dump; 628 dev->device.getDisplayConfigs = hwc_getDisplayConfigs; 629 dev->device.getDisplayAttributes = hwc_getDisplayAttributes; 630 *device = &dev->device.common; 631 status = 0; 632 } 633 return 
status; 634} 635