/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/


#define LOG_TAG "QCamera3Channel"

// To remove
#include <cutils/properties.h>

// System dependencies
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "hardware/gralloc.h"
#include <utils/Timers.h>
#include <sys/stat.h>

// Camera dependencies
#include "QCamera3Channel.h"
#include "QCamera3HWI.h"
#include "QCameraTrace.h"
#include "QCameraFormat.h"
extern "C" {
#include "mm_camera_dbg.h"
}

using namespace android;

namespace qcamera {
#define IS_BUFFER_ERROR(x) (((x) & V4L2_BUF_FLAG_ERROR) == V4L2_BUF_FLAG_ERROR)

/*===========================================================================
 * FUNCTION   : QCamera3Channel
 *
 * DESCRIPTION: constructor of QCamera3Channel
 *
 * PARAMETERS :
 *   @cam_handle : camera handle
 *   @cam_ops    : ptr to camera ops table
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3Channel::QCamera3Channel(uint32_t cam_handle,
                               uint32_t channel_handle,
                               mm_camera_ops_t *cam_ops,
                               channel_cb_routine cb_routine,
                               channel_cb_buffer_err cb_buffer_err,
                               cam_padding_info_t *paddingInfo,
                               cam_feature_mask_t postprocess_mask,
                               void *userData, uint32_t numBuffers)
{
    m_camHandle = cam_handle;
    m_handle = channel_handle;
    m_camOps = cam_ops;
    m_bIsActive = false;
    m_bUBWCenable = true;

    m_numStreams = 0;
    memset(mStreams, 0, sizeof(mStreams));
    mUserData = userData;

    mStreamInfoBuf = NULL;
    mChannelCB = cb_routine;
    mChannelCbBufErr = cb_buffer_err;
    mPaddingInfo = *paddingInfo;
    mPaddingInfo.offset_info.offset_x = 0;
    mPaddingInfo.offset_info.offset_y = 0;

    mPostProcMask = postprocess_mask;

    mIsType = IS_TYPE_NONE;
    mNumBuffers = numBuffers;
    mPerFrameMapUnmapEnable = true;
    mDumpFrmCnt = 0;
    mNRMode = 0;

    mYUVDump = property_get_int32("persist.camera.dumpimg", 0);
    mMapStreamBuffers = mYUVDump;
}
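
/* Layout of persist.camera.dumpimg as decoded in dumpYUV() below: the low bits
 * are tested against the QCAMERA_DUMP_FRM_* type of each frame, bits [15:8]
 * select a skip interval (0 is treated as 1) and bits [31:16] select how many
 * frames to dump (0 defaults to 10, capped at 256). A non-zero value also sets
 * mMapStreamBuffers, since dumpYUV() reads frame->buffer directly and therefore
 * needs mapped stream buffers.
 */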

/*===========================================================================
 * FUNCTION   : ~QCamera3Channel
 *
 * DESCRIPTION: destructor of QCamera3Channel
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3Channel::~QCamera3Channel()
{
}

/*===========================================================================
 * FUNCTION   : destroy
 *
 * DESCRIPTION: internal destructor of QCamera3Channel called by the subclasses.
 *              This destructor will call pure virtual functions: stop will
 *              eventually call QCamera3Stream::putBufs, and the putBufs function
 *              will call QCamera3Channel::putStreamBufs which is pure virtual
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3Channel::destroy()
{
    if (m_bIsActive)
        stop();

    for (uint32_t i = 0; i < m_numStreams; i++) {
        if (mStreams[i] != NULL) {
            delete mStreams[i];
            mStreams[i] = 0;
        }
    }
    m_numStreams = 0;
}

/*===========================================================================
 * FUNCTION   : addStream
 *
 * DESCRIPTION: add a stream into channel
 *
 * PARAMETERS :
 *   @streamType      : stream type
 *   @streamFormat    : stream format
 *   @streamDim       : stream dimension
 *   @streamRotation  : rotation of the stream
 *   @minStreamBufNum : minimum buffer count for the particular stream type
 *   @postprocessMask : post-process feature mask
 *   @isType          : type of image stabilization required on the stream
 *   @batchSize       : batch size for the stream
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3Channel::addStream(cam_stream_type_t streamType,
                                  cam_format_t streamFormat,
                                  cam_dimension_t streamDim,
                                  cam_rotation_t streamRotation,
                                  uint8_t minStreamBufNum,
                                  cam_feature_mask_t postprocessMask,
                                  cam_is_type_t isType,
                                  uint32_t batchSize)
{
    int32_t rc = NO_ERROR;

    if (m_numStreams >= 1) {
        LOGE("Only one stream per channel supported in v3 Hal");
        return BAD_VALUE;
    }

    if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) {
        LOGE("stream number (%d) exceeds max limit (%d)",
                m_numStreams, MAX_STREAM_NUM_IN_BUNDLE);
        return BAD_VALUE;
    }
    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
                                               m_handle,
                                               m_camOps,
                                               &mPaddingInfo,
                                               this,
                                               mMapStreamBuffers);
    if (pStream == NULL) {
        LOGE("No mem for Stream");
        return NO_MEMORY;
    }
    LOGD("batch size is %d", batchSize);

    rc = pStream->init(streamType, streamFormat, streamDim, streamRotation,
            NULL, minStreamBufNum, postprocessMask, isType, batchSize,
            streamCbRoutine, this);
    if (rc == 0) {
        mStreams[m_numStreams] = pStream;
        m_numStreams++;
    } else {
        delete pStream;
    }
    return rc;
}

/*===========================================================================
 * FUNCTION   : start
 *
 * DESCRIPTION: start channel, which will start all streams belonging to this
 *              channel
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3Channel::start()
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_START);
    int32_t rc = NO_ERROR;

    if (m_numStreams > 1) {
        LOGW("bundle not supported");
    } else if (m_numStreams == 0) {
        return NO_INIT;
    }

    if(m_bIsActive) {
        LOGW("Attempt to start active channel");
        return rc;
    }

    for (uint32_t i = 0; i < m_numStreams; i++) {
        if (mStreams[i] != NULL) {
            mStreams[i]->start();
        }
    }

    m_bIsActive = true;

    return rc;
}

/*===========================================================================
 * FUNCTION   : stop
 *
 * DESCRIPTION: stop a channel, which will stop all streams belonging to this
 *              channel
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3Channel::stop()
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_STOP);
    int32_t rc = NO_ERROR;
    if(!m_bIsActive) {
        LOGE("Attempt to stop inactive channel");
        return rc;
    }

    for (uint32_t i = 0; i < m_numStreams; i++) {
        if (mStreams[i] != NULL) {
            mStreams[i]->stop();
        }
    }

    m_bIsActive = false;
    return rc;
}

/*===========================================================================
 * FUNCTION   : setBatchSize
 *
 * DESCRIPTION: Set batch size for the channel. This is a dummy implementation
 *              for the base class
 *
 * PARAMETERS :
 *   @batchSize : Number of image buffers in a batch
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success always
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3Channel::setBatchSize(uint32_t batchSize)
{
    LOGD("Dummy method. batchSize: %d unused ", batchSize);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : queueBatchBuf
 *
 * DESCRIPTION: This is a dummy implementation for the base class
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success always
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3Channel::queueBatchBuf()
{
    LOGD("Dummy method. Unused ");
Unused "); 310 return NO_ERROR; 311} 312 313/*=========================================================================== 314 * FUNCTION : setPerFrameMapUnmap 315 * 316 * DESCRIPTION: Sets internal enable flag 317 * 318 * PARAMETERS : 319 * @enable : Bool value for the enable flag 320 * 321 * RETURN : int32_t type of status 322 * NO_ERROR -- success always 323 * none-zero failure code 324 *==========================================================================*/ 325int32_t QCamera3Channel::setPerFrameMapUnmap(bool enable) 326{ 327 mPerFrameMapUnmapEnable = enable; 328 return NO_ERROR; 329} 330 331/*=========================================================================== 332 * FUNCTION : flush 333 * 334 * DESCRIPTION: flush a channel 335 * 336 * PARAMETERS : none 337 * 338 * RETURN : int32_t type of status 339 * NO_ERROR -- success 340 * none-zero failure code 341 *==========================================================================*/ 342int32_t QCamera3Channel::flush() 343{ 344 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CH_FLUSH); 345 return NO_ERROR; 346} 347 348/*=========================================================================== 349 * FUNCTION : bufDone 350 * 351 * DESCRIPTION: return a stream buf back to kernel 352 * 353 * PARAMETERS : 354 * @recvd_frame : stream buf frame to be returned 355 * 356 * RETURN : int32_t type of status 357 * NO_ERROR -- success 358 * none-zero failure code 359 *==========================================================================*/ 360int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame) 361{ 362 int32_t rc = NO_ERROR; 363 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) { 364 if (recvd_frame->bufs[i] != NULL) { 365 for (uint32_t j = 0; j < m_numStreams; j++) { 366 if (mStreams[j] != NULL && 367 mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) { 368 rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx); 369 break; // break loop j 370 } 371 } 372 } 373 } 374 375 return rc; 376} 377 378int32_t QCamera3Channel::setBundleInfo(const cam_bundle_config_t &bundleInfo) 379{ 380 int32_t rc = NO_ERROR; 381 cam_stream_parm_buffer_t param; 382 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t)); 383 param.type = CAM_STREAM_PARAM_TYPE_SET_BUNDLE_INFO; 384 param.bundleInfo = bundleInfo; 385 if (m_numStreams > 0 && mStreams[0]) { 386 rc = mStreams[0]->setParameter(param); 387 if (rc != NO_ERROR) { 388 LOGE("stream setParameter for set bundle failed"); 389 } 390 } 391 return rc; 392} 393 394/*=========================================================================== 395 * FUNCTION : getStreamTypeMask 396 * 397 * DESCRIPTION: Get bit mask of all stream types in this channel 398 * 399 * PARAMETERS : None 400 * 401 * RETURN : Bit mask of all stream types in this channel 402 *==========================================================================*/ 403uint32_t QCamera3Channel::getStreamTypeMask() 404{ 405 uint32_t mask = 0; 406 for (uint32_t i = 0; i < m_numStreams; i++) { 407 mask |= (1U << mStreams[i]->getMyType()); 408 } 409 return mask; 410} 411 412/*=========================================================================== 413 * FUNCTION : getStreamID 414 * 415 * DESCRIPTION: Get StreamID of requested stream type 416 * 417 * PARAMETERS : streamMask 418 * 419 * RETURN : Stream ID 420 *==========================================================================*/ 421uint32_t QCamera3Channel::getStreamID(uint32_t streamMask) 422{ 423 uint32_t streamID = 0; 424 for (uint32_t i = 0; i < m_numStreams; i++) { 425 if (streamMask 
            streamID = mStreams[i]->getMyServerID();
            break;
        }
    }
    return streamID;
}

/*===========================================================================
 * FUNCTION   : getStreamByHandle
 *
 * DESCRIPTION: return stream object by stream handle
 *
 * PARAMETERS :
 *   @streamHandle : stream handle
 *
 * RETURN     : stream object. NULL if not found
 *==========================================================================*/
QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle)
{
    for (uint32_t i = 0; i < m_numStreams; i++) {
        if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) {
            return mStreams[i];
        }
    }
    return NULL;
}

/*===========================================================================
 * FUNCTION   : getStreamByIndex
 *
 * DESCRIPTION: return stream object by index
 *
 * PARAMETERS :
 *   @index : index of the stream
 *
 * RETURN     : stream object. NULL if not found
 *==========================================================================*/
QCamera3Stream *QCamera3Channel::getStreamByIndex(uint32_t index)
{
    if (index < m_numStreams) {
        return mStreams[index];
    }
    return NULL;
}

/*===========================================================================
 * FUNCTION   : streamCbRoutine
 *
 * DESCRIPTION: callback routine for stream
 *
 * PARAMETERS :
 *   @super_frame : the super frame with filled buffer
 *   @stream      : stream on which the buffer was filled
 *   @userdata    : user data pointer, here the channel object
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
                QCamera3Stream *stream, void *userdata)
{
    QCamera3Channel *channel = (QCamera3Channel *)userdata;
    if (channel == NULL) {
        LOGE("invalid channel pointer");
        return;
    }
    channel->streamCbRoutine(super_frame, stream);
}

/*===========================================================================
 * FUNCTION   : dumpYUV
 *
 * DESCRIPTION: function to dump the YUV data from ISP/pproc
 *
 * PARAMETERS :
 *   @frame     : frame to be dumped
 *   @dim       : dimension of the stream
 *   @offset    : offset of the data
 *   @dump_type : QCAMERA_DUMP_FRM_* type of the frame being dumped
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3Channel::dumpYUV(mm_camera_buf_def_t *frame, cam_dimension_t dim,
        cam_frame_len_offset_t offset, uint8_t dump_type)
{
    char buf[FILENAME_MAX];
    memset(buf, 0, sizeof(buf));
    static int counter = 0;
    if (mYUVDump & dump_type) {
        mFrmNum = ((mYUVDump & 0xffff0000) >> 16);
        if (mFrmNum == 0) {
            mFrmNum = 10;
        }
        if (mFrmNum > 256) {
            mFrmNum = 256;
        }
        mSkipMode = ((mYUVDump & 0x0000ff00) >> 8);
        if (mSkipMode == 0) {
            mSkipMode = 1;
        }
        if (mDumpSkipCnt == 0) {
            mDumpSkipCnt = 1;
        }
        if (mDumpSkipCnt % mSkipMode == 0) {
            if (mDumpFrmCnt < mFrmNum) {
                /* Note that the image dimension will be the unrotated stream dimension.
                 * If you feel that the image would have been rotated during reprocess
                 * then swap the dimensions while opening the file
                 * */
                switch (dump_type) {
                    case QCAMERA_DUMP_FRM_PREVIEW:
                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"p_%d_%d_%dx%d.yuv",
                                counter, frame->frame_idx, dim.width, dim.height);
                        break;
                    case QCAMERA_DUMP_FRM_VIDEO:
                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"v_%d_%d_%dx%d.yuv",
                                counter, frame->frame_idx, dim.width, dim.height);
                        break;
                    case QCAMERA_DUMP_FRM_INPUT_JPEG:
                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.yuv",
                                counter, frame->frame_idx, dim.width, dim.height);
                        break;
                    case QCAMERA_DUMP_FRM_INPUT_REPROCESS:
                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"ir_%d_%d_%dx%d.yuv",
                                counter, frame->frame_idx, dim.width, dim.height);
                        break;
                    case QCAMERA_DUMP_FRM_CALLBACK:
                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"c_%d_%d_%dx%d.yuv",
                                counter, frame->frame_idx, dim.width, dim.height);
                        break;
                    case QCAMERA_DUMP_FRM_OUTPUT_JPEG:
                        snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"j_%d_%d_%dx%d.jpg",
                                counter, frame->frame_idx, dim.width, dim.height);
                        break;
                    default:
                        LOGE("dumping not enabled for stream type %d", dump_type);
                        break;
                }
                counter++;
                int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
                ssize_t written_len = 0;
                if (file_fd >= 0) {
                    void *data = NULL;
                    fchmod(file_fd, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH);
                    if (dump_type == QCAMERA_DUMP_FRM_OUTPUT_JPEG) {
                        written_len = write(file_fd, frame->buffer, frame->frame_len);
                    } else {
                        for (uint32_t i = 0; i < offset.num_planes; i++) {
                            uint32_t index = offset.mp[i].offset;
                            if (i > 0) {
                                index += offset.mp[i-1].len;
                            }
                            for (int j = 0; j < offset.mp[i].height; j++) {
                                data = (void *)((uint8_t *)frame->buffer + index);
                                written_len += write(file_fd, data,
                                        (size_t)offset.mp[i].width);
                                index += (uint32_t)offset.mp[i].stride;
                            }
                        }
                    }
                    LOGH("written number of bytes %ld\n", written_len);
                    mDumpFrmCnt++;
                    frame->cache_flags |= CPU_HAS_READ;
                    close(file_fd);
                } else {
                    LOGE("failed to open file to dump image");
                }
            }
        } else {
            mDumpSkipCnt++;
        }
    }
}

/*===========================================================================
 * FUNCTION   : isUBWCEnabled
 *
 * DESCRIPTION: Function to get UBWC hardware support.
 *
 * PARAMETERS : None
 *
 * RETURN     : TRUE  -- UBWC format supported
 *              FALSE -- UBWC is not supported.
 *==========================================================================*/
bool QCamera3Channel::isUBWCEnabled()
{
#ifdef UBWC_PRESENT
    char value[PROPERTY_VALUE_MAX];
    int prop_value = 0;
    memset(value, 0, sizeof(value));
    property_get("debug.gralloc.gfx_ubwc_disable", value, "0");
    prop_value = atoi(value);
    if (prop_value) {
        return FALSE;
    }

    //Disable UBWC if EzTune is enabled.
    //EzTune processes the CPP output frame and cannot understand UBWC.
    memset(value, 0, sizeof(value));
    property_get("persist.camera.eztune.enable", value, "0");
    prop_value = atoi(value);
    if (prop_value) {
        return FALSE;
    }
    return TRUE;
#else
    return FALSE;
#endif
}

/*===========================================================================
 * FUNCTION   : setUBWCEnabled
 *
 * DESCRIPTION: set UBWC enable
 *
 * PARAMETERS : UBWC enable value
 *
 * RETURN     : none
 *
 *==========================================================================*/
void QCamera3Channel::setUBWCEnabled(bool val)
{
    m_bUBWCenable = val;
}

/*===========================================================================
 * FUNCTION   : getStreamDefaultFormat
 *
 * DESCRIPTION: return default buffer format for the stream
 *
 * PARAMETERS : type : Stream type
 *
 * RETURN     : format for stream type
 *
 *==========================================================================*/
cam_format_t QCamera3Channel::getStreamDefaultFormat(cam_stream_type_t type,
        uint32_t width, uint32_t height, bool forcePreviewUBWC, cam_is_type_t isType)
{
    cam_format_t streamFormat;

    switch (type) {
    case CAM_STREAM_TYPE_PREVIEW:
        if (isUBWCEnabled()) {

            char prop[PROPERTY_VALUE_MAX];
            int pFormat;
            memset(prop, 0, sizeof(prop));
            property_get("persist.camera.preview.ubwc", prop, "1");
            pFormat = atoi(prop);

            // When goog_zoom is linked to the preview stream, disable UBWC for preview
            property_get("persist.camera.gzoom.at", prop, "0");
            bool is_goog_zoom_preview_enabled = ((atoi(prop) & 2) > 0) && isType == IS_TYPE_EIS_3_0;

            if (pFormat == 1 && forcePreviewUBWC && !is_goog_zoom_preview_enabled) {
                streamFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
            } else {
                /* Changed to macro to ensure format sent to gralloc for preview
                   is also changed if the preview format is changed at camera HAL */
                streamFormat = PREVIEW_STREAM_FORMAT;
            }
        } else {
            /* Changed to macro to ensure format sent to gralloc for preview
               is also changed if the preview format is changed at camera HAL */
            streamFormat = PREVIEW_STREAM_FORMAT;
        }
        break;
    case CAM_STREAM_TYPE_VIDEO:
    {
        /* Disable UBWC for smaller video resolutions due to CPP downscale
           limits. Refer cpp_hw_params.h::CPP_DOWNSCALE_LIMIT_UBWC */
        if (isUBWCEnabled() && (width >= 640) && (height >= 480)) {
            // When goog_zoom is linked to the video stream, disable UBWC for video
            char prop[PROPERTY_VALUE_MAX];
            property_get("persist.camera.gzoom.at", prop, "0");
            bool is_goog_zoom_video_enabled = ((atoi(prop) & 1) > 0) && isType == IS_TYPE_EIS_3_0;

            property_get("persist.camera.gzoom.4k", prop, "0");
            bool is_goog_zoom_4k_enabled = (atoi(prop) > 0);
            bool is_4k_video = (width >= 3840 && height >= 2160);

            if ((QCameraCommon::isVideoUBWCEnabled()) && (!is_goog_zoom_video_enabled
                    || (is_4k_video && !is_goog_zoom_4k_enabled))) {
                streamFormat = CAM_FORMAT_YUV_420_NV12_UBWC;
            } else {
                streamFormat = CAM_FORMAT_YUV_420_NV12_VENUS;
            }
        } else {
#if VENUS_PRESENT
            streamFormat = CAM_FORMAT_YUV_420_NV12_VENUS;
#else
            streamFormat = CAM_FORMAT_YUV_420_NV12;
#endif
        }
        break;
    }
    case CAM_STREAM_TYPE_SNAPSHOT:
        streamFormat = CAM_FORMAT_YUV_420_NV21;
        break;
    case CAM_STREAM_TYPE_CALLBACK:
        /* Changed to macro to ensure format sent to gralloc for callback
           is also changed if the preview format is changed at camera HAL */
        streamFormat = CALLBACK_STREAM_FORMAT;
        break;
    case CAM_STREAM_TYPE_RAW:
        streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
        break;
    default:
        streamFormat = CAM_FORMAT_YUV_420_NV21;
        break;
    }
    return streamFormat;
}


/* QCamera3ProcessingChannel methods */

/*===========================================================================
 * FUNCTION   : QCamera3ProcessingChannel
 *
 * DESCRIPTION: constructor of QCamera3ProcessingChannel
 *
 * PARAMETERS :
 *   @cam_handle : camera handle
 *   @cam_ops    : ptr to camera ops table
 *   @cb_routine : callback routine to frame aggregator
 *   @paddingInfo: stream padding info
 *   @userData   : HWI handle
 *   @stream     : camera3_stream_t structure
 *   @stream_type: Channel stream type
 *   @postprocess_mask: the postprocess mask for streams of this channel
 *   @metadataChannel: handle to the metadataChannel
 *   @numBuffers : number of max dequeued buffers
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3ProcessingChannel::QCamera3ProcessingChannel(uint32_t cam_handle,
        uint32_t channel_handle,
        mm_camera_ops_t *cam_ops,
        channel_cb_routine cb_routine,
        channel_cb_buffer_err cb_buffer_err,
        cam_padding_info_t *paddingInfo,
        void *userData,
        camera3_stream_t *stream,
        cam_stream_type_t stream_type,
        cam_feature_mask_t postprocess_mask,
        QCamera3Channel *metadataChannel,
        uint32_t numBuffers) :
            QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine,
                    cb_buffer_err, paddingInfo, postprocess_mask, userData, numBuffers),
            m_postprocessor(this),
            mFrameCount(0),
            mLastFrameCount(0),
            mLastFpsTime(0),
            mMemory(numBuffers),
            mCamera3Stream(stream),
            mNumBufs(CAM_MAX_NUM_BUFS_PER_STREAM),
            mStreamType(stream_type),
            mPostProcStarted(false),
            mReprocessType(REPROCESS_TYPE_NONE),
            mInputBufferConfig(false),
            m_pMetaChannel(metadataChannel),
            mMetaFrame(NULL),
            mOfflineMemory(0),
            mOfflineMetaMemory(numBuffers + (MAX_REPROCESS_PIPELINE_STAGES - 1))
{
    char prop[PROPERTY_VALUE_MAX];
    property_get("persist.debug.sf.showfps", prop, "0");
    mDebugFPS = (uint8_t) atoi(prop);

    int32_t rc = m_postprocessor.init(&mMemory);
    if (rc != 0) {
        LOGE("Init Postprocessor failed");
    }
}

/*===========================================================================
 * FUNCTION   : ~QCamera3ProcessingChannel
 *
 * DESCRIPTION: destructor of QCamera3ProcessingChannel
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3ProcessingChannel::~QCamera3ProcessingChannel()
{
    destroy();

    int32_t rc = m_postprocessor.deinit();
    if (rc != 0) {
        LOGE("De-init Postprocessor failed");
    }

    if (0 < mOfflineMetaMemory.getCnt()) {
        mOfflineMetaMemory.deallocate();
    }
    if (0 < mOfflineMemory.getCnt()) {
        mOfflineMemory.unregisterBuffers();
    }

}

/*===========================================================================
 * FUNCTION   : streamCbRoutine
 *
 * DESCRIPTION: callback routine invoked when a stream buffer is filled; issues
 *              the framework callback for the corresponding request
 *
 * PARAMETERS :
 *   @super_frame : the super frame with filled buffer
 *   @stream      : stream on which the buffer was requested and filled
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3ProcessingChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
        QCamera3Stream *stream)
{
    if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
        KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PREVIEW_STRM_CB);
    } else {
        ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_STRM_CB);
    }
    //FIXME Q Buf back in case of error?
    uint8_t frameIndex;
    buffer_handle_t *resultBuffer;
    int32_t resultFrameNumber;
    camera3_stream_buffer_t result;
    cam_dimension_t dim;
    cam_frame_len_offset_t offset;

    memset(&dim, 0, sizeof(dim));
    memset(&offset, 0, sizeof(cam_frame_len_offset_t));
    if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) {
        LOGE("Error with the stream callback");
        return;
    }

    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
    if(frameIndex >= mNumBufs) {
        LOGE("Error, Invalid index for buffer");
        stream->bufDone(frameIndex);
        return;
    }

    if (mDebugFPS) {
        showDebugFPS(stream->getMyType());
    }
    stream->getFrameDimension(dim);
    stream->getFrameOffset(offset);
    if (stream->getMyType() == CAM_STREAM_TYPE_PREVIEW) {
        dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_PREVIEW);
    } else if (stream->getMyType() == CAM_STREAM_TYPE_VIDEO) {
        dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_VIDEO);
    } else if (stream->getMyType() == CAM_STREAM_TYPE_CALLBACK) {
        dumpYUV(super_frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_CALLBACK);
    }

    do {

        //Use below data to issue framework callback
        resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex);
        resultFrameNumber = mMemory.getFrameNumber(frameIndex);
        uint32_t oldestBufIndex;
        int32_t lowestFrameNumber = mMemory.getOldestFrameNumber(oldestBufIndex);
        QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
        if ((lowestFrameNumber != -1 ) && (lowestFrameNumber < resultFrameNumber) &&
                hal_obj->mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) {
            LOGE("Error buffer dropped for framenumber:%d with bufidx:%d",
                    lowestFrameNumber, oldestBufIndex);
            if (mOutOfSequenceBuffers.empty()) {
                stream->cancelBuffer(oldestBufIndex);
            }

            //push in order!
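            // mOutOfSequenceBuffers is kept sorted by frame number: insert this
            // super_frame ahead of the first queued entry with a larger frame
            // number, so the head of the list is always the oldest pending frame
            // when the recovery path further below pops from the front.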
            auto itr = mOutOfSequenceBuffers.begin();
            for (; itr != mOutOfSequenceBuffers.end(); itr++) {
                mm_camera_super_buf_t *super_buf = *itr;
                uint32_t buf_idx = super_buf->bufs[0]->buf_idx;
                int32_t frame_num = mMemory.getFrameNumber(buf_idx);
                if (resultFrameNumber < frame_num) {
                    LOGE("Out of order frame!! set buffer status error flag!");
                    mOutOfSequenceBuffers.insert(itr, super_frame);
                    super_buf->bufs[0]->flags |= V4L2_BUF_FLAG_ERROR;
                    break;
                }
            }

            if (itr == mOutOfSequenceBuffers.end()) {
                LOGE("Add the frame to the end of mOutOfSequenceBuffers");
                // add the buffer
                mOutOfSequenceBuffers.push_back(super_frame);
            }
            return;
        }

        if(hal_obj->mStreamConfig == true) {
            switch (stream->getMyType()) {
                case CAM_STREAM_TYPE_PREVIEW:
                    LOGH("[KPI Perf] : PROFILE_FIRST_PREVIEW_FRAME");
                    break;
                case CAM_STREAM_TYPE_VIDEO:
                    LOGH("[KPI Perf] : PROFILE_FIRST_VIDEO_FRAME");
                    break;
                default:
                    break;
            }
            hal_obj->mStreamConfig = false;
        }

        result.stream = mCamera3Stream;
        result.buffer = resultBuffer;
        if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) {
            result.status = CAMERA3_BUFFER_STATUS_ERROR;
            LOGW("CAMERA3_BUFFER_STATUS_ERROR for stream_type: %d",
                    mStreams[0]->getMyType());
            mChannelCbBufErr(this, resultFrameNumber, CAMERA3_BUFFER_STATUS_ERROR, mUserData);
        } else {
            result.status = CAMERA3_BUFFER_STATUS_OK;
        }
        result.acquire_fence = -1;
        result.release_fence = -1;
        if(mPerFrameMapUnmapEnable) {
            int32_t rc = stream->bufRelease(frameIndex);
            if (NO_ERROR != rc) {
                LOGE("Error %d releasing stream buffer %d",
                        rc, frameIndex);
            }

            rc = mMemory.unregisterBuffer(frameIndex);
            if (NO_ERROR != rc) {
                LOGE("Error %d unregistering stream buffer %d",
                        rc, frameIndex);
            }
        }

        if (0 <= resultFrameNumber) {
            if (mChannelCB) {
                mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, false, mUserData);
            }
        } else {
            LOGE("Bad frame number");
        }
        free(super_frame);
        super_frame = NULL;
        if (mOutOfSequenceBuffers.empty()) {
            break;
        } else {
            auto itr = mOutOfSequenceBuffers.begin();
            super_frame = *itr;
            frameIndex = super_frame->bufs[0]->buf_idx;
            resultFrameNumber = mMemory.getFrameNumber(frameIndex);
            lowestFrameNumber = mMemory.getOldestFrameNumber(oldestBufIndex);
            LOGE("Attempting to recover next frame: result Frame#: %d, resultIdx: %d, "
                    "Lowest Frame#: %d, oldestBufIndex: %d",
                    resultFrameNumber, frameIndex, lowestFrameNumber, oldestBufIndex);
            if ((lowestFrameNumber != -1) && (lowestFrameNumber < resultFrameNumber)) {
                LOGE("Multiple frame dropped requesting cancel for frame %d, idx:%d",
                        lowestFrameNumber, oldestBufIndex);
                stream->cancelBuffer(oldestBufIndex);
                return;
            } else if (lowestFrameNumber == resultFrameNumber) {
                LOGE("Time to flush out head of list continue loop with this new super frame");
                itr = mOutOfSequenceBuffers.erase(itr);
            } else {
                LOGE("Unexpected condition head of list is not the lowest frame number");
                itr = mOutOfSequenceBuffers.erase(itr);
            }
        }
    } while (1);
    return;
}

/*===========================================================================
 * FUNCTION   : putStreamBufs
 *
 * DESCRIPTION: release the buffers allocated to the stream
 *
 * PARAMETERS : NONE
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3YUVChannel::putStreamBufs()
{
    QCamera3ProcessingChannel::putStreamBufs();

    // Free allocated heap buffer.
    mMemory.deallocate();
    // Clear free heap buffer list.
    mFreeHeapBufferList.clear();
    // Clear offlinePpInfoList
    mOfflinePpInfoList.clear();
}

/*===========================================================================
 * FUNCTION   : timeoutFrame
 *
 * DESCRIPTION: Method to indicate to the channel that a given frame has taken
 *              too long to be generated
 *
 * PARAMETERS :
 *   @frameNumber : frame number of the buffer that timed out
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ProcessingChannel::timeoutFrame(uint32_t frameNumber)
{
    int32_t bufIdx;

    bufIdx = mMemory.getBufferIndex(frameNumber);

    if (bufIdx < 0) {
        LOGE("%s: Buffer not found for frame:%d", __func__, frameNumber);
        return -1;
    }

    mStreams[0]->timeoutFrame(bufIdx);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : request
 *
 * DESCRIPTION: handle the request - either with an input buffer or a direct
 *              output request
 *
 * PARAMETERS :
 *   @buffer       : pointer to the output buffer
 *   @frameNumber  : frame number of the request
 *   @pInputBuffer : pointer to input buffer if an input request
 *   @metadata     : parameters associated with the request
 *   @indexUsed    : buffer index used for this request
 *   @internalreq  : boolean to indicate if this is purely internal request
 *                   needing internal buffer allocation
 *   @meteringonly : boolean indicating metering only frame subset of internal
 *                   not consumed by postprocessor
 *
 * RETURN     : 0 on a success start of capture
 *              -EINVAL on invalid input
 *              -ENODEV on serious error
 *==========================================================================*/
int32_t QCamera3ProcessingChannel::request(buffer_handle_t *buffer,
        uint32_t frameNumber,
        camera3_stream_buffer_t* pInputBuffer,
        metadata_buffer_t* metadata,
        int &indexUsed,
        __unused bool internalRequest = false,
        __unused bool meteringOnly = false)
{
    int32_t rc = NO_ERROR;
    int index;

    if (NULL == buffer || NULL == metadata) {
        LOGE("Invalid buffer/metadata in channel request");
        return BAD_VALUE;
    }

    if (pInputBuffer) {
        //need to send to reprocessing
        LOGD("Got a request with input buffer, output streamType = %d", mStreamType);
        reprocess_config_t reproc_cfg;
        cam_dimension_t dim;
        memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
        memset(&dim, 0, sizeof(dim));
        setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim);
        startPostProc(reproc_cfg);

        qcamera_fwk_input_pp_data_t *src_frame = NULL;
        src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1,
                sizeof(qcamera_fwk_input_pp_data_t));
        if (src_frame == NULL) {
            LOGE("No memory for src frame");
            return NO_MEMORY;
        }
        rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata, buffer, frameNumber);
        if (NO_ERROR != rc) {
            LOGE("Error %d while setting framework input PP data", rc);
            free(src_frame);
            return rc;
        }
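        // Hand the wrapped input frame to the postprocessor; when reprocessing
        // completes, the result is delivered back through reprocessCbRoutine().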
LOGH("Post-process started"); 1103 m_postprocessor.processData(src_frame); 1104 } else { 1105 index = mMemory.getMatchBufIndex((void*)buffer); 1106 if(index < 0) { 1107 rc = registerBuffer(buffer, mIsType); 1108 if (NO_ERROR != rc) { 1109 LOGE("On-the-fly buffer registration failed %d", 1110 rc); 1111 return rc; 1112 } 1113 1114 index = mMemory.getMatchBufIndex((void*)buffer); 1115 if (index < 0) { 1116 LOGE("Could not find object among registered buffers"); 1117 return DEAD_OBJECT; 1118 } 1119 } 1120 rc = mMemory.markFrameNumber(index, frameNumber); 1121 if(rc != NO_ERROR) { 1122 LOGE("Error marking frame number:%d for index %d", frameNumber, 1123 index); 1124 return rc; 1125 } 1126 if (m_bIsActive) { 1127 rc = mStreams[0]->bufDone(index); 1128 if(rc != NO_ERROR) { 1129 LOGE("Failed to Q new buffer to stream"); 1130 mMemory.markFrameNumber(index, -1); 1131 return rc; 1132 } 1133 } 1134 indexUsed = index; 1135 } 1136 return rc; 1137} 1138 1139/*=========================================================================== 1140 * FUNCTION : initialize 1141 * 1142 * DESCRIPTION: 1143 * 1144 * PARAMETERS : isType : type of image stabilization on the buffer 1145 * 1146 * RETURN : int32_t type of status 1147 * NO_ERROR -- success 1148 * none-zero failure code 1149 *==========================================================================*/ 1150int32_t QCamera3ProcessingChannel::initialize(__unused cam_is_type_t isType) 1151{ 1152 int32_t rc = NO_ERROR; 1153 rc = mOfflineMetaMemory.allocateAll(sizeof(metadata_buffer_t)); 1154 if (rc == NO_ERROR) { 1155 Mutex::Autolock lock(mFreeOfflineMetaBuffersLock); 1156 mFreeOfflineMetaBuffersList.clear(); 1157 for (uint32_t i = 0; i < mNumBuffers + (MAX_REPROCESS_PIPELINE_STAGES - 1); 1158 i++) { 1159 mFreeOfflineMetaBuffersList.push_back(i); 1160 } 1161 } else { 1162 LOGE("Could not allocate offline meta buffers for input reprocess"); 1163 } 1164 mOutOfSequenceBuffers.clear(); 1165 return rc; 1166} 1167 1168/*=========================================================================== 1169 * FUNCTION : registerBuffer 1170 * 1171 * DESCRIPTION: register streaming buffer to the channel object 1172 * 1173 * PARAMETERS : 1174 * @buffer : buffer to be registered 1175 * @isType : image stabilization type on the stream 1176 * 1177 * RETURN : int32_t type of status 1178 * NO_ERROR -- success 1179 * none-zero failure code 1180 *==========================================================================*/ 1181int32_t QCamera3ProcessingChannel::registerBuffer(buffer_handle_t *buffer, 1182 cam_is_type_t isType) 1183{ 1184 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_REG_BUF); 1185 int rc = 0; 1186 mIsType = isType; 1187 cam_stream_type_t streamType; 1188 1189 if ((uint32_t)mMemory.getCnt() > (mNumBufs - 1)) { 1190 LOGE("Trying to register more buffers than initially requested"); 1191 return BAD_VALUE; 1192 } 1193 1194 if (0 == m_numStreams) { 1195 rc = initialize(mIsType); 1196 if (rc != NO_ERROR) { 1197 LOGE("Couldn't initialize camera stream %d", rc); 1198 return rc; 1199 } 1200 } 1201 1202 streamType = mStreams[0]->getMyType(); 1203 rc = mMemory.registerBuffer(buffer, streamType); 1204 if (ALREADY_EXISTS == rc) { 1205 return NO_ERROR; 1206 } else if (NO_ERROR != rc) { 1207 LOGE("Buffer %p couldn't be registered %d", buffer, rc); 1208 return rc; 1209 } 1210 1211 return rc; 1212} 1213 1214int32_t QCamera3ProcessingChannel::registerBufferAndGetBufDef(buffer_handle_t *buffer, 1215 mm_camera_buf_def_t *frame) 1216{ 1217 if (buffer == nullptr || frame == nullptr) { 1218 
ALOGE("%s: buffer and frame cannot be nullptr.", __FUNCTION__); 1219 return BAD_VALUE; 1220 } 1221 1222 status_t rc; 1223 1224 // Get the buffer index. 1225 int index = mMemory.getMatchBufIndex((void*)buffer); 1226 if(index < 0) { 1227 // Register the buffer if it was not registered. 1228 rc = registerBuffer(buffer, mIsType); 1229 if (rc != OK) { 1230 ALOGE("%s: Regitering buffer failed: %s (%d)", __FUNCTION__, strerror(-rc), rc); 1231 return rc; 1232 } 1233 1234 index = mMemory.getMatchBufIndex((void*)buffer); 1235 if (index < 0) { 1236 ALOGE("%s: Could not find object among registered buffers", __FUNCTION__); 1237 return DEAD_OBJECT; 1238 } 1239 } 1240 1241 cam_frame_len_offset_t offset = {}; 1242 mStreams[0]->getFrameOffset(offset); 1243 1244 // Get the buffer def. 1245 rc = mMemory.getBufDef(offset, *frame, index, mMapStreamBuffers); 1246 if (rc != 0) { 1247 ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc); 1248 return rc; 1249 } 1250 1251 // Set the frame's stream ID because it's not set in getBufDef. 1252 frame->stream_id = mStreams[0]->getMyHandle(); 1253 return 0; 1254} 1255 1256void QCamera3ProcessingChannel::unregisterBuffer(mm_camera_buf_def_t *frame) 1257{ 1258 if (frame == nullptr) { 1259 ALOGE("%s: frame is nullptr", __FUNCTION__); 1260 return; 1261 } 1262 1263 mMemory.unregisterBuffer(frame->buf_idx); 1264} 1265 1266/*=========================================================================== 1267 * FUNCTION : setFwkInputPPData 1268 * 1269 * DESCRIPTION: fill out the framework src frame information for reprocessing 1270 * 1271 * PARAMETERS : 1272 * @src_frame : input pp data to be filled out 1273 * @pInputBuffer : input buffer for reprocessing 1274 * @reproc_cfg : pointer to the reprocess config 1275 * @metadata : pointer to the metadata buffer 1276 * @output_buffer : output buffer for reprocessing; could be NULL if not 1277 * framework allocated 1278 * @frameNumber : frame number of the request 1279 * 1280 * RETURN : int32_t type of status 1281 * NO_ERROR -- success 1282 * none-zero failure code 1283 *==========================================================================*/ 1284int32_t QCamera3ProcessingChannel::setFwkInputPPData(qcamera_fwk_input_pp_data_t *src_frame, 1285 camera3_stream_buffer_t *pInputBuffer, reprocess_config_t *reproc_cfg, 1286 metadata_buffer_t *metadata, buffer_handle_t *output_buffer, 1287 uint32_t frameNumber) 1288{ 1289 int32_t rc = NO_ERROR; 1290 int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer); 1291 if(input_index < 0) { 1292 rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType); 1293 if (NO_ERROR != rc) { 1294 LOGE("On-the-fly input buffer registration failed %d", 1295 rc); 1296 return rc; 1297 } 1298 input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer); 1299 if (input_index < 0) { 1300 LOGE("Could not find object among registered buffers"); 1301 return DEAD_OBJECT; 1302 } 1303 } 1304 mOfflineMemory.markFrameNumber(input_index, frameNumber); 1305 1306 src_frame->src_frame = *pInputBuffer; 1307 rc = mOfflineMemory.getBufDef(reproc_cfg->input_stream_plane_info.plane_info, 1308 src_frame->input_buffer, input_index, mMapStreamBuffers); 1309 if (rc != 0) { 1310 return rc; 1311 } 1312 dumpYUV(&src_frame->input_buffer, reproc_cfg->input_stream_dim, 1313 reproc_cfg->input_stream_plane_info.plane_info, QCAMERA_DUMP_FRM_INPUT_REPROCESS); 1314 cam_dimension_t dim = {sizeof(metadata_buffer_t), 1}; 1315 cam_stream_buf_plane_info_t meta_planes; 1316 rc = 
    if (rc != 0) {
        LOGE("Metadata stream plane info calculation failed!");
        return rc;
    }
    uint32_t metaBufIdx;
    {
        Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
        if (mFreeOfflineMetaBuffersList.empty()) {
            LOGE("mFreeOfflineMetaBuffersList is null. Fatal");
            return BAD_VALUE;
        }

        metaBufIdx = *(mFreeOfflineMetaBuffersList.begin());
        mFreeOfflineMetaBuffersList.erase(mFreeOfflineMetaBuffersList.begin());
        LOGD("erasing %d, mFreeOfflineMetaBuffersList.size %d", metaBufIdx,
                mFreeOfflineMetaBuffersList.size());
    }

    mOfflineMetaMemory.markFrameNumber(metaBufIdx, frameNumber);

    mm_camera_buf_def_t meta_buf;
    cam_frame_len_offset_t offset = meta_planes.plane_info;
    rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, metaBufIdx, true /*virtualAddr*/);
    if (NO_ERROR != rc) {
        return rc;
    }
    memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
    src_frame->metadata_buffer = meta_buf;
    src_frame->reproc_config = *reproc_cfg;
    src_frame->output_buffer = output_buffer;
    src_frame->frameNumber = frameNumber;
    return rc;
}

/*===========================================================================
 * FUNCTION   : checkStreamCbErrors
 *
 * DESCRIPTION: check the stream callback for errors
 *
 * PARAMETERS :
 *   @super_frame : the super frame with filled buffer
 *   @stream      : stream on which the buffer was requested and filled
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ProcessingChannel::checkStreamCbErrors(mm_camera_super_buf_t *super_frame,
        QCamera3Stream *stream)
{
    if (NULL == stream) {
        LOGE("Invalid stream");
        return BAD_VALUE;
    }

    if(NULL == super_frame) {
        LOGE("Invalid Super buffer");
        return BAD_VALUE;
    }

    if(super_frame->num_bufs != 1) {
        LOGE("Multiple streams are not supported");
        return BAD_VALUE;
    }
    if(NULL == super_frame->bufs[0]) {
        LOGE("Error, Super buffer frame does not contain valid buffer");
        return BAD_VALUE;
    }
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : getStreamSize
 *
 * DESCRIPTION: get the size from the camera3_stream_t for the channel
 *
 * PARAMETERS :
 *   @dim : Return the size of the stream
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ProcessingChannel::getStreamSize(cam_dimension_t &dim)
{
    if (mCamera3Stream) {
        dim.width = mCamera3Stream->width;
        dim.height = mCamera3Stream->height;
        return NO_ERROR;
    } else {
        return BAD_VALUE;
    }
}

/*===========================================================================
 * FUNCTION   : getStreamBufs
 *
 * DESCRIPTION: get the buffers allocated to the stream
 *
 * PARAMETERS :
 *   @len : buffer length
 *
 * RETURN     : pointer to the stream memory object holding the buffers
 *==========================================================================*/
QCamera3StreamMem* QCamera3ProcessingChannel::getStreamBufs(uint32_t /*len*/)
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GETSTREAMBUFS);
    return &mMemory;
}

/*===========================================================================
 * FUNCTION   : putStreamBufs
 *
 * DESCRIPTION: release the buffers allocated to the stream
 *
 * PARAMETERS : NONE
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3ProcessingChannel::putStreamBufs()
{
    mMemory.unregisterBuffers();

    /* Reclaim all the offline metabuffers and push them to free list */
    {
        Mutex::Autolock lock(mFreeOfflineMetaBuffersLock);
        mFreeOfflineMetaBuffersList.clear();
        for (uint32_t i = 0; i < mOfflineMetaMemory.getCnt(); i++) {
            mFreeOfflineMetaBuffersList.push_back(i);
        }
    }
}


/*===========================================================================
 * FUNCTION   : stop
 *
 * DESCRIPTION: stop processing channel, which will stop all streams within,
 *              including the reprocessing channel in postprocessor.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ProcessingChannel::stop()
{
    if (mStreamType == CAM_STREAM_TYPE_PREVIEW) {
        KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
    }
    int32_t rc = NO_ERROR;
    if(!m_bIsActive) {
        LOGE("Attempt to stop inactive channel");
        return rc;
    }

    m_postprocessor.stop();
    mPostProcStarted = false;
    rc |= QCamera3Channel::stop();
    return rc;
}

/*===========================================================================
 * FUNCTION   : startPostProc
 *
 * DESCRIPTION: figure out if the postprocessor needs to be restarted and if yes
 *              start it
 *
 * PARAMETERS :
 *   @config : reprocessing configuration
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3ProcessingChannel::startPostProc(const reprocess_config_t &config)
{
    if (mPostProcStarted) {
        if (config.reprocess_type != mReprocessType) {
            // If the reprocess type doesn't match, stop and start with the new type
            m_postprocessor.stop();
            mPostProcStarted = false;
        } else {
            // Return if reprocess type is the same.
            return;
        }
    }

    m_postprocessor.start(config);
    mPostProcStarted = true;
    mReprocessType = config.reprocess_type;
}

/*===========================================================================
 * FUNCTION   : queueReprocMetadata
 *
 * DESCRIPTION: queue the reprocess metadata to the postprocessor
 *
 * PARAMETERS : metadata : the metadata corresponding to the pp frame
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ProcessingChannel::queueReprocMetadata(mm_camera_super_buf_t *metadata)
{
    return m_postprocessor.processPPMetadata(metadata);
}

/*===========================================================================
 * FUNCTION   : metadataBufDone
 *
 * DESCRIPTION: Buffer done method for a metadata buffer
 *
 * PARAMETERS :
 *   @recvd_frame : received metadata frame
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ProcessingChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
{
    int32_t rc = NO_ERROR;
    if ((NULL == m_pMetaChannel) || (NULL == recvd_frame)) {
        LOGE("Metadata channel or metadata buffer invalid");
        return BAD_VALUE;
    }

    rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);

    return rc;
}

/*===========================================================================
 * FUNCTION   : translateStreamTypeAndFormat
 *
 * DESCRIPTION: translates the framework stream format into HAL stream type
 *              and format
 *
 * PARAMETERS :
 *   @stream       : fwk stream
 *   @streamType   : translated stream type
 *   @streamFormat : translated stream format
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ProcessingChannel::translateStreamTypeAndFormat(camera3_stream_t *stream,
        cam_stream_type_t &streamType, cam_format_t &streamFormat)
{
    switch (stream->format) {
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
            if(stream->stream_type == CAMERA3_STREAM_INPUT){
                streamType = CAM_STREAM_TYPE_SNAPSHOT;
                streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
                        stream->width, stream->height, m_bUBWCenable, mIsType);
            } else {
                streamType = CAM_STREAM_TYPE_CALLBACK;
                streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_CALLBACK,
                        stream->width, stream->height, m_bUBWCenable, mIsType);
            }
            break;
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
            if (stream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER) {
                streamType = CAM_STREAM_TYPE_VIDEO;
                streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_VIDEO,
                        stream->width, stream->height, m_bUBWCenable, mIsType);
            } else if(stream->stream_type == CAMERA3_STREAM_INPUT ||
                    stream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
                    IS_USAGE_ZSL(stream->usage)){
                streamType = CAM_STREAM_TYPE_SNAPSHOT;
                streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT,
                        stream->width, stream->height, m_bUBWCenable, mIsType);
            } else {
                streamType = CAM_STREAM_TYPE_PREVIEW;
                streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_PREVIEW,
                        stream->width, stream->height, m_bUBWCenable, mIsType);
            }
            break;
        case HAL_PIXEL_FORMAT_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW16:
        case HAL_PIXEL_FORMAT_RAW10:
            streamType = CAM_STREAM_TYPE_RAW;
            if ((HAL_DATASPACE_DEPTH == stream->data_space) &&
                    (HAL_PIXEL_FORMAT_RAW16 == stream->format)) {
                streamFormat = CAM_FORMAT_META_RAW_10BIT;
            } else {
                streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
            }
            break;
        default:
            return -EINVAL;
    }
    LOGD("fwk_format = %d, streamType = %d, streamFormat = %d",
            stream->format, streamType, streamFormat);
    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : setReprocConfig
 *
 * DESCRIPTION: sets the reprocessing parameters for the input buffer
 *
 * PARAMETERS :
 *   @reproc_cfg   : the configuration to be set
 *   @pInputBuffer : pointer to the input buffer
 *   @metadata     : pointer to the reprocessing metadata buffer
 *   @streamFormat : format of the input stream
 *   @dim          : input stream dimension used when there is no input buffer
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ProcessingChannel::setReprocConfig(reprocess_config_t &reproc_cfg,
        camera3_stream_buffer_t *pInputBuffer,
        __unused metadata_buffer_t *metadata,
        cam_format_t streamFormat, cam_dimension_t dim)
{
    int32_t rc = 0;
    reproc_cfg.padding = &mPaddingInfo;
    //to ensure a big enough buffer size set the height and width
    //padding to max(height padding, width padding)
    if (reproc_cfg.padding->height_padding > reproc_cfg.padding->width_padding) {
        reproc_cfg.padding->width_padding = reproc_cfg.padding->height_padding;
    } else {
        reproc_cfg.padding->height_padding = reproc_cfg.padding->width_padding;
    }
    if (NULL != pInputBuffer) {
        reproc_cfg.input_stream_dim.width = (int32_t)pInputBuffer->stream->width;
        reproc_cfg.input_stream_dim.height = (int32_t)pInputBuffer->stream->height;
    } else {
        reproc_cfg.input_stream_dim.width = (int32_t)dim.width;
        reproc_cfg.input_stream_dim.height = (int32_t)dim.height;
    }
    reproc_cfg.src_channel = this;
    reproc_cfg.output_stream_dim.width = mCamera3Stream->width;
    reproc_cfg.output_stream_dim.height = mCamera3Stream->height;
    reproc_cfg.reprocess_type = getReprocessType();

    //offset calculation
    if (NULL != pInputBuffer) {
        rc = translateStreamTypeAndFormat(pInputBuffer->stream,
                reproc_cfg.stream_type, reproc_cfg.stream_format);
        if (rc != NO_ERROR) {
            LOGE("Stream format %d is not supported",
                    pInputBuffer->stream->format);
            return rc;
        }
    } else {
        reproc_cfg.stream_type = mStreamType;
        reproc_cfg.stream_format = streamFormat;
    }

    switch (reproc_cfg.stream_type) {
        case CAM_STREAM_TYPE_PREVIEW:
            if (getStreamByIndex(0) == NULL) {
                LOGE("Could not find stream");
                rc = -1;
                break;
            }
            rc = mm_stream_calc_offset_preview(
                    getStreamByIndex(0)->getStreamInfo(),
                    &reproc_cfg.input_stream_dim,
                    reproc_cfg.padding,
                    &reproc_cfg.input_stream_plane_info);
            break;
        case CAM_STREAM_TYPE_VIDEO:
            rc = mm_stream_calc_offset_video(reproc_cfg.stream_format,
                    &reproc_cfg.input_stream_dim,
                    &reproc_cfg.input_stream_plane_info);
            break;
        case CAM_STREAM_TYPE_RAW:
            rc = mm_stream_calc_offset_raw(reproc_cfg.stream_format,
                    &reproc_cfg.input_stream_dim,
                    reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
            break;
        case CAM_STREAM_TYPE_SNAPSHOT:
        case CAM_STREAM_TYPE_CALLBACK:
        default:
            rc = mm_stream_calc_offset_snapshot(streamFormat, &reproc_cfg.input_stream_dim,
                    reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
            break;
    }
    if (rc != 0) {
        LOGE("Stream %d plane info calculation failed!", mStreamType);
        return rc;
    }

    IF_META_AVAILABLE(cam_hdr_param_t, hdr_info, CAM_INTF_PARM_HAL_BRACKETING_HDR, metadata) {
        reproc_cfg.hdr_param = *hdr_info;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : reprocessCbRoutine
 *
 * DESCRIPTION: callback function for the reprocessed frame. This frame now
 *              should be returned to the framework
 *
 * PARAMETERS :
 *   @resultBuffer      : buffer containing the reprocessed data
 *   @resultFrameNumber : frame number on which the buffer was requested
 *
 * RETURN     : NONE
 *
 *==========================================================================*/
void QCamera3ProcessingChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer,
        uint32_t resultFrameNumber)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CH_REPROC_CB);
    int rc = NO_ERROR;

    rc = releaseOfflineMemory(resultFrameNumber);
    if (NO_ERROR != rc) {
        LOGE("Error releasing offline memory %d", rc);
    }
    /* Since reprocessing is done, send the callback to release the input buffer */
    if (mChannelCB) {
        mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData);
    }
    issueChannelCb(resultBuffer, resultFrameNumber);

    return;
}

/*===========================================================================
 * FUNCTION   : issueChannelCb
 *
 * DESCRIPTION: function to set the result and issue channel callback
 *
 * PARAMETERS :
 *   @resultBuffer      : buffer containing the data
 *   @resultFrameNumber : frame number on which the buffer was requested
 *
 * RETURN     : NONE
 *
 *==========================================================================*/
void QCamera3ProcessingChannel::issueChannelCb(buffer_handle_t *resultBuffer,
        uint32_t resultFrameNumber)
{
    camera3_stream_buffer_t result;
    //Use below data to issue framework callback
    result.stream = mCamera3Stream;
    result.buffer = resultBuffer;
    result.status = CAMERA3_BUFFER_STATUS_OK;
    result.acquire_fence = -1;
    result.release_fence = -1;

    if (mChannelCB) {
        mChannelCB(NULL, &result, resultFrameNumber, false, mUserData);
    }
}

/*===========================================================================
 * FUNCTION   : showDebugFPS
 *
 * DESCRIPTION: Function to log the fps for preview, video, callback and raw
 *              streams
 *
 * PARAMETERS : Stream type
 *
 * RETURN     : None
 *==========================================================================*/
void QCamera3ProcessingChannel::showDebugFPS(int32_t streamType)
{
    double fps = 0;
    mFrameCount++;
    nsecs_t now = systemTime();
    nsecs_t diff = now - mLastFpsTime;
    if (diff > ms2ns(250)) {
        fps = (((double)(mFrameCount - mLastFrameCount)) *
                (double)(s2ns(1))) / (double)diff;
(double)diff; 1797 switch(streamType) { 1798 case CAM_STREAM_TYPE_PREVIEW: 1799 LOGH("PROFILE_PREVIEW_FRAMES_PER_SECOND : %.4f: mFrameCount=%d", 1800 fps, mFrameCount); 1801 break; 1802 case CAM_STREAM_TYPE_VIDEO: 1803 LOGH("PROFILE_VIDEO_FRAMES_PER_SECOND : %.4f", 1804 fps); 1805 break; 1806 case CAM_STREAM_TYPE_CALLBACK: 1807 LOGH("PROFILE_CALLBACK_FRAMES_PER_SECOND : %.4f", 1808 fps); 1809 break; 1810 case CAM_STREAM_TYPE_RAW: 1811 LOGH("PROFILE_RAW_FRAMES_PER_SECOND : %.4f", 1812 fps); 1813 break; 1814 default: 1815 LOGH("logging not supported for the stream"); 1816 break; 1817 } 1818 mLastFpsTime = now; 1819 mLastFrameCount = mFrameCount; 1820 } 1821} 1822 1823/*=========================================================================== 1824 * FUNCTION : releaseOfflineMemory 1825 * 1826 * DESCRIPTION: function to clean up the offline memory used for input reprocess 1827 * 1828 * PARAMETERS : 1829 * @resultFrameNumber : frame number on which the buffer was requested 1830 * 1831 * RETURN : int32_t type of status 1832 * NO_ERROR -- success 1833 * non-zero failure code 1834 * 1835 * 1836 *==========================================================================*/ 1837int32_t QCamera3ProcessingChannel::releaseOfflineMemory(uint32_t resultFrameNumber) 1838{ 1839 int32_t rc = NO_ERROR; 1840 int32_t inputBufIndex = 1841 mOfflineMemory.getGrallocBufferIndex(resultFrameNumber); 1842 if (0 <= inputBufIndex) { 1843 rc = mOfflineMemory.unregisterBuffer(inputBufIndex); 1844 } else { 1845 LOGW("Could not find offline input buffer, resultFrameNumber %d", 1846 resultFrameNumber); 1847 } 1848 if (rc != NO_ERROR) { 1849 LOGE("Failed to unregister offline input buffer"); 1850 } 1851 1852 int32_t metaBufIndex = 1853 mOfflineMetaMemory.getHeapBufferIndex(resultFrameNumber); 1854 if (0 <= metaBufIndex) { 1855 Mutex::Autolock lock(mFreeOfflineMetaBuffersLock); 1856 mFreeOfflineMetaBuffersList.push_back((uint32_t)metaBufIndex); 1857 } else { 1858 LOGW("Could not find offline meta buffer, resultFrameNumber %d", 1859 resultFrameNumber); 1860 } 1861 1862 return rc; 1863} 1864 1865/* Regular Channel methods */ 1866/*=========================================================================== 1867 * FUNCTION : QCamera3RegularChannel 1868 * 1869 * DESCRIPTION: constructor of QCamera3RegularChannel 1870 * 1871 * PARAMETERS : 1872 * @cam_handle : camera handle 1873 * @cam_ops : ptr to camera ops table 1874 * @cb_routine : callback routine to frame aggregator 1875 * @stream : camera3_stream_t structure 1876 * @stream_type: Channel stream type 1877 * @postprocess_mask: feature mask for postprocessing 1878 * @metadataChannel : metadata channel for the session 1879 * @numBuffers : number of max dequeued buffers 1880 * 1881 * RETURN : none 1882 *==========================================================================*/ 1883QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle, 1884 uint32_t channel_handle, 1885 mm_camera_ops_t *cam_ops, 1886 channel_cb_routine cb_routine, 1887 channel_cb_buffer_err cb_buffer_err, 1888 cam_padding_info_t *paddingInfo, 1889 void *userData, 1890 camera3_stream_t *stream, 1891 cam_stream_type_t stream_type, 1892 cam_feature_mask_t postprocess_mask, 1893 QCamera3Channel *metadataChannel, 1894 uint32_t numBuffers) : 1895 QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops, 1896 cb_routine, cb_buffer_err, paddingInfo, userData, stream, stream_type, 1897 postprocess_mask, metadataChannel, numBuffers), 1898 mBatchSize(0), 1899 mRotation(ROTATE_0) 1900{ 1901} 1902 
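/*
 * Worked example for showDebugFPS() above (illustrative numbers only): with
 * the 250 ms minimum logging interval, suppose 15 new frames arrive over a
 * 500 ms window. Then
 *     fps = (mFrameCount - mLastFrameCount) * s2ns(1) / diff
 *         = 15 * 1000000000 / 500000000
 *         = 30.0
 * mLastFpsTime and mLastFrameCount are re-based after every log line, so each
 * printout reflects only the most recent window.
 */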
1903/*=========================================================================== 1904 * FUNCTION : ~QCamera3RegularChannel 1905 * 1906 * DESCRIPTION: destructor of QCamera3RegularChannel 1907 * 1908 * PARAMETERS : none 1909 * 1910 * RETURN : none 1911 *==========================================================================*/ 1912QCamera3RegularChannel::~QCamera3RegularChannel() 1913{ 1914 destroy(); 1915} 1916 1917/*=========================================================================== 1918 * FUNCTION : initialize 1919 * 1920 * DESCRIPTION: Initialize and add camera channel & stream 1921 * 1922 * PARAMETERS : 1923 * @isType : type of image stabilization required on this stream 1924 * 1925 * RETURN : int32_t type of status 1926 * NO_ERROR -- success 1927 * none-zero failure code 1928 *==========================================================================*/ 1929 1930int32_t QCamera3RegularChannel::initialize(cam_is_type_t isType) 1931{ 1932 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REG_CH_INIT); 1933 int32_t rc = NO_ERROR; 1934 1935 cam_dimension_t streamDim; 1936 1937 if (NULL == mCamera3Stream) { 1938 LOGE("Camera stream uninitialized"); 1939 return NO_INIT; 1940 } 1941 1942 if (1 <= m_numStreams) { 1943 // Only one stream per channel supported in v3 Hal 1944 return NO_ERROR; 1945 } 1946 1947 mIsType = isType; 1948 1949 rc = translateStreamTypeAndFormat(mCamera3Stream, mStreamType, 1950 mStreamFormat); 1951 if (rc != NO_ERROR) { 1952 return -EINVAL; 1953 } 1954 1955 1956 if ((mStreamType == CAM_STREAM_TYPE_VIDEO) || 1957 (mStreamType == CAM_STREAM_TYPE_PREVIEW)) { 1958 if ((mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) && 1959 ((mPostProcMask & CAM_QCOM_FEATURE_ROTATION) == 0)) { 1960 LOGE("attempting rotation %d when rotation is disabled", 1961 mCamera3Stream->rotation); 1962 return -EINVAL; 1963 } 1964 1965 switch (mCamera3Stream->rotation) { 1966 case CAMERA3_STREAM_ROTATION_0: 1967 mRotation = ROTATE_0; 1968 break; 1969 case CAMERA3_STREAM_ROTATION_90: { 1970 mRotation = ROTATE_90; 1971 break; 1972 } 1973 case CAMERA3_STREAM_ROTATION_180: 1974 mRotation = ROTATE_180; 1975 break; 1976 case CAMERA3_STREAM_ROTATION_270: { 1977 mRotation = ROTATE_270; 1978 break; 1979 } 1980 default: 1981 LOGE("Unknown rotation: %d", 1982 mCamera3Stream->rotation); 1983 return -EINVAL; 1984 } 1985 1986 // Camera3/HAL3 spec expecting counter clockwise rotation but CPP HW is 1987 // doing Clockwise rotation and so swap it. 1988 if (mRotation == ROTATE_90) { 1989 mRotation = ROTATE_270; 1990 } else if (mRotation == ROTATE_270) { 1991 mRotation = ROTATE_90; 1992 } 1993 1994 } else if (mCamera3Stream->rotation != CAMERA3_STREAM_ROTATION_0) { 1995 LOGE("Rotation %d is not supported by stream type %d", 1996 mCamera3Stream->rotation, 1997 mStreamType); 1998 return -EINVAL; 1999 } 2000 2001 streamDim.width = mCamera3Stream->width; 2002 streamDim.height = mCamera3Stream->height; 2003 2004 LOGD("batch size is %d", mBatchSize); 2005 rc = QCamera3Channel::addStream(mStreamType, 2006 mStreamFormat, 2007 streamDim, 2008 mRotation, 2009 mNumBufs, 2010 mPostProcMask, 2011 mIsType, 2012 mBatchSize); 2013 2014 return rc; 2015} 2016 2017/*=========================================================================== 2018 * FUNCTION : setBatchSize 2019 * 2020 * DESCRIPTION: Set batch size for the channel. 
2021 *
2022 * PARAMETERS :
2023 * @batchSize : Number of image buffers in a batch
2024 *
2025 * RETURN : int32_t type of status
2026 * NO_ERROR -- success always
2027 * non-zero failure code
2028 *==========================================================================*/
2029int32_t QCamera3RegularChannel::setBatchSize(uint32_t batchSize)
2030{
2031 int32_t rc = NO_ERROR;
2032
2033 mBatchSize = batchSize;
2034 LOGD("Batch size set: %d", mBatchSize);
2035 return rc;
2036}
2037
2038/*===========================================================================
2039 * FUNCTION : getStreamTypeMask
2040 *
2041 * DESCRIPTION: Get bit mask of all stream types in this channel.
2042 * If stream is not initialized, then generate mask based on
2043 * local streamType
2044 *
2045 * PARAMETERS : None
2046 *
2047 * RETURN : Bit mask of all stream types in this channel
2048 *==========================================================================*/
2049uint32_t QCamera3RegularChannel::getStreamTypeMask()
2050{
2051 if (mStreams[0]) {
2052 return QCamera3Channel::getStreamTypeMask();
2053 } else {
2054 return (1U << mStreamType);
2055 }
2056}
2057
2058/*===========================================================================
2059 * FUNCTION : queueBatchBuf
2060 *
2061 * DESCRIPTION: queue batch container to downstream
2062 *
2063 * PARAMETERS :
2064 *
2065 * RETURN : int32_t type of status
2066 * NO_ERROR -- success
2067 * non-zero failure code
2068 *==========================================================================*/
2069int32_t QCamera3RegularChannel::queueBatchBuf()
2070{
2071 int32_t rc = NO_ERROR;
2072
2073 if (mStreams[0]) {
2074 rc = mStreams[0]->queueBatchBuf();
2075 }
2076 if (rc != NO_ERROR) {
2077 LOGE("stream->queueBatchContainer failed");
2078 }
2079 return rc;
2080}
2081
2082/*===========================================================================
2083 * FUNCTION : request
2084 *
2085 * DESCRIPTION: process a request from camera service. Stream on if necessary.
2086 *
2087 * PARAMETERS :
2088 * @buffer : buffer to be filled for this request
2089 *
2090 * RETURN : 0 on a success start of capture
2091 * -EINVAL on invalid input
2092 * -ENODEV on serious error
2093 *==========================================================================*/
2094int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber, int &indexUsed)
2095{
2096 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REG_CH_REQ);
2097 //FIX ME: Return buffer back in case of failures below.
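    /*
     * Summary of the request flow implemented below:
     *  1. Look up the gralloc buffer in mMemory; if it has not been seen
     *     before, register it on the fly and look it up again.
     *  2. Mark the buffer index with the framework frame number so the
     *     stream callback can map the filled buffer back to its request.
     *  3. If the channel is already streaming, queue the buffer to the
     *     stream (bufDone); on failure the frame-number mark is cleared so
     *     the slot can be reused.
     */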
2098 2099 int32_t rc = NO_ERROR; 2100 int index; 2101 2102 if (NULL == buffer) { 2103 LOGE("Invalid buffer in channel request"); 2104 return BAD_VALUE; 2105 } 2106 2107 index = mMemory.getMatchBufIndex((void*)buffer); 2108 if(index < 0) { 2109 rc = registerBuffer(buffer, mIsType); 2110 if (NO_ERROR != rc) { 2111 LOGE("On-the-fly buffer registration failed %d", 2112 rc); 2113 return rc; 2114 } 2115 2116 index = mMemory.getMatchBufIndex((void*)buffer); 2117 if (index < 0) { 2118 LOGE("Could not find object among registered buffers"); 2119 return DEAD_OBJECT; 2120 } 2121 } 2122 2123 rc = mMemory.markFrameNumber((uint32_t)index, frameNumber); 2124 if(rc != NO_ERROR) { 2125 LOGE("Failed to mark FrameNumber:%d,idx:%d",frameNumber,index); 2126 return rc; 2127 } 2128 if (m_bIsActive) { 2129 rc = mStreams[0]->bufDone((uint32_t)index); 2130 if(rc != NO_ERROR) { 2131 LOGE("Failed to Q new buffer to stream"); 2132 mMemory.markFrameNumber(index, -1); 2133 return rc; 2134 } 2135 } 2136 2137 indexUsed = index; 2138 return rc; 2139} 2140 2141/*=========================================================================== 2142 * FUNCTION : getReprocessType 2143 * 2144 * DESCRIPTION: get the type of reprocess output supported by this channel 2145 * 2146 * PARAMETERS : NONE 2147 * 2148 * RETURN : reprocess_type_t : type of reprocess 2149 *==========================================================================*/ 2150reprocess_type_t QCamera3RegularChannel::getReprocessType() 2151{ 2152 return REPROCESS_TYPE_PRIVATE; 2153} 2154 2155 2156QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle, 2157 uint32_t channel_handle, 2158 mm_camera_ops_t *cam_ops, 2159 channel_cb_routine cb_routine, 2160 channel_cb_buffer_err cb_buffer_err, 2161 cam_padding_info_t *paddingInfo, 2162 cam_feature_mask_t postprocess_mask, 2163 void *userData, uint32_t numBuffers) : 2164 QCamera3Channel(cam_handle, channel_handle, cam_ops, 2165 cb_routine, cb_buffer_err, paddingInfo, postprocess_mask, 2166 userData, numBuffers), 2167 mMemory(NULL), mDepthDataPresent(false) 2168{ 2169 mMapStreamBuffers = true; 2170} 2171 2172QCamera3MetadataChannel::~QCamera3MetadataChannel() 2173{ 2174 destroy(); 2175 2176 if (mMemory) { 2177 mMemory->deallocate(); 2178 delete mMemory; 2179 mMemory = NULL; 2180 } 2181} 2182 2183int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType) 2184{ 2185 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_METADATA_CH_INIT); 2186 int32_t rc; 2187 cam_dimension_t streamDim; 2188 2189 if (mMemory || m_numStreams > 0) { 2190 LOGE("metadata channel already initialized"); 2191 return -EINVAL; 2192 } 2193 2194 streamDim.width = (int32_t)sizeof(metadata_buffer_t), 2195 streamDim.height = 1; 2196 2197 mIsType = isType; 2198 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX, 2199 streamDim, ROTATE_0, (uint8_t)mNumBuffers, mPostProcMask, mIsType); 2200 if (rc < 0) { 2201 LOGE("addStream failed"); 2202 } 2203 return rc; 2204} 2205 2206int32_t QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/, 2207 uint32_t /*frameNumber*/, 2208 int& /*indexUsed*/) 2209{ 2210 if (!m_bIsActive) { 2211 return start(); 2212 } 2213 else 2214 return 0; 2215} 2216 2217void QCamera3MetadataChannel::streamCbRoutine( 2218 mm_camera_super_buf_t *super_frame, 2219 QCamera3Stream * /*stream*/) 2220{ 2221 ATRACE_NAME("metadata_stream_cb_routine"); 2222 uint32_t requestNumber = 0; 2223 if (super_frame == NULL || super_frame->num_bufs != 1) { 2224 LOGE("super_frame is not valid"); 2225 return; 2226 } 2227 if (mChannelCB) { 
2228 mChannelCB(super_frame, NULL, requestNumber, false, mUserData); 2229 } 2230} 2231 2232QCamera3StreamMem* QCamera3MetadataChannel::getStreamBufs(uint32_t len) 2233{ 2234 int rc; 2235 if (len < sizeof(metadata_buffer_t)) { 2236 LOGE("Metadata buffer size less than structure %d vs %d", 2237 len, 2238 sizeof(metadata_buffer_t)); 2239 return NULL; 2240 } 2241 mMemory = new QCamera3StreamMem(MIN_STREAMING_BUFFER_NUM); 2242 if (!mMemory) { 2243 LOGE("unable to create metadata memory"); 2244 return NULL; 2245 } 2246 rc = mMemory->allocateAll(len); 2247 if (rc < 0) { 2248 LOGE("unable to allocate metadata memory"); 2249 delete mMemory; 2250 mMemory = NULL; 2251 return NULL; 2252 } 2253 clear_metadata_buffer((metadata_buffer_t*)mMemory->getPtr(0)); 2254 2255 for (uint32_t i = 0; i < mMemory->getCnt(); i++) { 2256 if (mMemory->valid(i)) { 2257 metadata_buffer_t *metadata_buffer_t = 2258 static_cast<::metadata_buffer_t *> (mMemory->getPtr(i)); 2259 metadata_buffer_t->depth_data.depth_data = nullptr; 2260 if (mDepthDataPresent) { 2261 metadata_buffer_t->depth_data.depth_data = 2262 new uint8_t[PD_DATA_SIZE]; 2263 } 2264 } else { 2265 LOGE("Invalid meta buffer at index: %d", i); 2266 } 2267 } 2268 2269 return mMemory; 2270} 2271 2272void QCamera3MetadataChannel::putStreamBufs() 2273{ 2274 for (uint32_t i = 0; i < mMemory->getCnt(); i++) { 2275 if (mMemory->valid(i)) { 2276 metadata_buffer_t *metadata_buffer_t = 2277 static_cast<::metadata_buffer_t *> (mMemory->getPtr(i)); 2278 if (nullptr != metadata_buffer_t->depth_data.depth_data) { 2279 delete [] metadata_buffer_t->depth_data.depth_data; 2280 metadata_buffer_t->depth_data.depth_data = nullptr; 2281 } 2282 } else { 2283 LOGE("Invalid meta buffer at index: %d", i); 2284 } 2285 } 2286 2287 mMemory->deallocate(); 2288 delete mMemory; 2289 mMemory = NULL; 2290} 2291/*************************************************************************************/ 2292// RAW Channel related functions 2293QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle, 2294 uint32_t channel_handle, 2295 mm_camera_ops_t *cam_ops, 2296 channel_cb_routine cb_routine, 2297 channel_cb_buffer_err cb_buffer_err, 2298 cam_padding_info_t *paddingInfo, 2299 void *userData, 2300 camera3_stream_t *stream, 2301 cam_feature_mask_t postprocess_mask, 2302 QCamera3Channel *metadataChannel, 2303 bool raw_16, uint32_t numBuffers) : 2304 QCamera3RegularChannel(cam_handle, channel_handle, cam_ops, 2305 cb_routine, cb_buffer_err, paddingInfo, userData, stream, 2306 CAM_STREAM_TYPE_RAW, postprocess_mask, 2307 metadataChannel, numBuffers), 2308 mIsRaw16(raw_16) 2309{ 2310 char prop[PROPERTY_VALUE_MAX]; 2311 property_get("persist.camera.raw.debug.dump", prop, "0"); 2312 mRawDump = atoi(prop); 2313 mMapStreamBuffers = (mRawDump || mIsRaw16); 2314} 2315 2316QCamera3RawChannel::~QCamera3RawChannel() 2317{ 2318} 2319 2320/*=========================================================================== 2321 * FUNCTION : initialize 2322 * 2323 * DESCRIPTION: Initialize and add camera channel & stream 2324 * 2325 * PARAMETERS : 2326 * @isType : image stabilization type on the stream 2327 * 2328 * RETURN : int32_t type of status 2329 * NO_ERROR -- success 2330 * none-zero failure code 2331 *==========================================================================*/ 2332 2333int32_t QCamera3RawChannel::initialize(cam_is_type_t isType) 2334{ 2335 return QCamera3RegularChannel::initialize(isType); 2336} 2337 2338void QCamera3RawChannel::streamCbRoutine( 2339 mm_camera_super_buf_t *super_frame, 2340 
QCamera3Stream * stream) 2341{ 2342 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_RAW_CH_STRM_CB); 2343 /* Move this back down once verified */ 2344 if (mRawDump) 2345 dumpRawSnapshot(super_frame->bufs[0]); 2346 2347 if (mIsRaw16) { 2348 cam_format_t streamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_RAW, 2349 mCamera3Stream->width, mCamera3Stream->height, m_bUBWCenable, mIsType); 2350 if (streamFormat == CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG) 2351 convertMipiToRaw16(super_frame->bufs[0]); 2352 else 2353 convertLegacyToRaw16(super_frame->bufs[0]); 2354 2355 //Make sure cache coherence because extra processing is done 2356 mMemory.cleanCache(super_frame->bufs[0]->buf_idx); 2357 } 2358 2359 QCamera3RegularChannel::streamCbRoutine(super_frame, stream); 2360 return; 2361} 2362 2363void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame) 2364{ 2365 QCamera3Stream *stream = getStreamByIndex(0); 2366 if (stream != NULL) { 2367 char buf[FILENAME_MAX]; 2368 memset(buf, 0, sizeof(buf)); 2369 cam_dimension_t dim; 2370 memset(&dim, 0, sizeof(dim)); 2371 stream->getFrameDimension(dim); 2372 2373 cam_frame_len_offset_t offset; 2374 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 2375 stream->getFrameOffset(offset); 2376 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"r_%d_%dx%d.raw", 2377 frame->frame_idx, offset.mp[0].stride, offset.mp[0].scanline); 2378 2379 int file_fd = open(buf, O_RDWR| O_CREAT, 0644); 2380 if (file_fd >= 0) { 2381 ssize_t written_len = write(file_fd, frame->buffer, frame->frame_len); 2382 LOGD("written number of bytes %zd", written_len); 2383 frame->cache_flags |= CPU_HAS_READ; 2384 close(file_fd); 2385 } else { 2386 LOGE("failed to open file to dump image"); 2387 } 2388 } else { 2389 LOGE("Could not find stream"); 2390 } 2391 2392} 2393 2394void QCamera3RawChannel::convertLegacyToRaw16(mm_camera_buf_def_t *frame) 2395{ 2396 // Convert image buffer from Opaque raw format to RAW16 format 2397 // 10bit Opaque raw is stored in the format of: 2398 // 0000 - p5 - p4 - p3 - p2 - p1 - p0 2399 // where p0 to p5 are 6 pixels (each is 10bit)_and most significant 2400 // 4 bits are 0s. Each 64bit word contains 6 pixels. 2401 2402 QCamera3Stream *stream = getStreamByIndex(0); 2403 if (stream != NULL) { 2404 cam_dimension_t dim; 2405 memset(&dim, 0, sizeof(dim)); 2406 stream->getFrameDimension(dim); 2407 2408 cam_frame_len_offset_t offset; 2409 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 2410 stream->getFrameOffset(offset); 2411 2412 uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U; 2413 uint16_t* raw16_buffer = (uint16_t *)frame->buffer; 2414 2415 // In-place format conversion. 2416 // Raw16 format always occupy more memory than opaque raw10. 2417 // Convert to Raw16 by iterating through all pixels from bottom-right 2418 // to top-left of the image. 2419 // One special notes: 2420 // 1. Cross-platform raw16's stride is 16 pixels. 2421 // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes. 
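        // Worked example of the unpacking performed by the loop below
        // (illustrative only): each 64-bit word packs six 10-bit pixels in
        // its low 60 bits, with p0 in the least significant bits. Pixel x of
        // a row therefore sits in word x/6 at bit offset 10*(x%6); for x = 7:
        //     word  = row_start[7/6]  = row_start[1]
        //     raw16 = (word >> (10*(7%6))) & 0x3FF = (word >> 10) & 0x3FF
        // The result lands at raw16_buffer[y*raw16_stride + x], where
        // raw16_stride is the width rounded up to a multiple of 16 pixels.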
2422 for (int32_t ys = dim.height - 1; ys >= 0; ys--) { 2423 uint32_t y = (uint32_t)ys; 2424 uint64_t* row_start = (uint64_t *)frame->buffer + 2425 y * (uint32_t)offset.mp[0].stride_in_bytes / 8; 2426 for (int32_t xs = dim.width - 1; xs >= 0; xs--) { 2427 uint32_t x = (uint32_t)xs; 2428 uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6))); 2429 raw16_buffer[y*raw16_stride+x] = raw16_pixel; 2430 } 2431 } 2432 } else { 2433 LOGE("Could not find stream"); 2434 } 2435 2436} 2437 2438void QCamera3RawChannel::convertMipiToRaw16(mm_camera_buf_def_t *frame) 2439{ 2440 // Convert image buffer from mipi10 raw format to RAW16 format 2441 // mipi10 opaque raw is stored in the format of: 2442 // P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2) 2443 // 4 pixels occupy 5 bytes, no padding needed 2444 2445 QCamera3Stream *stream = getStreamByIndex(0); 2446 if (stream != NULL) { 2447 cam_dimension_t dim; 2448 memset(&dim, 0, sizeof(dim)); 2449 stream->getFrameDimension(dim); 2450 2451 cam_frame_len_offset_t offset; 2452 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 2453 stream->getFrameOffset(offset); 2454 2455 uint32_t raw16_stride = ((uint32_t)dim.width + 15U) & ~15U; 2456 uint16_t* raw16_buffer = (uint16_t *)frame->buffer; 2457 uint8_t first_quintuple[5]; 2458 memcpy(first_quintuple, raw16_buffer, sizeof(first_quintuple)); 2459 2460 // In-place format conversion. 2461 // Raw16 format always occupy more memory than opaque raw10. 2462 // Convert to Raw16 by iterating through all pixels from bottom-right 2463 // to top-left of the image. 2464 // One special notes: 2465 // 1. Cross-platform raw16's stride is 16 pixels. 2466 // 2. mipi raw10's stride is 4 pixels, and aligned to 16 bytes. 2467 for (int32_t ys = dim.height - 1; ys >= 0; ys--) { 2468 uint32_t y = (uint32_t)ys; 2469 uint8_t* row_start = (uint8_t *)frame->buffer + 2470 y * (uint32_t)offset.mp[0].stride_in_bytes; 2471 for (int32_t xs = dim.width - 1; xs >= 0; xs--) { 2472 uint32_t x = (uint32_t)xs; 2473 uint8_t upper_8bit = row_start[5*(x/4)+x%4]; 2474 uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> ((x%4) << 1)) & 0x3); 2475 uint16_t raw16_pixel = 2476 (uint16_t)(((uint16_t)upper_8bit)<<2 | 2477 (uint16_t)lower_2bit); 2478 raw16_buffer[y*raw16_stride+x] = raw16_pixel; 2479 } 2480 } 2481 2482 // Re-convert the first 2 pixels of the buffer because the loop above messes 2483 // them up by reading the first quintuple while modifying it. 
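        // Concretely (illustrative only): within each 5-byte quintuple,
        // bytes 0..3 hold bits 9:2 of pixels P0..P3 and byte 4 packs their
        // low two bits (P0 in bits 1:0, P1 in bits 3:2, and so on), so
        //     raw16(x) = (quintuple[x%4] << 2) | ((quintuple[4] >> (2*(x%4))) & 0x3)
        // The two writes below apply exactly this to x = 0 and x = 1 using
        // the saved copy, since the in-place conversion has already
        // overwritten the first quintuple's bytes.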
2484 raw16_buffer[0] = ((uint16_t)first_quintuple[0]<<2) | (first_quintuple[4] & 0x3); 2485 raw16_buffer[1] = ((uint16_t)first_quintuple[1]<<2) | ((first_quintuple[4] >> 2) & 0x3); 2486 2487 } else { 2488 LOGE("Could not find stream"); 2489 } 2490 2491} 2492 2493/*=========================================================================== 2494 * FUNCTION : getReprocessType 2495 * 2496 * DESCRIPTION: get the type of reprocess output supported by this channel 2497 * 2498 * PARAMETERS : NONE 2499 * 2500 * RETURN : reprocess_type_t : type of reprocess 2501 *==========================================================================*/ 2502reprocess_type_t QCamera3RawChannel::getReprocessType() 2503{ 2504 return REPROCESS_TYPE_RAW; 2505} 2506 2507 2508/*************************************************************************************/ 2509// RAW Dump Channel related functions 2510 2511/*=========================================================================== 2512 * FUNCTION : QCamera3RawDumpChannel 2513 * 2514 * DESCRIPTION: Constructor for RawDumpChannel 2515 * 2516 * PARAMETERS : 2517 * @cam_handle : Handle for Camera 2518 * @cam_ops : Function pointer table 2519 * @rawDumpSize : Dimensions for the Raw stream 2520 * @paddinginfo : Padding information for stream 2521 * @userData : Cookie for parent 2522 * @pp mask : PP feature mask for this stream 2523 * @numBuffers : number of max dequeued buffers 2524 * 2525 * RETURN : NA 2526 *==========================================================================*/ 2527QCamera3RawDumpChannel::QCamera3RawDumpChannel(uint32_t cam_handle, 2528 uint32_t channel_handle, 2529 mm_camera_ops_t *cam_ops, 2530 cam_dimension_t rawDumpSize, 2531 cam_padding_info_t *paddingInfo, 2532 void *userData, 2533 cam_feature_mask_t postprocess_mask, uint32_t numBuffers) : 2534 QCamera3Channel(cam_handle, channel_handle, cam_ops, NULL, 2535 NULL, paddingInfo, postprocess_mask, 2536 userData, numBuffers), 2537 mDim(rawDumpSize), 2538 mMemory(NULL) 2539{ 2540 char prop[PROPERTY_VALUE_MAX]; 2541 property_get("persist.camera.raw.dump", prop, "0"); 2542 mRawDump = atoi(prop); 2543} 2544 2545/*=========================================================================== 2546 * FUNCTION : QCamera3RawDumpChannel 2547 * 2548 * DESCRIPTION: Destructor for RawDumpChannel 2549 * 2550 * PARAMETERS : 2551 * 2552 * RETURN : NA 2553 *==========================================================================*/ 2554 2555QCamera3RawDumpChannel::~QCamera3RawDumpChannel() 2556{ 2557 destroy(); 2558} 2559 2560/*=========================================================================== 2561 * FUNCTION : dumpRawSnapshot 2562 * 2563 * DESCRIPTION: Helper function to dump Raw frames 2564 * 2565 * PARAMETERS : 2566 * @frame : stream buf frame to be dumped 2567 * 2568 * RETURN : NA 2569 *==========================================================================*/ 2570void QCamera3RawDumpChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame) 2571{ 2572 QCamera3Stream *stream = getStreamByIndex(0); 2573 if (stream != NULL) { 2574 char buf[FILENAME_MAX]; 2575 struct timeval tv; 2576 struct tm timeinfo_data; 2577 struct tm *timeinfo; 2578 2579 cam_dimension_t dim; 2580 memset(&dim, 0, sizeof(dim)); 2581 stream->getFrameDimension(dim); 2582 2583 cam_frame_len_offset_t offset; 2584 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 2585 stream->getFrameOffset(offset); 2586 2587 gettimeofday(&tv, NULL); 2588 timeinfo = localtime_r(&tv.tv_sec, &timeinfo_data); 2589 2590 if (NULL != timeinfo) { 2591 
memset(buf, 0, sizeof(buf)); 2592 snprintf(buf, sizeof(buf), 2593 QCAMERA_DUMP_FRM_LOCATION 2594 "%04d-%02d-%02d-%02d-%02d-%02d-%06ld_%d_%dx%d.raw", 2595 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1, 2596 timeinfo->tm_mday, timeinfo->tm_hour, 2597 timeinfo->tm_min, timeinfo->tm_sec,tv.tv_usec, 2598 frame->frame_idx, dim.width, dim.height); 2599 2600 int file_fd = open(buf, O_RDWR| O_CREAT, 0777); 2601 if (file_fd >= 0) { 2602 ssize_t written_len = 2603 write(file_fd, frame->buffer, offset.frame_len); 2604 LOGD("written number of bytes %zd", written_len); 2605 frame->cache_flags |= CPU_HAS_READ; 2606 close(file_fd); 2607 } else { 2608 LOGE("failed to open file to dump image"); 2609 } 2610 } else { 2611 LOGE("localtime_r() error"); 2612 } 2613 } else { 2614 LOGE("Could not find stream"); 2615 } 2616 2617} 2618 2619/*=========================================================================== 2620 * FUNCTION : streamCbRoutine 2621 * 2622 * DESCRIPTION: Callback routine invoked for each frame generated for 2623 * Rawdump channel 2624 * 2625 * PARAMETERS : 2626 * @super_frame : stream buf frame generated 2627 * @stream : Underlying Stream object cookie 2628 * 2629 * RETURN : NA 2630 *==========================================================================*/ 2631void QCamera3RawDumpChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 2632 __unused QCamera3Stream *stream) 2633{ 2634 LOGD("E"); 2635 if (super_frame == NULL || super_frame->num_bufs != 1) { 2636 LOGE("super_frame is not valid"); 2637 return; 2638 } 2639 2640 if (mRawDump) 2641 dumpRawSnapshot(super_frame->bufs[0]); 2642 2643 bufDone(super_frame); 2644 free(super_frame); 2645} 2646 2647/*=========================================================================== 2648 * FUNCTION : getStreamBufs 2649 * 2650 * DESCRIPTION: Callback function provided to interface to get buffers. 2651 * 2652 * PARAMETERS : 2653 * @len : Length of each buffer to be allocated 2654 * 2655 * RETURN : NULL on buffer allocation failure 2656 * QCamera3StreamMem object on sucess 2657 *==========================================================================*/ 2658QCamera3StreamMem* QCamera3RawDumpChannel::getStreamBufs(uint32_t len) 2659{ 2660 int rc; 2661 mMemory = new QCamera3StreamMem(mNumBuffers); 2662 2663 if (!mMemory) { 2664 LOGE("unable to create heap memory"); 2665 return NULL; 2666 } 2667 rc = mMemory->allocateAll((size_t)len); 2668 if (rc < 0) { 2669 LOGE("unable to allocate heap memory"); 2670 delete mMemory; 2671 mMemory = NULL; 2672 return NULL; 2673 } 2674 return mMemory; 2675} 2676 2677/*=========================================================================== 2678 * FUNCTION : putStreamBufs 2679 * 2680 * DESCRIPTION: Callback function provided to interface to return buffers. 2681 * Although no handles are actually returned, implicitl assumption 2682 * that interface will no longer use buffers and channel can 2683 * deallocated if necessary. 
2684 * 2685 * PARAMETERS : NA 2686 * 2687 * RETURN : NA 2688 *==========================================================================*/ 2689void QCamera3RawDumpChannel::putStreamBufs() 2690{ 2691 mMemory->deallocate(); 2692 delete mMemory; 2693 mMemory = NULL; 2694} 2695 2696/*=========================================================================== 2697 * FUNCTION : request 2698 * 2699 * DESCRIPTION: Request function used as trigger 2700 * 2701 * PARAMETERS : 2702 * @recvd_frame : buffer- this will be NULL since this is internal channel 2703 * @frameNumber : Undefined again since this is internal stream 2704 * 2705 * RETURN : int32_t type of status 2706 * NO_ERROR -- success 2707 * none-zero failure code 2708 *==========================================================================*/ 2709int32_t QCamera3RawDumpChannel::request(buffer_handle_t * /*buffer*/, 2710 uint32_t /*frameNumber*/, 2711 int & /*indexUsed*/) 2712{ 2713 if (!m_bIsActive) { 2714 return QCamera3Channel::start(); 2715 } 2716 else 2717 return 0; 2718} 2719 2720/*=========================================================================== 2721 * FUNCTION : intialize 2722 * 2723 * DESCRIPTION: Initializes channel params and creates underlying stream 2724 * 2725 * PARAMETERS : 2726 * @isType : type of image stabilization required on this stream 2727 * 2728 * RETURN : int32_t type of status 2729 * NO_ERROR -- success 2730 * none-zero failure code 2731 *==========================================================================*/ 2732int32_t QCamera3RawDumpChannel::initialize(cam_is_type_t isType) 2733{ 2734 int32_t rc; 2735 2736 mIsType = isType; 2737 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_RAW, 2738 CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG, mDim, ROTATE_0, (uint8_t)mNumBuffers, 2739 mPostProcMask, mIsType); 2740 if (rc < 0) { 2741 LOGE("addStream failed"); 2742 } 2743 return rc; 2744} 2745 2746/*************************************************************************************/ 2747// HDR+ RAW Source Channel related functions 2748QCamera3HdrPlusRawSrcChannel::QCamera3HdrPlusRawSrcChannel(uint32_t cam_handle, 2749 uint32_t channel_handle, 2750 mm_camera_ops_t *cam_ops, 2751 cam_dimension_t rawDumpSize, 2752 cam_padding_info_t *paddingInfo, 2753 void *userData, 2754 cam_feature_mask_t postprocess_mask, 2755 std::shared_ptr<HdrPlusClient> hdrPlusClient, 2756 uint32_t hdrPlusStreamId, 2757 uint32_t numBuffers) : 2758 QCamera3RawDumpChannel(cam_handle, channel_handle, cam_ops, rawDumpSize, paddingInfo, userData, 2759 postprocess_mask, numBuffers), 2760 mHdrPlusClient(hdrPlusClient), 2761 mHdrPlusStreamId(hdrPlusStreamId) 2762{ 2763 2764} 2765 2766QCamera3HdrPlusRawSrcChannel::~QCamera3HdrPlusRawSrcChannel() 2767{ 2768} 2769 2770void QCamera3HdrPlusRawSrcChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 2771 __unused QCamera3Stream *stream) 2772{ 2773 if (super_frame == NULL || super_frame->num_bufs != 1) { 2774 LOGE("super_frame is not valid"); 2775 return; 2776 } 2777 2778 // Send RAW buffer to HDR+ service 2779 sendRawToHdrPlusService(super_frame->bufs[0]); 2780 2781 bufDone(super_frame); 2782 free(super_frame); 2783} 2784 2785void QCamera3HdrPlusRawSrcChannel::sendRawToHdrPlusService(mm_camera_buf_def_t *frame) 2786{ 2787 QCamera3Stream *stream = getStreamByIndex(0); 2788 if (stream == nullptr) { 2789 LOGE("%s: Could not find stream.", __FUNCTION__); 2790 return; 2791 } 2792 2793 cam_frame_len_offset_t offset = {}; 2794 stream->getFrameOffset(offset); 2795 2796 pbcamera::StreamBuffer buffer; 2797 
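    // The fields filled in below describe the RAW frame for the HDR+ service:
    // streamId identifies the configured HDR+ input stream, data points at the
    // frame's mapped buffer, and dataSize is the frame length from the
    // stream's plane-info offset. The mock Easel timestamp further down is the
    // frame's timestamp converted to nanoseconds:
    //     mockEaselTimestampNs = ts.tv_sec * 1000000000 + ts.tv_nsec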
buffer.streamId = mHdrPlusStreamId; 2798 buffer.data = frame->buffer; 2799 buffer.dataSize = offset.frame_len; 2800 2801 // Use the frame timestamp as mock Easel timestamp. 2802 int64_t mockEaselTimestampNs = (int64_t)frame->ts.tv_sec * 1000000000 + frame->ts.tv_nsec; 2803 mHdrPlusClient->notifyInputBuffer(buffer, mockEaselTimestampNs); 2804} 2805 2806/*************************************************************************************/ 2807 2808/* QCamera3YUVChannel methods */ 2809 2810/*=========================================================================== 2811 * FUNCTION : QCamera3YUVChannel 2812 * 2813 * DESCRIPTION: constructor of QCamera3YUVChannel 2814 * 2815 * PARAMETERS : 2816 * @cam_handle : camera handle 2817 * @cam_ops : ptr to camera ops table 2818 * @cb_routine : callback routine to frame aggregator 2819 * @paddingInfo : padding information for the stream 2820 * @stream : camera3_stream_t structure 2821 * @stream_type: Channel stream type 2822 * @postprocess_mask: the postprocess mask for streams of this channel 2823 * @metadataChannel: handle to the metadataChannel 2824 * RETURN : none 2825 *==========================================================================*/ 2826QCamera3YUVChannel::QCamera3YUVChannel(uint32_t cam_handle, 2827 uint32_t channel_handle, 2828 mm_camera_ops_t *cam_ops, 2829 channel_cb_routine cb_routine, 2830 channel_cb_buffer_err cb_buf_err, 2831 cam_padding_info_t *paddingInfo, 2832 void *userData, 2833 camera3_stream_t *stream, 2834 cam_stream_type_t stream_type, 2835 cam_feature_mask_t postprocess_mask, 2836 QCamera3Channel *metadataChannel) : 2837 QCamera3ProcessingChannel(cam_handle, channel_handle, cam_ops, 2838 cb_routine, cb_buf_err, paddingInfo, userData, stream, stream_type, 2839 postprocess_mask, metadataChannel) 2840{ 2841 2842 mBypass = (postprocess_mask == CAM_QCOM_FEATURE_NONE); 2843 mFrameLen = 0; 2844 mEdgeMode.edge_mode = CAM_EDGE_MODE_OFF; 2845 mEdgeMode.sharpness = 0; 2846 mNoiseRedMode = CAM_NOISE_REDUCTION_MODE_OFF; 2847 memset(&mCropRegion, 0, sizeof(mCropRegion)); 2848} 2849 2850/*=========================================================================== 2851 * FUNCTION : ~QCamera3YUVChannel 2852 * 2853 * DESCRIPTION: destructor of QCamera3YUVChannel 2854 * 2855 * PARAMETERS : none 2856 * 2857 * 2858 * RETURN : none 2859 *==========================================================================*/ 2860QCamera3YUVChannel::~QCamera3YUVChannel() 2861{ 2862 // Deallocation of heap buffers allocated in mMemory is freed 2863 // automatically by its destructor 2864} 2865 2866/*=========================================================================== 2867 * FUNCTION : initialize 2868 * 2869 * DESCRIPTION: Initialize and add camera channel & stream 2870 * 2871 * PARAMETERS : 2872 * @isType : the image stabilization type 2873 * 2874 * RETURN : int32_t type of status 2875 * NO_ERROR -- success 2876 * none-zero failure code 2877 *==========================================================================*/ 2878int32_t QCamera3YUVChannel::initialize(cam_is_type_t isType) 2879{ 2880 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_INIT); 2881 int32_t rc = NO_ERROR; 2882 cam_dimension_t streamDim; 2883 2884 if (NULL == mCamera3Stream) { 2885 LOGE("Camera stream uninitialized"); 2886 return NO_INIT; 2887 } 2888 2889 if (1 <= m_numStreams) { 2890 // Only one stream per channel supported in v3 Hal 2891 return NO_ERROR; 2892 } 2893 2894 mIsType = isType; 2895 mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_CALLBACK, 2896 
mCamera3Stream->width, mCamera3Stream->height, m_bUBWCenable, mIsType); 2897 streamDim.width = mCamera3Stream->width; 2898 streamDim.height = mCamera3Stream->height; 2899 2900 rc = QCamera3Channel::addStream(mStreamType, 2901 mStreamFormat, 2902 streamDim, 2903 ROTATE_0, 2904 mNumBufs, 2905 mPostProcMask, 2906 mIsType); 2907 if (rc < 0) { 2908 LOGE("addStream failed"); 2909 return rc; 2910 } 2911 2912 cam_stream_buf_plane_info_t buf_planes; 2913 cam_padding_info_t paddingInfo = mPaddingInfo; 2914 2915 memset(&buf_planes, 0, sizeof(buf_planes)); 2916 //to ensure a big enough buffer size set the height and width 2917 //padding to max(height padding, width padding) 2918 paddingInfo.width_padding = MAX(paddingInfo.width_padding, paddingInfo.height_padding); 2919 paddingInfo.height_padding = paddingInfo.width_padding; 2920 2921 rc = mm_stream_calc_offset_snapshot(mStreamFormat, &streamDim, &paddingInfo, 2922 &buf_planes); 2923 if (rc < 0) { 2924 LOGE("mm_stream_calc_offset_preview failed"); 2925 return rc; 2926 } 2927 2928 mFrameLen = buf_planes.plane_info.frame_len; 2929 2930 if (NO_ERROR != rc) { 2931 LOGE("Initialize failed, rc = %d", rc); 2932 return rc; 2933 } 2934 2935 /* initialize offline meta memory for input reprocess */ 2936 rc = QCamera3ProcessingChannel::initialize(isType); 2937 if (NO_ERROR != rc) { 2938 LOGE("Processing Channel initialize failed, rc = %d", 2939 rc); 2940 } 2941 2942 return rc; 2943} 2944 2945/*=========================================================================== 2946 * FUNCTION : request 2947 * 2948 * DESCRIPTION: entry function for a request on a YUV stream. This function 2949 * has the logic to service a request based on its type 2950 * 2951 * PARAMETERS : 2952 * @buffer : pointer to the output buffer 2953 * @frameNumber : frame number of the request 2954 * @pInputBuffer : pointer to input buffer if an input request 2955 * @metadata : parameters associated with the request 2956 * @internalreq : boolean to indicate if this is purely internal request 2957 * needing internal buffer allocation 2958 * @meteringonly : boolean indicating metering only frame subset of internal 2959 * not consumed by postprocessor 2960 * 2961 * RETURN : 0 on a success start of capture 2962 * -EINVAL on invalid input 2963 * -ENODEV on serious error 2964 *==========================================================================*/ 2965int32_t QCamera3YUVChannel::request(buffer_handle_t *buffer, 2966 uint32_t frameNumber, 2967 camera3_stream_buffer_t* pInputBuffer, 2968 metadata_buffer_t* metadata, bool &needMetadata, 2969 int &indexUsed, 2970 __unused bool internalRequest = false, 2971 __unused bool meteringOnly = false) 2972{ 2973 int32_t rc = NO_ERROR; 2974 Mutex::Autolock lock(mOfflinePpLock); 2975 2976 LOGD("pInputBuffer is %p frame number %d", pInputBuffer, frameNumber); 2977 if (NULL == buffer || NULL == metadata) { 2978 LOGE("Invalid buffer/metadata in channel request"); 2979 return BAD_VALUE; 2980 } 2981 2982 PpInfo ppInfo; 2983 memset(&ppInfo, 0, sizeof(ppInfo)); 2984 ppInfo.frameNumber = frameNumber; 2985 ppInfo.offlinePpFlag = false; 2986 if (mBypass && !pInputBuffer ) { 2987 ppInfo.offlinePpFlag = needsFramePostprocessing(metadata); 2988 ppInfo.output = buffer; 2989 mOfflinePpInfoList.push_back(ppInfo); 2990 } 2991 2992 LOGD("offlinePpFlag is %d", ppInfo.offlinePpFlag); 2993 needMetadata = ppInfo.offlinePpFlag; 2994 if (!ppInfo.offlinePpFlag) { 2995 // regular request 2996 return QCamera3ProcessingChannel::request(buffer, frameNumber, 2997 pInputBuffer, metadata, 
indexUsed); 2998 } else { 2999 3000 //we need to send this frame through the CPP 3001 //Allocate heap memory, then buf done on the buffer 3002 uint32_t bufIdx; 3003 if (mFreeHeapBufferList.empty()) { 3004 rc = mMemory.allocateOne(mFrameLen); 3005 if (rc < 0) { 3006 LOGE("Failed allocating heap buffer. Fatal"); 3007 return BAD_VALUE; 3008 } else { 3009 bufIdx = (uint32_t)rc; 3010 } 3011 } else { 3012 bufIdx = *(mFreeHeapBufferList.begin()); 3013 mFreeHeapBufferList.erase(mFreeHeapBufferList.begin()); 3014 } 3015 3016 /* Configure and start postproc if necessary */ 3017 reprocess_config_t reproc_cfg; 3018 cam_dimension_t dim; 3019 memset(&reproc_cfg, 0, sizeof(reprocess_config_t)); 3020 memset(&dim, 0, sizeof(dim)); 3021 mStreams[0]->getFrameDimension(dim); 3022 setReprocConfig(reproc_cfg, NULL, metadata, mStreamFormat, dim); 3023 3024 // Start postprocessor without input buffer 3025 startPostProc(reproc_cfg); 3026 3027 LOGD("erasing %d", bufIdx); 3028 3029 mMemory.markFrameNumber(bufIdx, frameNumber); 3030 indexUsed = bufIdx; 3031 if (m_bIsActive) { 3032 mStreams[0]->bufDone(bufIdx); 3033 } 3034 3035 } 3036 return rc; 3037} 3038 3039/*=========================================================================== 3040 * FUNCTION : streamCbRoutine 3041 * 3042 * DESCRIPTION: 3043 * 3044 * PARAMETERS : 3045 * @super_frame : the super frame with filled buffer 3046 * @stream : stream on which the buffer was requested and filled 3047 * 3048 * RETURN : none 3049 *==========================================================================*/ 3050void QCamera3YUVChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 3051 QCamera3Stream *stream) 3052{ 3053 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_YUV_CH_STRM_CB); 3054 uint8_t frameIndex; 3055 int32_t resultFrameNumber; 3056 3057 if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) { 3058 LOGE("Error with the stream callback"); 3059 return; 3060 } 3061 3062 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx; 3063 if(frameIndex >= mNumBufs) { 3064 LOGE("Error, Invalid index for buffer"); 3065 stream->bufDone(frameIndex); 3066 return; 3067 } 3068 3069 if (mBypass) { 3070 { 3071 List<PpInfo>::iterator ppInfo; 3072 3073 Mutex::Autolock lock(mOfflinePpLock); 3074 resultFrameNumber = mMemory.getFrameNumber(frameIndex); 3075 for (ppInfo = mOfflinePpInfoList.begin(); 3076 ppInfo != mOfflinePpInfoList.end(); ppInfo++) { 3077 if (ppInfo->frameNumber == (uint32_t)resultFrameNumber) { 3078 break; 3079 } 3080 } 3081 LOGD("frame index %d, frame number %d", frameIndex, 3082 resultFrameNumber); 3083 //check the reprocessing required flag against the frame number 3084 if (ppInfo == mOfflinePpInfoList.end()) { 3085 LOGE("Error, request for frame number is a reprocess."); 3086 stream->bufDone(frameIndex); 3087 return; 3088 } 3089 3090 if (ppInfo->offlinePpFlag) { 3091 mm_camera_super_buf_t *frame = 3092 (mm_camera_super_buf_t *)malloc(sizeof( 3093 mm_camera_super_buf_t)); 3094 if (frame == NULL) { 3095 LOGE("Error allocating memory to save received_frame structure."); 3096 if(stream) { 3097 stream->bufDone(frameIndex); 3098 } 3099 return; 3100 } 3101 3102 *frame = *super_frame; 3103 m_postprocessor.processData(frame, ppInfo->output, 3104 resultFrameNumber); 3105 free(super_frame); 3106 return; 3107 } else { 3108 if (ppInfo != mOfflinePpInfoList.begin()) { 3109 // There is pending reprocess buffer, cache current buffer 3110 if (ppInfo->callback_buffer != NULL) { 3111 LOGE("Fatal: cached callback_buffer is already present"); 3112 } 3113 ppInfo->callback_buffer = 
super_frame; 3114 return; 3115 } else { 3116 mOfflinePpInfoList.erase(ppInfo); 3117 } 3118 } 3119 } 3120 3121 if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) { 3122 mChannelCbBufErr(this, resultFrameNumber, 3123 CAMERA3_BUFFER_STATUS_ERROR, mUserData); 3124 } 3125 } 3126 3127 QCamera3ProcessingChannel::streamCbRoutine(super_frame, stream); 3128 3129 /* return any pending buffers that were received out of order earlier */ 3130 while((super_frame = getNextPendingCbBuffer())) { 3131 QCamera3ProcessingChannel::streamCbRoutine(super_frame, stream); 3132 } 3133 3134 return; 3135} 3136 3137/*=========================================================================== 3138 * FUNCTION : getNextPendingCbBuffer 3139 * 3140 * DESCRIPTION: Returns the callback_buffer from the first entry of 3141 * mOfflinePpInfoList 3142 * 3143 * PARAMETERS : none 3144 * 3145 * RETURN : callback_buffer 3146 *==========================================================================*/ 3147mm_camera_super_buf_t* QCamera3YUVChannel::getNextPendingCbBuffer() { 3148 mm_camera_super_buf_t* super_frame = NULL; 3149 if (mOfflinePpInfoList.size()) { 3150 if ((super_frame = mOfflinePpInfoList.begin()->callback_buffer)) { 3151 mOfflinePpInfoList.erase(mOfflinePpInfoList.begin()); 3152 } 3153 } 3154 return super_frame; 3155} 3156 3157/*=========================================================================== 3158 * FUNCTION : reprocessCbRoutine 3159 * 3160 * DESCRIPTION: callback function for the reprocessed frame. This frame now 3161 * should be returned to the framework. This same callback is 3162 * used during input reprocessing or offline postprocessing 3163 * 3164 * PARAMETERS : 3165 * @resultBuffer : buffer containing the reprocessed data 3166 * @resultFrameNumber : frame number on which the buffer was requested 3167 * 3168 * RETURN : NONE 3169 * 3170 *==========================================================================*/ 3171void QCamera3YUVChannel::reprocessCbRoutine(buffer_handle_t *resultBuffer, 3172 uint32_t resultFrameNumber) 3173{ 3174 LOGD("E: frame number %d", resultFrameNumber); 3175 Vector<mm_camera_super_buf_t *> pendingCbs; 3176 3177 /* release the input buffer and input metadata buffer if used */ 3178 if (0 > mMemory.getHeapBufferIndex(resultFrameNumber)) { 3179 /* mOfflineMemory and mOfflineMetaMemory used only for input reprocessing */ 3180 int32_t rc = releaseOfflineMemory(resultFrameNumber); 3181 if (NO_ERROR != rc) { 3182 LOGE("Error releasing offline memory rc = %d", rc); 3183 } 3184 /* Since reprocessing is done, send the callback to release the input buffer */ 3185 if (mChannelCB) { 3186 mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData); 3187 } 3188 } 3189 3190 if (mBypass) { 3191 int32_t rc = handleOfflinePpCallback(resultFrameNumber, pendingCbs); 3192 if (rc != NO_ERROR) { 3193 return; 3194 } 3195 } 3196 3197 issueChannelCb(resultBuffer, resultFrameNumber); 3198 3199 // Call all pending callbacks to return buffers 3200 for (size_t i = 0; i < pendingCbs.size(); i++) { 3201 QCamera3ProcessingChannel::streamCbRoutine( 3202 pendingCbs[i], mStreams[0]); 3203 } 3204 3205} 3206 3207/*=========================================================================== 3208 * FUNCTION : needsFramePostprocessing 3209 * 3210 * DESCRIPTION: 3211 * 3212 * PARAMETERS : 3213 * 3214 * RETURN : 3215 * TRUE if frame needs to be postprocessed 3216 * FALSE is frame does not need to be postprocessed 3217 * 3218 *==========================================================================*/ 3219bool 
QCamera3YUVChannel::needsFramePostprocessing(metadata_buffer_t *meta) 3220{ 3221 bool ppNeeded = false; 3222 3223 //sharpness 3224 IF_META_AVAILABLE(cam_edge_application_t, edgeMode, 3225 CAM_INTF_META_EDGE_MODE, meta) { 3226 mEdgeMode = *edgeMode; 3227 } 3228 3229 //wnr 3230 IF_META_AVAILABLE(uint32_t, noiseRedMode, 3231 CAM_INTF_META_NOISE_REDUCTION_MODE, meta) { 3232 mNoiseRedMode = *noiseRedMode; 3233 } 3234 3235 //crop region 3236 IF_META_AVAILABLE(cam_crop_region_t, scalerCropRegion, 3237 CAM_INTF_META_SCALER_CROP_REGION, meta) { 3238 mCropRegion = *scalerCropRegion; 3239 } 3240 3241 if ((CAM_EDGE_MODE_OFF != mEdgeMode.edge_mode) && 3242 (CAM_EDGE_MODE_ZERO_SHUTTER_LAG != mEdgeMode.edge_mode)) { 3243 ppNeeded = true; 3244 } 3245 if ((CAM_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG != mNoiseRedMode) && 3246 (CAM_NOISE_REDUCTION_MODE_OFF != mNoiseRedMode) && 3247 (CAM_NOISE_REDUCTION_MODE_MINIMAL != mNoiseRedMode)) { 3248 ppNeeded = true; 3249 } 3250 if ((mCropRegion.width < (int32_t)mCamera3Stream->width) || 3251 (mCropRegion.height < (int32_t)mCamera3Stream->height)) { 3252 ppNeeded = true; 3253 } 3254 3255 return ppNeeded; 3256} 3257 3258/*=========================================================================== 3259 * FUNCTION : handleOfflinePpCallback 3260 * 3261 * DESCRIPTION: callback function for the reprocessed frame from offline 3262 * postprocessing. 3263 * 3264 * PARAMETERS : 3265 * @resultFrameNumber : frame number on which the buffer was requested 3266 * @pendingCbs : pending buffers to be returned first 3267 * 3268 * RETURN : int32_t type of status 3269 * NO_ERROR -- success 3270 * none-zero failure code 3271 *==========================================================================*/ 3272int32_t QCamera3YUVChannel::handleOfflinePpCallback(uint32_t resultFrameNumber, 3273 Vector<mm_camera_super_buf_t *>& pendingCbs) 3274{ 3275 Mutex::Autolock lock(mOfflinePpLock); 3276 List<PpInfo>::iterator ppInfo; 3277 3278 for (ppInfo = mOfflinePpInfoList.begin(); 3279 ppInfo != mOfflinePpInfoList.end(); ppInfo++) { 3280 if (ppInfo->frameNumber == resultFrameNumber) { 3281 break; 3282 } 3283 } 3284 3285 if (ppInfo == mOfflinePpInfoList.end()) { 3286 LOGI("Request of frame number %d is reprocessing", 3287 resultFrameNumber); 3288 return NO_ERROR; 3289 } else if (ppInfo != mOfflinePpInfoList.begin()) { 3290 LOGE("callback for frame number %d should be head of list", 3291 resultFrameNumber); 3292 return BAD_VALUE; 3293 } 3294 3295 if (ppInfo->offlinePpFlag) { 3296 // Need to get the input buffer frame index from the 3297 // mMemory object and add that to the free heap buffers list. 3298 int32_t bufferIndex = 3299 mMemory.getHeapBufferIndex(resultFrameNumber); 3300 if (bufferIndex < 0) { 3301 LOGE("Fatal %d: no buffer index for frame number %d", 3302 bufferIndex, resultFrameNumber); 3303 return BAD_VALUE; 3304 } 3305 mMemory.markFrameNumber(bufferIndex, -1); 3306 mFreeHeapBufferList.push_back(bufferIndex); 3307 //Move heap buffer into free pool and invalidate the frame number 3308 ppInfo = mOfflinePpInfoList.erase(ppInfo); 3309 3310 /* return any pending buffers that were received out of order 3311 earlier */ 3312 mm_camera_super_buf_t* super_frame; 3313 while((super_frame = getNextPendingCbBuffer())) { 3314 pendingCbs.push_back(super_frame); 3315 } 3316 } else { 3317 LOGE("Fatal: request of frame number %d doesn't need" 3318 " offline postprocessing. 
However there is" 3319 " reprocessing callback.", 3320 resultFrameNumber); 3321 return BAD_VALUE; 3322 } 3323 3324 return NO_ERROR; 3325} 3326 3327/*=========================================================================== 3328 * FUNCTION : getReprocessType 3329 * 3330 * DESCRIPTION: get the type of reprocess output supported by this channel 3331 * 3332 * PARAMETERS : NONE 3333 * 3334 * RETURN : reprocess_type_t : type of reprocess 3335 *==========================================================================*/ 3336reprocess_type_t QCamera3YUVChannel::getReprocessType() 3337{ 3338 return REPROCESS_TYPE_YUV; 3339} 3340 3341/* QCamera3PicChannel methods */ 3342 3343/*=========================================================================== 3344 * FUNCTION : jpegEvtHandle 3345 * 3346 * DESCRIPTION: Function registerd to mm-jpeg-interface to handle jpeg events. 3347 Construct result payload and call mChannelCb to deliver buffer 3348 to framework. 3349 * 3350 * PARAMETERS : 3351 * @status : status of jpeg job 3352 * @client_hdl: jpeg client handle 3353 * @jobId : jpeg job Id 3354 * @p_ouput : ptr to jpeg output result struct 3355 * @userdata : user data ptr 3356 * 3357 * RETURN : none 3358 *==========================================================================*/ 3359void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status, 3360 uint32_t /*client_hdl*/, 3361 uint32_t jobId, 3362 mm_jpeg_output_t *p_output, 3363 void *userdata) 3364{ 3365 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_JPEG_EVT_HANDLE); 3366 buffer_handle_t *resultBuffer = NULL; 3367 buffer_handle_t *jpegBufferHandle = NULL; 3368 int resultStatus = CAMERA3_BUFFER_STATUS_OK; 3369 camera3_stream_buffer_t result; 3370 camera3_jpeg_blob_t jpegHeader; 3371 3372 QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata; 3373 if (obj) { 3374 //Construct payload for process_capture_result. 
Call mChannelCb 3375 3376 qcamera_hal3_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId); 3377 3378 if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) { 3379 LOGE("Error in jobId: (%d) with status: %d", jobId, status); 3380 resultStatus = CAMERA3_BUFFER_STATUS_ERROR; 3381 } 3382 3383 if (NULL != job) { 3384 uint32_t bufIdx = (uint32_t)job->jpeg_settings->out_buf_index; 3385 LOGD("jpeg out_buf_index: %d", bufIdx); 3386 3387 //Construct jpeg transient header of type camera3_jpeg_blob_t 3388 //Append at the end of jpeg image of buf_filled_len size 3389 3390 jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID; 3391 if (JPEG_JOB_STATUS_DONE == status) { 3392 jpegHeader.jpeg_size = (uint32_t)p_output->buf_filled_len; 3393 char* jpeg_buf = (char *)p_output->buf_vaddr; 3394 cam_frame_len_offset_t offset; 3395 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 3396 mm_camera_buf_def_t *jpeg_dump_buffer = NULL; 3397 cam_dimension_t dim; 3398 dim.width = obj->mCamera3Stream->width; 3399 dim.height = obj->mCamera3Stream->height; 3400 jpeg_dump_buffer = (mm_camera_buf_def_t *)malloc(sizeof(mm_camera_buf_def_t)); 3401 if(!jpeg_dump_buffer) { 3402 LOGE("Could not allocate jpeg dump buffer"); 3403 } else { 3404 jpeg_dump_buffer->buffer = p_output->buf_vaddr; 3405 jpeg_dump_buffer->frame_len = p_output->buf_filled_len; 3406 jpeg_dump_buffer->frame_idx = obj->mMemory.getFrameNumber(bufIdx); 3407 obj->dumpYUV(jpeg_dump_buffer, dim, offset, QCAMERA_DUMP_FRM_OUTPUT_JPEG); 3408 free(jpeg_dump_buffer); 3409 } 3410 3411 ssize_t maxJpegSize = -1; 3412 3413 // Gralloc buffer may have additional padding for 4K page size 3414 // Follow size guidelines based on spec since framework relies 3415 // on that to reach end of buffer and with it the header 3416 3417 //Handle same as resultBuffer, but for readablity 3418 jpegBufferHandle = 3419 (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx); 3420 3421 if (NULL != jpegBufferHandle) { 3422 maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width; 3423 if (maxJpegSize > obj->mMemory.getSize(bufIdx)) { 3424 maxJpegSize = obj->mMemory.getSize(bufIdx); 3425 } 3426 3427 size_t jpeg_eof_offset = 3428 (size_t)(maxJpegSize - (ssize_t)sizeof(jpegHeader)); 3429 char *jpeg_eof = &jpeg_buf[jpeg_eof_offset]; 3430 memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader)); 3431 obj->mMemory.cleanInvalidateCache(bufIdx); 3432 } else { 3433 LOGE("JPEG buffer not found and index: %d", 3434 bufIdx); 3435 resultStatus = CAMERA3_BUFFER_STATUS_ERROR; 3436 } 3437 } 3438 3439 ////Use below data to issue framework callback 3440 resultBuffer = 3441 (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx); 3442 int32_t resultFrameNumber = obj->mMemory.getFrameNumber(bufIdx); 3443 int32_t rc = obj->mMemory.unregisterBuffer(bufIdx); 3444 if (NO_ERROR != rc) { 3445 LOGE("Error %d unregistering stream buffer %d", 3446 rc, bufIdx); 3447 } 3448 3449 result.stream = obj->mCamera3Stream; 3450 result.buffer = resultBuffer; 3451 result.status = resultStatus; 3452 result.acquire_fence = -1; 3453 result.release_fence = -1; 3454 3455 // Release any snapshot buffers before calling 3456 // the user callback. The callback can potentially 3457 // unblock pending requests to snapshot stream. 
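            // The JPEG job's source frame (src_reproc_frame when reprocessing
            // was involved, otherwise src_frame) records which snapshot stream
            // buffer produced this image; pushing that buffer index back onto
            // mFreeBufferList here lets a pending snapshot request reuse it as
            // soon as the callback unblocks it.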
3458 int32_t snapshotIdx = -1; 3459 mm_camera_super_buf_t* src_frame = NULL; 3460 3461 if (job->src_reproc_frame) 3462 src_frame = job->src_reproc_frame; 3463 else 3464 src_frame = job->src_frame; 3465 3466 if (src_frame) { 3467 if (obj->mStreams[0]->getMyHandle() == 3468 src_frame->bufs[0]->stream_id) { 3469 snapshotIdx = (int32_t)src_frame->bufs[0]->buf_idx; 3470 if (0 <= snapshotIdx) { 3471 Mutex::Autolock lock(obj->mFreeBuffersLock); 3472 obj->mFreeBufferList.push_back((uint32_t)snapshotIdx); 3473 } 3474 } 3475 } 3476 3477 LOGI("Issue Jpeg Callback frameNumber = %d status = %d", 3478 resultFrameNumber, resultStatus); 3479 ATRACE_ASYNC_END("SNAPSHOT", resultFrameNumber); 3480 if (obj->mChannelCB) { 3481 obj->mChannelCB(NULL, 3482 &result, 3483 (uint32_t)resultFrameNumber, 3484 false, 3485 obj->mUserData); 3486 } 3487 3488 // release internal data for jpeg job 3489 if ((NULL != job->fwk_frame) || (NULL != job->fwk_src_buffer)) { 3490 /* unregister offline input buffer */ 3491 int32_t inputBufIndex = 3492 obj->mOfflineMemory.getGrallocBufferIndex((uint32_t)resultFrameNumber); 3493 if (0 <= inputBufIndex) { 3494 rc = obj->mOfflineMemory.unregisterBuffer(inputBufIndex); 3495 } else { 3496 LOGE("could not find the input buf index, frame number %d", 3497 resultFrameNumber); 3498 } 3499 if (NO_ERROR != rc) { 3500 LOGE("Error %d unregistering input buffer %d", 3501 rc, bufIdx); 3502 } 3503 3504 /* unregister offline meta buffer */ 3505 int32_t metaBufIndex = 3506 obj->mOfflineMetaMemory.getHeapBufferIndex((uint32_t)resultFrameNumber); 3507 if (0 <= metaBufIndex) { 3508 Mutex::Autolock lock(obj->mFreeOfflineMetaBuffersLock); 3509 obj->mFreeOfflineMetaBuffersList.push_back((uint32_t)metaBufIndex); 3510 } else { 3511 LOGE("could not find the input meta buf index, frame number %d", 3512 resultFrameNumber); 3513 } 3514 } 3515 obj->m_postprocessor.releaseOfflineBuffers(false); 3516 obj->m_postprocessor.releaseJpegJobData(job); 3517 free(job); 3518 } 3519 3520 return; 3521 // } 3522 } else { 3523 LOGE("Null userdata in jpeg callback"); 3524 } 3525} 3526 3527QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle, 3528 uint32_t channel_handle, 3529 mm_camera_ops_t *cam_ops, 3530 channel_cb_routine cb_routine, 3531 channel_cb_buffer_err cb_buf_err, 3532 cam_padding_info_t *paddingInfo, 3533 void *userData, 3534 camera3_stream_t *stream, 3535 cam_feature_mask_t postprocess_mask, 3536 __unused bool is4KVideo, 3537 bool isInputStreamConfigured, 3538 QCamera3Channel *metadataChannel, 3539 uint32_t numBuffers) : 3540 QCamera3ProcessingChannel(cam_handle, channel_handle, 3541 cam_ops, cb_routine, cb_buf_err, paddingInfo, userData, 3542 stream, CAM_STREAM_TYPE_SNAPSHOT, 3543 postprocess_mask, metadataChannel, numBuffers), 3544 mNumSnapshotBufs(0), 3545 mInputBufferHint(isInputStreamConfigured), 3546 mYuvMemory(NULL), 3547 mFrameLen(0) 3548{ 3549 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData; 3550 m_max_pic_dim = hal_obj->calcMaxJpegDim(); 3551 mYuvWidth = stream->width; 3552 mYuvHeight = stream->height; 3553 mStreamType = CAM_STREAM_TYPE_SNAPSHOT; 3554 // Use same pixelformat for 4K video case 3555 mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_SNAPSHOT, 3556 stream->width, stream->height, m_bUBWCenable, IS_TYPE_NONE); 3557 int32_t rc = m_postprocessor.initJpeg(jpegEvtHandle, &m_max_pic_dim, this); 3558 if (rc != 0) { 3559 LOGE("Init Postprocessor failed"); 3560 } 3561} 3562 3563/*=========================================================================== 3564 * FUNCTION 
: flush
 *
 * DESCRIPTION: flush pic channel, which will stop all processing within, including
 *              the reprocessing channel in postprocessor and YUV stream.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero  failure code
 *==========================================================================*/
int32_t QCamera3PicChannel::flush()
{
    int32_t rc = NO_ERROR;
    if(!m_bIsActive) {
        LOGE("Attempt to flush inactive channel");
        return NO_INIT;
    }

    rc = m_postprocessor.flush();
    if (rc != 0) {
        LOGE("Postprocessor flush failed, rc = %d", rc);
        return rc;
    }

    if (0 < mOfflineMetaMemory.getCnt()) {
        mOfflineMetaMemory.deallocate();
    }
    if (0 < mOfflineMemory.getCnt()) {
        mOfflineMemory.unregisterBuffers();
    }
    Mutex::Autolock lock(mFreeBuffersLock);
    mFreeBufferList.clear();

    for (uint32_t i = 0; i < mCamera3Stream->max_buffers; i++) {
        mFreeBufferList.push_back(i);
    }
    return rc;
}


QCamera3PicChannel::~QCamera3PicChannel()
{
    if (mYuvMemory != nullptr) {
        mYuvMemory->deallocate();
        delete mYuvMemory;
        mYuvMemory = nullptr;
    }
}

/*===========================================================================
 * FUNCTION   : metadataBufDone
 *
 * DESCRIPTION: Buffer done method for a metadata buffer
 *
 * PARAMETERS :
 *   @recvd_frame : received metadata frame
 *
 * RETURN     : int32_t type of status
 *              OK  -- success
 *              non-zero  failure code
 *==========================================================================*/
int32_t QCamera3PicChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
{
    // Check if this is an external metadata
    if (recvd_frame != nullptr && recvd_frame->num_bufs == 1) {
        Mutex::Autolock lock(mPendingExternalMetadataLock);
        auto iter = mPendingExternalMetadata.begin();
        while (iter != mPendingExternalMetadata.end()) {
            if (iter->get() == recvd_frame->bufs[0]->buffer) {
                // Remove the metadata allocated externally.
                mPendingExternalMetadata.erase(iter);
                return OK;
            }

            iter++;
        }
    }

    // If this is not an external metadata, return the metadata.
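    // mPendingExternalMetadata is only populated by returnYuvBufferAndEncode()
    // below, which keeps the caller's shared_ptr alive while the postprocessor
    // still references it. Anything else falls through to the base class
    // bookkeeping.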
3644 return QCamera3ProcessingChannel::metadataBufDone(recvd_frame); 3645} 3646 3647int32_t QCamera3PicChannel::initialize(cam_is_type_t isType) 3648{ 3649 int32_t rc = NO_ERROR; 3650 cam_dimension_t streamDim; 3651 cam_stream_type_t streamType; 3652 cam_format_t streamFormat; 3653 3654 if (NULL == mCamera3Stream) { 3655 LOGE("Camera stream uninitialized"); 3656 return NO_INIT; 3657 } 3658 3659 if (1 <= m_numStreams) { 3660 // Only one stream per channel supported in v3 Hal 3661 return NO_ERROR; 3662 } 3663 3664 mIsType = isType; 3665 streamType = mStreamType; 3666 streamFormat = mStreamFormat; 3667 streamDim.width = (int32_t)mYuvWidth; 3668 streamDim.height = (int32_t)mYuvHeight; 3669 3670 mNumSnapshotBufs = mCamera3Stream->max_buffers; 3671 rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim, 3672 ROTATE_0, (uint8_t)mCamera3Stream->max_buffers, mPostProcMask, 3673 mIsType); 3674 3675 if (NO_ERROR != rc) { 3676 LOGE("Initialize failed, rc = %d", rc); 3677 return rc; 3678 } 3679 3680 mYuvMemory = new QCamera3StreamMem(mCamera3Stream->max_buffers); 3681 if (!mYuvMemory) { 3682 LOGE("unable to create YUV buffers"); 3683 return NO_MEMORY; 3684 } 3685 cam_stream_buf_plane_info_t buf_planes; 3686 cam_padding_info_t paddingInfo = mPaddingInfo; 3687 3688 memset(&buf_planes, 0, sizeof(buf_planes)); 3689 //to ensure a big enough buffer size set the height and width 3690 //padding to max(height padding, width padding) 3691 paddingInfo.width_padding = MAX(paddingInfo.width_padding, paddingInfo.height_padding); 3692 paddingInfo.height_padding = paddingInfo.width_padding; 3693 3694 rc = mm_stream_calc_offset_snapshot(mStreamFormat, &streamDim, &paddingInfo, 3695 &buf_planes); 3696 if (rc < 0) { 3697 LOGE("mm_stream_calc_offset_preview failed"); 3698 return rc; 3699 } 3700 mFrameLen = buf_planes.plane_info.frame_len; 3701 3702 /* initialize offline meta memory for input reprocess */ 3703 rc = QCamera3ProcessingChannel::initialize(isType); 3704 if (NO_ERROR != rc) { 3705 LOGE("Processing Channel initialize failed, rc = %d", 3706 rc); 3707 } 3708 3709 return rc; 3710} 3711 3712/*=========================================================================== 3713 * FUNCTION : request 3714 * 3715 * DESCRIPTION: handle the request - either with an input buffer or a direct 3716 * output request 3717 * 3718 * PARAMETERS : 3719 * @buffer : pointer to the output buffer 3720 * @frameNumber : frame number of the request 3721 * @pInputBuffer : pointer to input buffer if an input request 3722 * @metadata : parameters associated with the request 3723 * @internalreq : boolean to indicate if this is purely internal request 3724 * needing internal buffer allocation 3725 * @meteringonly : boolean indicating metering only frame subset of internal 3726 * not consumed by postprocessor 3727 * 3728 * RETURN : 0 on a success start of capture 3729 * -EINVAL on invalid input 3730 * -ENODEV on serious error 3731 *==========================================================================*/ 3732int32_t QCamera3PicChannel::request(buffer_handle_t *buffer, 3733 uint32_t frameNumber, 3734 camera3_stream_buffer_t *pInputBuffer, 3735 metadata_buffer_t *metadata, int &indexUsed, 3736 bool internalRequest, bool meteringOnly) 3737{ 3738 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_REQ); 3739 //FIX ME: Return buffer back in case of failures below. 
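    // Flow overview: for a non-internal request the framework output buffer
    // is registered (if needed) and tagged with the frame number, JPEG
    // settings are queued, and then either a YUV capture buffer is queued to
    // the stream (pInputBuffer == NULL) or the framework input buffer is
    // wrapped and sent straight to the postprocessor.
    //
    // Rough call-site sketch (illustrative only; the local names here are
    // assumptions, not taken from QCamera3HWI):
    //
    //   int indexUsed = -1;
    //   rc = picChannel->request(output_buffer, frame_number,
    //           /*pInputBuffer*/ NULL, request_settings, indexUsed,
    //           /*internalRequest*/ false, /*meteringOnly*/ false);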
3740 3741 int32_t rc = NO_ERROR; 3742 3743 reprocess_config_t reproc_cfg; 3744 cam_dimension_t dim; 3745 memset(&reproc_cfg, 0, sizeof(reprocess_config_t)); 3746 //make sure to set the correct input stream dim in case of YUV size override 3747 //and recalculate the plane info 3748 dim.width = (int32_t)mYuvWidth; 3749 dim.height = (int32_t)mYuvHeight; 3750 3751 setReprocConfig(reproc_cfg, pInputBuffer, metadata, mStreamFormat, dim); 3752 3753 // Start postprocessor 3754 startPostProc(reproc_cfg); 3755 3756 if (!internalRequest) { 3757 int index = mMemory.getMatchBufIndex((void*)buffer); 3758 3759 if(index < 0) { 3760 rc = registerBuffer(buffer, mIsType); 3761 if (NO_ERROR != rc) { 3762 LOGE("On-the-fly buffer registration failed %d", 3763 rc); 3764 return rc; 3765 } 3766 3767 index = mMemory.getMatchBufIndex((void*)buffer); 3768 if (index < 0) { 3769 LOGE("Could not find object among registered buffers"); 3770 return DEAD_OBJECT; 3771 } 3772 } 3773 LOGD("buffer index %d, frameNumber: %u", index, frameNumber); 3774 3775 rc = mMemory.markFrameNumber((uint32_t)index, frameNumber); 3776 3777 // Queue jpeg settings 3778 rc = queueJpegSetting((uint32_t)index, metadata); 3779 3780 } else { 3781 LOGD("Internal request @ Picchannel"); 3782 } 3783 3784 3785 if (pInputBuffer == NULL) { 3786 Mutex::Autolock lock(mFreeBuffersLock); 3787 uint32_t bufIdx; 3788 if (mFreeBufferList.empty()) { 3789 rc = mYuvMemory->allocateOne(mFrameLen, /*isCached*/false); 3790 if (rc < 0) { 3791 LOGE("Failed to allocate heap buffer. Fatal"); 3792 return rc; 3793 } else { 3794 bufIdx = (uint32_t)rc; 3795 } 3796 } else { 3797 List<uint32_t>::iterator it = mFreeBufferList.begin(); 3798 bufIdx = *it; 3799 mFreeBufferList.erase(it); 3800 } 3801 if (meteringOnly) { 3802 mYuvMemory->markFrameNumber(bufIdx, 0xFFFFFFFF); 3803 } else { 3804 mYuvMemory->markFrameNumber(bufIdx, frameNumber); 3805 } 3806 if (m_bIsActive) { 3807 mStreams[0]->bufDone(bufIdx); 3808 } 3809 indexUsed = bufIdx; 3810 } else { 3811 qcamera_fwk_input_pp_data_t *src_frame = NULL; 3812 src_frame = (qcamera_fwk_input_pp_data_t *)calloc(1, 3813 sizeof(qcamera_fwk_input_pp_data_t)); 3814 if (src_frame == NULL) { 3815 LOGE("No memory for src frame"); 3816 return NO_MEMORY; 3817 } 3818 rc = setFwkInputPPData(src_frame, pInputBuffer, &reproc_cfg, metadata, 3819 NULL /*fwk output buffer*/, frameNumber); 3820 if (NO_ERROR != rc) { 3821 LOGE("Error %d while setting framework input PP data", rc); 3822 free(src_frame); 3823 return rc; 3824 } 3825 LOGH("Post-process started"); 3826 m_postprocessor.processData(src_frame); 3827 } 3828 return rc; 3829} 3830 3831 3832 3833/*=========================================================================== 3834 * FUNCTION : dataNotifyCB 3835 * 3836 * DESCRIPTION: Channel Level callback used for super buffer data notify. 
3837 * This function is registered with mm-camera-interface to handle 3838 * data notify 3839 * 3840 * PARAMETERS : 3841 * @recvd_frame : stream frame received 3842 * userdata : user data ptr 3843 * 3844 * RETURN : none 3845 *==========================================================================*/ 3846void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame, 3847 void *userdata) 3848{ 3849 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PIC_CH_DATA_NOTIFY_CB); 3850 LOGD("E\n"); 3851 QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata; 3852 3853 if (channel == NULL) { 3854 LOGE("invalid channel pointer"); 3855 return; 3856 } 3857 3858 if(channel->m_numStreams != 1) { 3859 LOGE("Error: Bug: This callback assumes one stream per channel"); 3860 return; 3861 } 3862 3863 3864 if(channel->mStreams[0] == NULL) { 3865 LOGE("Error: Invalid Stream object"); 3866 return; 3867 } 3868 3869 channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]); 3870 3871 LOGD("X\n"); 3872 return; 3873} 3874 3875/*=========================================================================== 3876 * FUNCTION : streamCbRoutine 3877 * 3878 * DESCRIPTION: 3879 * 3880 * PARAMETERS : 3881 * @super_frame : the super frame with filled buffer 3882 * @stream : stream on which the buffer was requested and filled 3883 * 3884 * RETURN : none 3885 *==========================================================================*/ 3886void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 3887 QCamera3Stream *stream) 3888{ 3889 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CAPTURE_CH_CB); 3890 //TODO 3891 //Used only for getting YUV. Jpeg callback will be sent back from channel 3892 //directly to HWI. Refer to func jpegEvtHandle 3893 3894 //Got the yuv callback. 
Calling yuv callback handler in PostProc 3895 uint8_t frameIndex; 3896 mm_camera_super_buf_t* frame = NULL; 3897 cam_dimension_t dim; 3898 cam_frame_len_offset_t offset; 3899 3900 memset(&dim, 0, sizeof(dim)); 3901 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 3902 3903 if (checkStreamCbErrors(super_frame, stream) != NO_ERROR) { 3904 LOGE("Error with the stream callback"); 3905 return; 3906 } 3907 3908 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx; 3909 LOGD("recvd buf_idx: %u for further processing", 3910 (uint32_t)frameIndex); 3911 if(frameIndex >= mNumSnapshotBufs) { 3912 LOGE("Error, Invalid index for buffer"); 3913 if(stream) { 3914 Mutex::Autolock lock(mFreeBuffersLock); 3915 mFreeBufferList.push_back(frameIndex); 3916 stream->bufDone(frameIndex); 3917 } 3918 return; 3919 } 3920 3921 if ((uint32_t)mYuvMemory->getFrameNumber(frameIndex) == EMPTY_FRAMEWORK_FRAME_NUMBER) { 3922 LOGD("Internal Request recycle frame"); 3923 Mutex::Autolock lock(mFreeBuffersLock); 3924 mFreeBufferList.push_back(frameIndex); 3925 return; 3926 } 3927 3928 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t)); 3929 if (frame == NULL) { 3930 LOGE("Error allocating memory to save received_frame structure."); 3931 if(stream) { 3932 Mutex::Autolock lock(mFreeBuffersLock); 3933 mFreeBufferList.push_back(frameIndex); 3934 stream->bufDone(frameIndex); 3935 } 3936 return; 3937 } 3938 *frame = *super_frame; 3939 stream->getFrameDimension(dim); 3940 stream->getFrameOffset(offset); 3941 dumpYUV(frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_INPUT_REPROCESS); 3942 3943 if (IS_BUFFER_ERROR(super_frame->bufs[0]->flags)) { 3944 mChannelCbBufErr(this, mYuvMemory->getFrameNumber(frameIndex), 3945 CAMERA3_BUFFER_STATUS_ERROR, mUserData); 3946 } 3947 3948 m_postprocessor.processData(frame); 3949 free(super_frame); 3950 return; 3951} 3952 3953QCamera3StreamMem* QCamera3PicChannel::getStreamBufs(uint32_t /*len*/) 3954{ 3955 return mYuvMemory; 3956} 3957 3958void QCamera3PicChannel::putStreamBufs() 3959{ 3960 QCamera3ProcessingChannel::putStreamBufs(); 3961 Mutex::Autolock lock(mFreeBuffersLock); 3962 mFreeBufferList.clear(); 3963 3964 if (nullptr != mYuvMemory) { 3965 uint32_t count = mYuvMemory->getCnt(); 3966 for (uint32_t i = 0; i < count; i++) { 3967 mFreeBufferList.push_back(i); 3968 } 3969 } 3970} 3971 3972int32_t QCamera3PicChannel::queueJpegSetting(uint32_t index, metadata_buffer_t *metadata) 3973{ 3974 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData; 3975 jpeg_settings_t *settings = 3976 (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t)); 3977 3978 if (!settings) { 3979 LOGE("out of memory allocating jpeg_settings"); 3980 return -ENOMEM; 3981 } 3982 3983 memset(settings, 0, sizeof(jpeg_settings_t)); 3984 3985 settings->out_buf_index = index; 3986 3987 settings->jpeg_orientation = 0; 3988 IF_META_AVAILABLE(int32_t, orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) { 3989 settings->jpeg_orientation = *orientation; 3990 } 3991 3992 settings->jpeg_quality = 85; 3993 IF_META_AVAILABLE(uint32_t, quality1, CAM_INTF_META_JPEG_QUALITY, metadata) { 3994 settings->jpeg_quality = (uint8_t) *quality1; 3995 } 3996 3997 IF_META_AVAILABLE(uint32_t, quality2, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) { 3998 settings->jpeg_thumb_quality = (uint8_t) *quality2; 3999 } 4000 4001 IF_META_AVAILABLE(cam_dimension_t, dimension, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) { 4002 settings->thumbnail_size = *dimension; 4003 } 4004 4005 settings->gps_timestamp_valid = 0; 4006 
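    // Same pattern as the orientation/quality fields above: set a safe default
    // first, then overwrite it only if the request metadata carries the
    // corresponding GPS entry.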
IF_META_AVAILABLE(int64_t, timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) { 4007 settings->gps_timestamp = *timestamp; 4008 settings->gps_timestamp_valid = 1; 4009 } 4010 4011 settings->gps_coordinates_valid = 0; 4012 IF_META_AVAILABLE(double, coordinates, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) { 4013 memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double)); 4014 settings->gps_coordinates_valid = 1; 4015 } 4016 4017 IF_META_AVAILABLE(uint8_t, proc_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) { 4018 memset(settings->gps_processing_method, 0, 4019 sizeof(settings->gps_processing_method)); 4020 strlcpy(settings->gps_processing_method, (const char *)proc_methods, 4021 sizeof(settings->gps_processing_method)); 4022 } 4023 4024 settings->hdr_snapshot = 0; 4025 IF_META_AVAILABLE(cam_hdr_param_t, hdr_info, CAM_INTF_PARM_HAL_BRACKETING_HDR, metadata) { 4026 if (hdr_info->hdr_enable) { 4027 settings->hdr_snapshot = 1; 4028 } 4029 } 4030 4031 4032 // Image description 4033 const char *eepromVersion = hal_obj->getEepromVersionInfo(); 4034 const uint32_t *ldafCalib = hal_obj->getLdafCalib(); 4035 const char *easelFwVersion = hal_obj->getEaselFwVersion(); 4036 if ((eepromVersion && strlen(eepromVersion)) || 4037 ldafCalib || easelFwVersion) { 4038 uint32_t len = 0; 4039 settings->image_desc_valid = true; 4040 if (eepromVersion && strlen(eepromVersion)) { 4041 len = snprintf(settings->image_desc, sizeof(settings->image_desc), 4042 "%s", eepromVersion); 4043 } 4044 if (ldafCalib) { 4045 len += snprintf(settings->image_desc + len, 4046 sizeof(settings->image_desc) - len, "L:%u-%u", 4047 ldafCalib[0], ldafCalib[1]); 4048 } 4049 if (easelFwVersion) { 4050 ALOGD("%s: Easel FW version %s", __FUNCTION__, easelFwVersion); 4051 if (len > 0 && len < sizeof(settings->image_desc)) { 4052 settings->image_desc[len] = ','; 4053 len++; 4054 } 4055 len += snprintf(settings->image_desc + len, 4056 sizeof(settings->image_desc) - len, "E-ver:%s", easelFwVersion); 4057 } 4058 } 4059 4060 return m_postprocessor.processJpegSettingData(settings); 4061} 4062 4063 4064void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height) 4065{ 4066 mYuvWidth = width; 4067 mYuvHeight = height; 4068} 4069 4070/*=========================================================================== 4071 * FUNCTION : getReprocessType 4072 * 4073 * DESCRIPTION: get the type of reprocess output supported by this channel 4074 * 4075 * PARAMETERS : NONE 4076 * 4077 * RETURN : reprocess_type_t : type of reprocess 4078 *==========================================================================*/ 4079reprocess_type_t QCamera3PicChannel::getReprocessType() 4080{ 4081 /* a picture channel could either use the postprocessor for reprocess+jpeg 4082 or only for reprocess */ 4083 reprocess_type_t expectedReprocess; 4084 if (mPostProcMask == CAM_QCOM_FEATURE_NONE || mInputBufferHint) { 4085 expectedReprocess = REPROCESS_TYPE_JPEG; 4086 } else { 4087 expectedReprocess = REPROCESS_TYPE_NONE; 4088 } 4089 LOGH("expectedReprocess from Pic Channel is %d", expectedReprocess); 4090 return expectedReprocess; 4091} 4092 4093 4094/*=========================================================================== 4095 * FUNCTION : timeoutFrame 4096 * 4097 * DESCRIPTION: Method to indicate to channel that a given frame has take too 4098 * long to be generated 4099 * 4100 * PARAMETERS : framenumber indicating the framenumber of the buffer timingout 4101 * 4102 * RETURN : int32_t type of status 4103 * NO_ERROR -- success 4104 * none-zero 
failure code 4105 *==========================================================================*/ 4106int32_t QCamera3PicChannel::timeoutFrame(uint32_t frameNumber) 4107{ 4108 int32_t bufIdx; 4109 4110 bufIdx = mYuvMemory->getBufferIndex(frameNumber); 4111 4112 if (bufIdx < 0) { 4113 LOGE("%s: Buffer not found for frame:%d", __func__, frameNumber); 4114 return -1; 4115 } 4116 4117 mStreams[0]->timeoutFrame(bufIdx); 4118 4119 return NO_ERROR; 4120} 4121 4122int32_t QCamera3PicChannel::getYuvBufferForRequest(mm_camera_buf_def_t *frame, 4123 uint32_t frameNumber) 4124{ 4125 uint32_t bufIdx; 4126 status_t rc; 4127 4128 Mutex::Autolock lock(mFreeBuffersLock); 4129 4130 // Get an available YUV buffer. 4131 if (mFreeBufferList.empty()) { 4132 // Allocate a buffer if no one is available. 4133 rc = mYuvMemory->allocateOne(mFrameLen, /*isCached*/false); 4134 if (rc < 0) { 4135 LOGE("Failed to allocate heap buffer. Fatal"); 4136 return rc; 4137 } else { 4138 bufIdx = (uint32_t)rc; 4139 } 4140 } else { 4141 List<uint32_t>::iterator it = mFreeBufferList.begin(); 4142 bufIdx = *it; 4143 mFreeBufferList.erase(it); 4144 } 4145 4146 mYuvMemory->markFrameNumber(bufIdx, frameNumber); 4147 4148 cam_frame_len_offset_t offset = {}; 4149 mStreams[0]->getFrameOffset(offset); 4150 4151 // Get a buffer from YUV memory. 4152 rc = mYuvMemory->getBufDef(offset, *frame, bufIdx, mMapStreamBuffers); 4153 if (rc != 0) { 4154 ALOGE("%s: Getting a frame failed: %s (%d).", __FUNCTION__, strerror(-rc), rc); 4155 return rc; 4156 } 4157 4158 // Set the frame's stream ID because it's not set in getBufDef. 4159 frame->stream_id = mStreams[0]->getMyHandle(); 4160 return 0; 4161} 4162 4163int32_t QCamera3PicChannel::returnYuvBuffer(mm_camera_buf_def_t *frame) 4164{ 4165 Mutex::Autolock lock(mFreeBuffersLock); 4166 mFreeBufferList.push_back(frame->buf_idx); 4167 return 0; 4168} 4169 4170int32_t QCamera3PicChannel::returnYuvBufferAndEncode(mm_camera_buf_def_t *frame, 4171 buffer_handle_t *outBuffer, uint32_t frameNumber, 4172 std::shared_ptr<metadata_buffer_t> metadata) 4173{ 4174 int32_t rc = OK; 4175 4176 // Picture stream must have been started before any request comes in. 4177 if (!m_bIsActive) { 4178 LOGE("Channel not started!!"); 4179 return NO_INIT; 4180 } 4181 4182 // Set up reprocess configuration 4183 reprocess_config_t reproc_cfg = {}; 4184 cam_dimension_t dim; 4185 dim.width = (int32_t)mYuvWidth; 4186 dim.height = (int32_t)mYuvHeight; 4187 setReprocConfig(reproc_cfg, nullptr, metadata.get(), mStreamFormat, dim); 4188 4189 // Override reprocess type to just JPEG encoding without reprocessing. 4190 reproc_cfg.reprocess_type = REPROCESS_TYPE_NONE; 4191 4192 // Get the index of the output jpeg buffer. 
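    // (Same on-the-fly registration fallback as in request(): if this gralloc
    // buffer has not been registered yet, register it and look it up again.)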
    int index = mMemory.getMatchBufIndex((void*)outBuffer);
    if(index < 0) {
        rc = registerBuffer(outBuffer, mIsType);
        if (OK != rc) {
            LOGE("On-the-fly buffer registration failed %d",
                    rc);
            return rc;
        }

        index = mMemory.getMatchBufIndex((void*)outBuffer);
        if (index < 0) {
            LOGE("Could not find object among registered buffers");
            return DEAD_OBJECT;
        }
    }

    rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);
    if (rc != OK) {
        ALOGE("%s: Marking frame number (%u) for jpeg buffer (%d) failed: %s (%d)", __FUNCTION__,
                frameNumber, index, strerror(-rc), rc);
        return rc;
    }

    // Start postprocessor
    startPostProc(reproc_cfg);

    // Queue jpeg settings
    rc = queueJpegSetting((uint32_t)index, metadata.get());
    if (rc != OK) {
        ALOGE("%s: Queueing Jpeg setting for frame number (%u) buffer index (%d) failed: %s (%d)",
                __FUNCTION__, frameNumber, index, strerror(-rc), rc);
        return rc;
    }

    // Allocate a buffer for the YUV input. It will be freed in QCamera3PostProc.
    mm_camera_super_buf_t *src_frame =
            (mm_camera_super_buf_t *)calloc(1, sizeof(mm_camera_super_buf_t));
    if (src_frame == nullptr) {
        LOGE("%s: No memory for src frame", __FUNCTION__);
        return NO_MEMORY;
    }
    src_frame->camera_handle = m_camHandle;
    src_frame->ch_id = getMyHandle();
    src_frame->num_bufs = 1;
    src_frame->bufs[0] = frame;

    // Start processing the YUV buffer.
    ALOGD("%s: %d: Post-process started", __FUNCTION__, __LINE__);
    rc = m_postprocessor.processData(src_frame);
    if (rc != OK) {
        ALOGE("%s: Post processing frame (frame number: %u, jpeg buffer: %d) failed: %s (%d)",
                __FUNCTION__, frameNumber, index, strerror(-rc), rc);
        return rc;
    }

    // Allocate a buffer for the metadata. It will be freed in QCamera3PostProc.
    mm_camera_super_buf_t *metadataBuf =
            (mm_camera_super_buf_t *)calloc(1, sizeof(mm_camera_super_buf_t));
    if (metadataBuf == nullptr) {
        LOGE("%s: No memory for metadata", __FUNCTION__);
        return NO_MEMORY;
    }
    metadataBuf->camera_handle = m_camHandle;
    metadataBuf->ch_id = getMyHandle();
    metadataBuf->num_bufs = 1;
    metadataBuf->bufs[0] = (mm_camera_buf_def_t *)calloc(1, sizeof(mm_camera_buf_def_t));
    metadataBuf->bufs[0]->buffer = metadata.get();

    // Start processing the metadata
    rc = m_postprocessor.processPPMetadata(metadataBuf);
    if (rc != OK) {
        ALOGE("%s: Post processing metadata (frame number: %u, jpeg buffer: %d) failed: %s (%d)",
                __FUNCTION__, frameNumber, index, strerror(-rc), rc);
        return rc;
    }

    // Queue the external metadata.
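    // Holding the shared_ptr here keeps the external metadata alive while the
    // postprocessor still references it; metadataBufDone() removes it from the
    // list once the buffer comes back.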
4270 { 4271 Mutex::Autolock lock(mPendingExternalMetadataLock); 4272 mPendingExternalMetadata.push_back(metadata); 4273 } 4274 4275 return OK; 4276} 4277 4278/*=========================================================================== 4279 * FUNCTION : QCamera3ReprocessChannel 4280 * 4281 * DESCRIPTION: constructor of QCamera3ReprocessChannel 4282 * 4283 * PARAMETERS : 4284 * @cam_handle : camera handle 4285 * @cam_ops : ptr to camera ops table 4286 * @pp_mask : post-proccess feature mask 4287 * 4288 * RETURN : none 4289 *==========================================================================*/ 4290QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle, 4291 uint32_t channel_handle, 4292 mm_camera_ops_t *cam_ops, 4293 channel_cb_routine cb_routine, 4294 channel_cb_buffer_err cb_buf_err, 4295 cam_padding_info_t *paddingInfo, 4296 cam_feature_mask_t postprocess_mask, 4297 void *userData, void *ch_hdl) : 4298 /* In case of framework reprocessing, pproc and jpeg operations could be 4299 * parallelized by allowing 1 extra buffer for reprocessing output: 4300 * ch_hdl->getNumBuffers() + 1 */ 4301 QCamera3Channel(cam_handle, channel_handle, cam_ops, cb_routine, cb_buf_err, paddingInfo, 4302 postprocess_mask, userData, 4303 ((QCamera3ProcessingChannel *)ch_hdl)->getNumBuffers() 4304 + (MAX_REPROCESS_PIPELINE_STAGES - 1)), 4305 inputChHandle(ch_hdl), 4306 mOfflineBuffersIndex(-1), 4307 mFrameLen(0), 4308 mReprocessType(REPROCESS_TYPE_NONE), 4309 m_pSrcChannel(NULL), 4310 m_pMetaChannel(NULL), 4311 mMemory(NULL), 4312 mGrallocMemory(0), 4313 mReprocessPerfMode(false) 4314{ 4315 memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles)); 4316 mOfflineBuffersIndex = mNumBuffers -1; 4317 mOfflineMetaIndex = (int32_t) (2*mNumBuffers -1); 4318} 4319 4320 4321/*=========================================================================== 4322 * FUNCTION : QCamera3ReprocessChannel 4323 * 4324 * DESCRIPTION: constructor of QCamera3ReprocessChannel 4325 * 4326 * PARAMETERS : 4327 * @cam_handle : camera handle 4328 * @cam_ops : ptr to camera ops table 4329 * @pp_mask : post-proccess feature mask 4330 * 4331 * RETURN : none 4332 *==========================================================================*/ 4333int32_t QCamera3ReprocessChannel::initialize(cam_is_type_t isType) 4334{ 4335 int32_t rc = NO_ERROR; 4336 mm_camera_channel_attr_t attr; 4337 4338 memset(&attr, 0, sizeof(mm_camera_channel_attr_t)); 4339 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS; 4340 attr.max_unmatched_frames = 1; 4341 4342 m_handle = m_camOps->add_channel(m_camHandle, 4343 &attr, 4344 NULL, 4345 this); 4346 if (m_handle == 0) { 4347 LOGE("Add channel failed"); 4348 return UNKNOWN_ERROR; 4349 } 4350 4351 mIsType = isType; 4352 return rc; 4353} 4354 4355/*=========================================================================== 4356 * FUNCTION : registerBuffer 4357 * 4358 * DESCRIPTION: register streaming buffer to the channel object 4359 * 4360 * PARAMETERS : 4361 * @buffer : buffer to be registered 4362 * @isType : the image stabilization type for the buffer 4363 * 4364 * RETURN : int32_t type of status 4365 * NO_ERROR -- success 4366 * none-zero failure code 4367 *==========================================================================*/ 4368int32_t QCamera3ReprocessChannel::registerBuffer(buffer_handle_t *buffer, 4369 cam_is_type_t isType) 4370{ 4371 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_REG_BUF); 4372 int rc = 0; 4373 mIsType = isType; 4374 cam_stream_type_t streamType; 4375 4376 if (buffer == 
NULL) { 4377 LOGE("Error: Cannot register a NULL buffer"); 4378 return BAD_VALUE; 4379 } 4380 4381 if ((uint32_t)mGrallocMemory.getCnt() > (mNumBuffers - 1)) { 4382 LOGE("Trying to register more buffers than initially requested"); 4383 return BAD_VALUE; 4384 } 4385 4386 if (0 == m_numStreams) { 4387 rc = initialize(mIsType); 4388 if (rc != NO_ERROR) { 4389 LOGE("Couldn't initialize camera stream %d", 4390 rc); 4391 return rc; 4392 } 4393 } 4394 4395 streamType = mStreams[0]->getMyType(); 4396 rc = mGrallocMemory.registerBuffer(buffer, streamType); 4397 if (ALREADY_EXISTS == rc) { 4398 return NO_ERROR; 4399 } else if (NO_ERROR != rc) { 4400 LOGE("Buffer %p couldn't be registered %d", buffer, rc); 4401 return rc; 4402 } 4403 4404 return rc; 4405} 4406 4407/*=========================================================================== 4408 * FUNCTION : QCamera3ReprocessChannel 4409 * 4410 * DESCRIPTION: constructor of QCamera3ReprocessChannel 4411 * 4412 * PARAMETERS : 4413 * @cam_handle : camera handle 4414 * @cam_ops : ptr to camera ops table 4415 * @pp_mask : post-proccess feature mask 4416 * 4417 * RETURN : none 4418 *==========================================================================*/ 4419void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 4420 QCamera3Stream *stream) 4421{ 4422 //Got the pproc data callback. Now send to jpeg encoding 4423 uint8_t frameIndex; 4424 uint32_t resultFrameNumber; 4425 ATRACE_CALL(); 4426 mm_camera_super_buf_t* frame = NULL; 4427 QCamera3ProcessingChannel *obj = (QCamera3ProcessingChannel *)inputChHandle; 4428 cam_dimension_t dim; 4429 cam_frame_len_offset_t offset; 4430 4431 memset(&dim, 0, sizeof(dim)); 4432 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 4433 if(!super_frame) { 4434 LOGE("Invalid Super buffer"); 4435 return; 4436 } 4437 4438 if(super_frame->num_bufs != 1) { 4439 LOGE("Multiple streams are not supported"); 4440 return; 4441 } 4442 if(super_frame->bufs[0] == NULL ) { 4443 LOGE("Error, Super buffer frame does not contain valid buffer"); 4444 return; 4445 } 4446 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx; 4447 4448 4449 if (mReprocessType == REPROCESS_TYPE_JPEG) { 4450 resultFrameNumber = mMemory->getFrameNumber(frameIndex); 4451 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t)); 4452 if (frame == NULL) { 4453 LOGE("Error allocating memory to save received_frame structure."); 4454 if(stream) { 4455 stream->bufDone(frameIndex); 4456 } 4457 return; 4458 } 4459 LOGI("bufIndex: %u recvd from post proc", 4460 (uint32_t)frameIndex); 4461 *frame = *super_frame; 4462 4463 stream->getFrameDimension(dim); 4464 stream->getFrameOffset(offset); 4465 dumpYUV(frame->bufs[0], dim, offset, QCAMERA_DUMP_FRM_INPUT_JPEG); 4466 // Release offline buffers. 
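            // In the JPEG reprocess path the reprocessed frame goes back to the
            // postprocessor for encoding, so the offline input/meta buffers and
            // the framework input buffer can be released right away, before the
            // JPEG callback fires.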
4467 int32_t rc = obj->releaseOfflineMemory(resultFrameNumber); 4468 if (NO_ERROR != rc) { 4469 LOGE("Error releasing offline memory %d", rc); 4470 } 4471 /* Since reprocessing is done, send the callback to release the input buffer */ 4472 if (mChannelCB) { 4473 mChannelCB(NULL, NULL, resultFrameNumber, true, mUserData); 4474 } 4475 obj->m_postprocessor.processPPData(frame); 4476 } else { 4477 buffer_handle_t *resultBuffer; 4478 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx; 4479 resultBuffer = (buffer_handle_t *)mGrallocMemory.getBufferHandle(frameIndex); 4480 resultFrameNumber = mGrallocMemory.getFrameNumber(frameIndex); 4481 int32_t rc = stream->bufRelease(frameIndex); 4482 if (NO_ERROR != rc) { 4483 LOGE("Error %d releasing stream buffer %d", 4484 rc, frameIndex); 4485 } 4486 rc = mGrallocMemory.unregisterBuffer(frameIndex); 4487 if (NO_ERROR != rc) { 4488 LOGE("Error %d unregistering stream buffer %d", 4489 rc, frameIndex); 4490 } 4491 obj->reprocessCbRoutine(resultBuffer, resultFrameNumber); 4492 4493 obj->m_postprocessor.releaseOfflineBuffers(false); 4494 qcamera_hal3_pp_data_t *pp_job = obj->m_postprocessor.dequeuePPJob(resultFrameNumber); 4495 if (pp_job != NULL) { 4496 obj->m_postprocessor.releasePPJobData(pp_job); 4497 } 4498 free(pp_job); 4499 resetToCamPerfNormal(resultFrameNumber); 4500 } 4501 free(super_frame); 4502 return; 4503} 4504 4505/*=========================================================================== 4506 * FUNCTION : resetToCamPerfNormal 4507 * 4508 * DESCRIPTION: Set the perf mode to normal if all the priority frames 4509 * have been reprocessed 4510 * 4511 * PARAMETERS : 4512 * @frameNumber: Frame number of the reprocess completed frame 4513 * 4514 * RETURN : QCamera3StreamMem * 4515 *==========================================================================*/ 4516int32_t QCamera3ReprocessChannel::resetToCamPerfNormal(uint32_t frameNumber) 4517{ 4518 int32_t rc = NO_ERROR; 4519 bool resetToPerfNormal = false; 4520 { 4521 Mutex::Autolock lock(mPriorityFramesLock); 4522 /* remove the priority frame number from the list */ 4523 for (size_t i = 0; i < mPriorityFrames.size(); i++) { 4524 if (mPriorityFrames[i] == frameNumber) { 4525 mPriorityFrames.removeAt(i); 4526 } 4527 } 4528 /* reset the perf mode if pending priority frame list is empty */ 4529 if (mReprocessPerfMode && mPriorityFrames.empty()) { 4530 resetToPerfNormal = true; 4531 } 4532 } 4533 if (resetToPerfNormal) { 4534 QCamera3Stream *pStream = mStreams[0]; 4535 cam_stream_parm_buffer_t param; 4536 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t)); 4537 4538 param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE; 4539 param.perf_mode = CAM_PERF_NORMAL; 4540 rc = pStream->setParameter(param); 4541 { 4542 Mutex::Autolock lock(mPriorityFramesLock); 4543 mReprocessPerfMode = false; 4544 } 4545 } 4546 return rc; 4547} 4548 4549/*=========================================================================== 4550 * FUNCTION : getStreamBufs 4551 * 4552 * DESCRIPTION: register the buffers of the reprocess channel 4553 * 4554 * PARAMETERS : none 4555 * 4556 * RETURN : QCamera3StreamMem * 4557 *==========================================================================*/ 4558QCamera3StreamMem* QCamera3ReprocessChannel::getStreamBufs(uint32_t len) 4559{ 4560 if (mReprocessType == REPROCESS_TYPE_JPEG) { 4561 mMemory = new QCamera3StreamMem(mNumBuffers); 4562 if (!mMemory) { 4563 LOGE("unable to create reproc memory"); 4564 return NULL; 4565 } 4566 mFrameLen = len; 4567 return mMemory; 4568 } 4569 return 
&mGrallocMemory; 4570} 4571 4572/*=========================================================================== 4573 * FUNCTION : putStreamBufs 4574 * 4575 * DESCRIPTION: release the reprocess channel buffers 4576 * 4577 * PARAMETERS : none 4578 * 4579 * RETURN : 4580 *==========================================================================*/ 4581void QCamera3ReprocessChannel::putStreamBufs() 4582{ 4583 if (mReprocessType == REPROCESS_TYPE_JPEG) { 4584 mMemory->deallocate(); 4585 delete mMemory; 4586 mMemory = NULL; 4587 mFreeBufferList.clear(); 4588 } else { 4589 mGrallocMemory.unregisterBuffers(); 4590 } 4591} 4592 4593/*=========================================================================== 4594 * FUNCTION : ~QCamera3ReprocessChannel 4595 * 4596 * DESCRIPTION: destructor of QCamera3ReprocessChannel 4597 * 4598 * PARAMETERS : none 4599 * 4600 * RETURN : none 4601 *==========================================================================*/ 4602QCamera3ReprocessChannel::~QCamera3ReprocessChannel() 4603{ 4604 destroy(); 4605 4606 if (m_handle) { 4607 m_camOps->delete_channel(m_camHandle, m_handle); 4608 LOGD("deleting channel %d", m_handle); 4609 m_handle = 0; 4610 } 4611} 4612 4613/*=========================================================================== 4614 * FUNCTION : start 4615 * 4616 * DESCRIPTION: start reprocess channel. 4617 * 4618 * PARAMETERS : 4619 * 4620 * RETURN : int32_t type of status 4621 * NO_ERROR -- success 4622 * none-zero failure code 4623 *==========================================================================*/ 4624int32_t QCamera3ReprocessChannel::start() 4625{ 4626 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_START); 4627 int32_t rc = NO_ERROR; 4628 4629 rc = QCamera3Channel::start(); 4630 4631 if (rc == NO_ERROR) { 4632 rc = m_camOps->start_channel(m_camHandle, m_handle, /*start_sensor_streaming*/true); 4633 4634 // Check failure 4635 if (rc != NO_ERROR) { 4636 LOGE("start_channel failed %d", rc); 4637 QCamera3Channel::stop(); 4638 } 4639 } 4640 return rc; 4641} 4642 4643/*=========================================================================== 4644 * FUNCTION : stop 4645 * 4646 * DESCRIPTION: stop reprocess channel. 4647 * 4648 * PARAMETERS : none 4649 * 4650 * RETURN : int32_t type of status 4651 * NO_ERROR -- success 4652 * none-zero failure code 4653 *==========================================================================*/ 4654int32_t QCamera3ReprocessChannel::stop() 4655{ 4656 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_REPROC_CH_STOP); 4657 int32_t rc = NO_ERROR; 4658 4659 rc = QCamera3Channel::stop(); 4660 rc |= m_camOps->stop_channel(m_camHandle, m_handle, /*stop_channel_immediately*/false); 4661 // Unmapping the buffers 4662 unmapOfflineBuffers(true); 4663 return rc; 4664} 4665 4666/*=========================================================================== 4667 * FUNCTION : getStreamBySrcHandle 4668 * 4669 * DESCRIPTION: find reprocess stream by its source stream handle 4670 * 4671 * PARAMETERS : 4672 * @srcHandle : source stream handle 4673 * 4674 * RETURN : ptr to reprocess stream if found. 
NULL if not found 4675 *==========================================================================*/ 4676QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle) 4677{ 4678 QCamera3Stream *pStream = NULL; 4679 4680 for (uint32_t i = 0; i < m_numStreams; i++) { 4681 if (mSrcStreamHandles[i] == srcHandle) { 4682 pStream = mStreams[i]; 4683 break; 4684 } 4685 } 4686 return pStream; 4687} 4688 4689/*=========================================================================== 4690 * FUNCTION : getSrcStreamBySrcHandle 4691 * 4692 * DESCRIPTION: find source stream by source stream handle 4693 * 4694 * PARAMETERS : 4695 * @srcHandle : source stream handle 4696 * 4697 * RETURN : ptr to reprocess stream if found. NULL if not found 4698 *==========================================================================*/ 4699QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle) 4700{ 4701 QCamera3Stream *pStream = NULL; 4702 4703 if (NULL == m_pSrcChannel) { 4704 return NULL; 4705 } 4706 4707 for (uint32_t i = 0; i < m_numStreams; i++) { 4708 if (mSrcStreamHandles[i] == srcHandle) { 4709 pStream = m_pSrcChannel->getStreamByIndex(i); 4710 break; 4711 } 4712 } 4713 return pStream; 4714} 4715 4716/*=========================================================================== 4717 * FUNCTION : unmapOfflineBuffers 4718 * 4719 * DESCRIPTION: Unmaps offline buffers 4720 * 4721 * PARAMETERS : none 4722 * 4723 * RETURN : int32_t type of status 4724 * NO_ERROR -- success 4725 * none-zero failure code 4726 *==========================================================================*/ 4727int32_t QCamera3ReprocessChannel::unmapOfflineBuffers(bool all) 4728{ 4729 int rc = NO_ERROR; 4730 Mutex::Autolock l(mOfflineBuffersLock); 4731 if (!mOfflineBuffers.empty()) { 4732 QCamera3Stream *stream = NULL; 4733 List<OfflineBuffer>::iterator it = mOfflineBuffers.begin(); 4734 for (; it != mOfflineBuffers.end(); it++) { 4735 stream = (*it).stream; 4736 if (NULL != stream) { 4737 rc = stream->unmapBuf((*it).type, 4738 (*it).index, 4739 -1); 4740 if (NO_ERROR != rc) { 4741 LOGE("Error during offline buffer unmap %d", 4742 rc); 4743 } 4744 LOGD("Unmapped buffer with index %d", (*it).index); 4745 } 4746 if (!all) { 4747 mOfflineBuffers.erase(it); 4748 break; 4749 } 4750 } 4751 if (all) { 4752 mOfflineBuffers.clear(); 4753 } 4754 } 4755 4756 if (!mOfflineMetaBuffers.empty()) { 4757 QCamera3Stream *stream = NULL; 4758 List<OfflineBuffer>::iterator it = mOfflineMetaBuffers.begin(); 4759 for (; it != mOfflineMetaBuffers.end(); it++) { 4760 stream = (*it).stream; 4761 if (NULL != stream) { 4762 rc = stream->unmapBuf((*it).type, 4763 (*it).index, 4764 -1); 4765 if (NO_ERROR != rc) { 4766 LOGE("Error during offline buffer unmap %d", 4767 rc); 4768 } 4769 LOGD("Unmapped meta buffer with index %d", (*it).index); 4770 } 4771 if (!all) { 4772 mOfflineMetaBuffers.erase(it); 4773 break; 4774 } 4775 } 4776 if (all) { 4777 mOfflineMetaBuffers.clear(); 4778 } 4779 } 4780 return rc; 4781} 4782 4783/*=========================================================================== 4784 * FUNCTION : bufDone 4785 * 4786 * DESCRIPTION: Return reprocess stream buffer to free buffer list. 4787 * Note that this function doesn't queue buffer back to kernel. 4788 * It's up to doReprocessOffline to do that instead. 
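 *              (doReprocessOffline pops an index from mFreeBufferList and
 *              queues it to the kernel via the stream's bufDone.)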
4789 * PARAMETERS : 4790 * @recvd_frame : stream buf frame to be returned 4791 * 4792 * RETURN : int32_t type of status 4793 * NO_ERROR -- success 4794 * none-zero failure code 4795 *==========================================================================*/ 4796int32_t QCamera3ReprocessChannel::bufDone(mm_camera_super_buf_t *recvd_frame) 4797{ 4798 int rc = NO_ERROR; 4799 if (recvd_frame && recvd_frame->num_bufs == 1) { 4800 Mutex::Autolock lock(mFreeBuffersLock); 4801 uint32_t buf_idx = recvd_frame->bufs[0]->buf_idx; 4802 mFreeBufferList.push_back(buf_idx); 4803 4804 } else { 4805 LOGE("Fatal. Not supposed to be here"); 4806 rc = BAD_VALUE; 4807 } 4808 4809 return rc; 4810} 4811 4812/*=========================================================================== 4813 * FUNCTION : overrideMetadata 4814 * 4815 * DESCRIPTION: Override metadata entry such as rotation, crop, and CDS info. 4816 * 4817 * PARAMETERS : 4818 * @frame : input frame from source stream 4819 * meta_buffer: metadata buffer 4820 * @metadata : corresponding metadata 4821 * @fwk_frame : 4822 * 4823 * RETURN : int32_t type of status 4824 * NO_ERROR -- success 4825 * none-zero failure code 4826 *==========================================================================*/ 4827int32_t QCamera3ReprocessChannel::overrideMetadata(qcamera_hal3_pp_buffer_t *pp_buffer, 4828 mm_camera_buf_def_t *meta_buffer, jpeg_settings_t *jpeg_settings, 4829 qcamera_fwk_input_pp_data_t &fwk_frame) 4830{ 4831 int32_t rc = NO_ERROR; 4832 QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData; 4833 if ((NULL == meta_buffer) || (NULL == pp_buffer) || (NULL == pp_buffer->input) || 4834 (NULL == hal_obj)) { 4835 return BAD_VALUE; 4836 } 4837 4838 metadata_buffer_t *meta = (metadata_buffer_t *)meta_buffer->buffer; 4839 mm_camera_super_buf_t *frame = pp_buffer->input; 4840 if (NULL == meta) { 4841 return BAD_VALUE; 4842 } 4843 4844 for (uint32_t i = 0; i < frame->num_bufs; i++) { 4845 QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id); 4846 QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id); 4847 4848 if (pStream != NULL && pSrcStream != NULL) { 4849 if (jpeg_settings) { 4850 // Find rotation info for reprocess stream 4851 cam_rotation_info_t rotation_info; 4852 memset(&rotation_info, 0, sizeof(rotation_info)); 4853 if (jpeg_settings->jpeg_orientation == 0) { 4854 rotation_info.rotation = ROTATE_0; 4855 } else if (jpeg_settings->jpeg_orientation == 90) { 4856 rotation_info.rotation = ROTATE_90; 4857 } else if (jpeg_settings->jpeg_orientation == 180) { 4858 rotation_info.rotation = ROTATE_180; 4859 } else if (jpeg_settings->jpeg_orientation == 270) { 4860 rotation_info.rotation = ROTATE_270; 4861 } 4862 4863 rotation_info.device_rotation = ROTATE_0; 4864 rotation_info.streamId = mStreams[0]->getMyServerID(); 4865 ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info); 4866 } 4867 4868 // Find and insert crop info for reprocess stream 4869 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) { 4870 if (MAX_NUM_STREAMS > crop_data->num_of_streams) { 4871 for (int j = 0; j < crop_data->num_of_streams; j++) { 4872 if (crop_data->crop_info[j].stream_id == 4873 pSrcStream->getMyServerID()) { 4874 4875 // Store crop/roi information for offline reprocess 4876 // in the reprocess stream slot 4877 crop_data->crop_info[crop_data->num_of_streams].crop = 4878 crop_data->crop_info[j].crop; 4879 crop_data->crop_info[crop_data->num_of_streams].roi_map = 4880 
crop_data->crop_info[j].roi_map; 4881 crop_data->crop_info[crop_data->num_of_streams].stream_id = 4882 mStreams[0]->getMyServerID(); 4883 crop_data->num_of_streams++; 4884 4885 LOGD("Reprocess stream server id: %d", 4886 mStreams[0]->getMyServerID()); 4887 LOGD("Found offline reprocess crop %dx%d %dx%d", 4888 crop_data->crop_info[j].crop.left, 4889 crop_data->crop_info[j].crop.top, 4890 crop_data->crop_info[j].crop.width, 4891 crop_data->crop_info[j].crop.height); 4892 LOGD("Found offline reprocess roimap %dx%d %dx%d", 4893 crop_data->crop_info[j].roi_map.left, 4894 crop_data->crop_info[j].roi_map.top, 4895 crop_data->crop_info[j].roi_map.width, 4896 crop_data->crop_info[j].roi_map.height); 4897 4898 break; 4899 } 4900 } 4901 } else { 4902 LOGE("No space to add reprocess stream crop/roi information"); 4903 } 4904 } 4905 4906 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, meta) { 4907 uint8_t cnt = cdsInfo->num_of_streams; 4908 if (cnt <= MAX_NUM_STREAMS) { 4909 cam_stream_cds_info_t repro_cds_info; 4910 memset(&repro_cds_info, 0, sizeof(repro_cds_info)); 4911 repro_cds_info.stream_id = mStreams[0]->getMyServerID(); 4912 for (size_t i = 0; i < cnt; i++) { 4913 if (cdsInfo->cds_info[i].stream_id == 4914 pSrcStream->getMyServerID()) { 4915 repro_cds_info.cds_enable = 4916 cdsInfo->cds_info[i].cds_enable; 4917 break; 4918 } 4919 } 4920 cdsInfo->num_of_streams = 1; 4921 cdsInfo->cds_info[0] = repro_cds_info; 4922 } else { 4923 LOGE("No space to add reprocess stream cds information"); 4924 } 4925 } 4926 4927 fwk_frame.input_buffer = *frame->bufs[i]; 4928 fwk_frame.metadata_buffer = *meta_buffer; 4929 fwk_frame.output_buffer = pp_buffer->output; 4930 break; 4931 } else { 4932 LOGE("Source/Re-process streams are invalid"); 4933 rc |= BAD_VALUE; 4934 } 4935 } 4936 4937 return rc; 4938} 4939 4940/*=========================================================================== 4941* FUNCTION : overrideFwkMetadata 4942* 4943* DESCRIPTION: Override frameworks metadata such as rotation, crop, and CDS data. 
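* For non-JPEG reprocess types the rotation below is forced to ROTATE_0, and the
* single crop/CDS entry is re-targeted to this reprocess stream's server ID.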
4944* 4945* PARAMETERS : 4946* @frame : input frame for reprocessing 4947* 4948* RETURN : int32_t type of status 4949* NO_ERROR -- success 4950* none-zero failure code 4951*==========================================================================*/ 4952int32_t QCamera3ReprocessChannel::overrideFwkMetadata( 4953 qcamera_fwk_input_pp_data_t *frame) 4954{ 4955 if (NULL == frame) { 4956 LOGE("Incorrect input frame"); 4957 return BAD_VALUE; 4958 } 4959 4960 if (NULL == frame->metadata_buffer.buffer) { 4961 LOGE("No metadata available"); 4962 return BAD_VALUE; 4963 } 4964 metadata_buffer_t *meta = (metadata_buffer_t *) frame->metadata_buffer.buffer; 4965 4966 // Not doing rotation at all for YUV to YUV reprocess 4967 if (mReprocessType != REPROCESS_TYPE_JPEG) { 4968 LOGD("Override rotation to 0 for channel reprocess type %d", 4969 mReprocessType); 4970 cam_rotation_info_t rotation_info; 4971 memset(&rotation_info, 0, sizeof(rotation_info)); 4972 rotation_info.rotation = ROTATE_0; 4973 rotation_info.streamId = mStreams[0]->getMyServerID(); 4974 ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info); 4975 } 4976 4977 // Find and insert crop info for reprocess stream 4978 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) { 4979 if (1 == crop_data->num_of_streams) { 4980 // Store crop/roi information for offline reprocess 4981 // in the reprocess stream slot 4982 crop_data->crop_info[crop_data->num_of_streams].crop = 4983 crop_data->crop_info[0].crop; 4984 crop_data->crop_info[crop_data->num_of_streams].roi_map = 4985 crop_data->crop_info[0].roi_map; 4986 crop_data->crop_info[crop_data->num_of_streams].stream_id = 4987 mStreams[0]->getMyServerID(); 4988 crop_data->num_of_streams++; 4989 4990 LOGD("Reprocess stream server id: %d", 4991 mStreams[0]->getMyServerID()); 4992 LOGD("Found offline reprocess crop %dx%d %dx%d", 4993 crop_data->crop_info[0].crop.left, 4994 crop_data->crop_info[0].crop.top, 4995 crop_data->crop_info[0].crop.width, 4996 crop_data->crop_info[0].crop.height); 4997 LOGD("Found offline reprocess roi map %dx%d %dx%d", 4998 crop_data->crop_info[0].roi_map.left, 4999 crop_data->crop_info[0].roi_map.top, 5000 crop_data->crop_info[0].roi_map.width, 5001 crop_data->crop_info[0].roi_map.height); 5002 } else { 5003 LOGE("Incorrect number of offline crop data entries %d", 5004 crop_data->num_of_streams); 5005 return BAD_VALUE; 5006 } 5007 } else { 5008 LOGW("Crop data not present"); 5009 } 5010 5011 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, meta) { 5012 if (1 == cdsInfo->num_of_streams) { 5013 cdsInfo->cds_info[0].stream_id = mStreams[0]->getMyServerID(); 5014 } else { 5015 LOGE("Incorrect number of offline cds info entries %d", 5016 cdsInfo->num_of_streams); 5017 return BAD_VALUE; 5018 } 5019 } 5020 5021 return NO_ERROR; 5022} 5023 5024/*=========================================================================== 5025 * FUNCTION : doReprocessOffline 5026 * 5027 * DESCRIPTION: request to do a reprocess on the frame 5028 * 5029 * PARAMETERS : 5030 * @frame : input frame for reprocessing 5031 * @isPriorityFrame: Hint that this frame is of priority, equivalent to 5032 * real time, even though it is processed in offline mechanism 5033 * 5034 * RETURN : int32_t type of status 5035 * NO_ERROR -- success 5036 * none-zero failure code 5037 *==========================================================================*/ 5038 int32_t QCamera3ReprocessChannel::doReprocessOffline( 5039 qcamera_fwk_input_pp_data_t *frame, bool 
isPriorityFrame) 5040{ 5041 int32_t rc = 0; 5042 int index; 5043 OfflineBuffer mappedBuffer; 5044 ATRACE_CALL(); 5045 5046 if (m_numStreams < 1) { 5047 LOGE("No reprocess stream is created"); 5048 return -1; 5049 } 5050 5051 if (NULL == frame) { 5052 LOGE("Incorrect input frame"); 5053 return BAD_VALUE; 5054 } 5055 5056 if (NULL == frame->metadata_buffer.buffer) { 5057 LOGE("No metadata available"); 5058 return BAD_VALUE; 5059 } 5060 5061 if (0 > frame->input_buffer.fd) { 5062 LOGE("No input buffer available"); 5063 return BAD_VALUE; 5064 } 5065 5066 if ((0 == m_numStreams) || (NULL == mStreams[0])) { 5067 LOGE("Reprocess stream not initialized!"); 5068 return NO_INIT; 5069 } 5070 5071 QCamera3Stream *pStream = mStreams[0]; 5072 5073 //qbuf the output buffer if it was allocated by the framework 5074 if (mReprocessType != REPROCESS_TYPE_JPEG && frame->output_buffer != NULL) { 5075 index = mGrallocMemory.getMatchBufIndex((void*)frame->output_buffer); 5076 if(index < 0) { 5077 rc = registerBuffer(frame->output_buffer, mIsType); 5078 if (NO_ERROR != rc) { 5079 LOGE("On-the-fly buffer registration failed %d", 5080 rc); 5081 return rc; 5082 } 5083 5084 index = mGrallocMemory.getMatchBufIndex((void*)frame->output_buffer); 5085 if (index < 0) { 5086 LOGE("Could not find object among registered buffers"); 5087 return DEAD_OBJECT; 5088 } 5089 } 5090 rc = mGrallocMemory.markFrameNumber(index, frame->frameNumber); 5091 if(rc != NO_ERROR) { 5092 LOGE("Failed to mark frame#:%d, index:%d",frame->frameNumber,index); 5093 return rc; 5094 } 5095 if(!m_bIsActive) { 5096 rc = start(); 5097 if (NO_ERROR != rc) { 5098 return rc; 5099 } 5100 } else { 5101 rc = pStream->bufDone(index); 5102 if(rc != NO_ERROR) { 5103 LOGE("Failed to Q new buffer to stream %d", rc); 5104 mGrallocMemory.markFrameNumber(index, -1); 5105 return rc; 5106 } 5107 } 5108 5109 } else if (mReprocessType == REPROCESS_TYPE_JPEG) { 5110 Mutex::Autolock lock(mFreeBuffersLock); 5111 uint32_t bufIdx; 5112 if (mFreeBufferList.empty()) { 5113 rc = mMemory->allocateOne(mFrameLen); 5114 if (rc < 0) { 5115 LOGE("Failed allocating heap buffer. 
Fatal"); 5116 return BAD_VALUE; 5117 } else { 5118 bufIdx = (uint32_t)rc; 5119 } 5120 } else { 5121 bufIdx = *(mFreeBufferList.begin()); 5122 mFreeBufferList.erase(mFreeBufferList.begin()); 5123 } 5124 5125 mMemory->markFrameNumber(bufIdx, frame->frameNumber); 5126 rc = pStream->bufDone(bufIdx); 5127 if (rc != NO_ERROR) { 5128 LOGE("Failed to queue new buffer to stream"); 5129 return rc; 5130 } 5131 } 5132 5133 int32_t max_idx = (int32_t) (mNumBuffers - 1); 5134 //loop back the indices if max burst count reached 5135 if (mOfflineBuffersIndex == max_idx) { 5136 mOfflineBuffersIndex = -1; 5137 } 5138 uint32_t buf_idx = (uint32_t)(mOfflineBuffersIndex + 1); 5139 5140 //Do cache ops before sending for reprocess 5141 if (mMemory != NULL) { 5142 mMemory->cleanInvalidateCache(buf_idx); 5143 } 5144 5145 rc = pStream->mapBuf( 5146 CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, 5147 buf_idx, -1, 5148 frame->input_buffer.fd, frame->input_buffer.buffer, 5149 frame->input_buffer.frame_len); 5150 if (NO_ERROR == rc) { 5151 Mutex::Autolock l(mOfflineBuffersLock); 5152 mappedBuffer.index = buf_idx; 5153 mappedBuffer.stream = pStream; 5154 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF; 5155 mOfflineBuffers.push_back(mappedBuffer); 5156 mOfflineBuffersIndex = (int32_t)buf_idx; 5157 LOGD("Mapped buffer with index %d", mOfflineBuffersIndex); 5158 } 5159 5160 max_idx = (int32_t) ((mNumBuffers * 2) - 1); 5161 //loop back the indices if max burst count reached 5162 if (mOfflineMetaIndex == max_idx) { 5163 mOfflineMetaIndex = (int32_t) (mNumBuffers - 1); 5164 } 5165 uint32_t meta_buf_idx = (uint32_t)(mOfflineMetaIndex + 1); 5166 rc |= pStream->mapBuf( 5167 CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF, 5168 meta_buf_idx, -1, 5169 frame->metadata_buffer.fd, frame->metadata_buffer.buffer, 5170 frame->metadata_buffer.frame_len); 5171 if (NO_ERROR == rc) { 5172 Mutex::Autolock l(mOfflineBuffersLock); 5173 mappedBuffer.index = meta_buf_idx; 5174 mappedBuffer.stream = pStream; 5175 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF; 5176 mOfflineMetaBuffers.push_back(mappedBuffer); 5177 mOfflineMetaIndex = (int32_t)meta_buf_idx; 5178 LOGD("Mapped meta buffer with index %d", mOfflineMetaIndex); 5179 } 5180 5181 if (rc == NO_ERROR) { 5182 cam_stream_parm_buffer_t param; 5183 uint32_t numPendingPriorityFrames = 0; 5184 5185 if(isPriorityFrame && (mReprocessType != REPROCESS_TYPE_JPEG)) { 5186 Mutex::Autolock lock(mPriorityFramesLock); 5187 /* read the length before pushing the frame number to check if 5188 * vector is empty */ 5189 numPendingPriorityFrames = mPriorityFrames.size(); 5190 mPriorityFrames.push(frame->frameNumber); 5191 } 5192 5193 if(isPriorityFrame && !numPendingPriorityFrames && 5194 (mReprocessType != REPROCESS_TYPE_JPEG)) { 5195 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t)); 5196 param.type = CAM_STREAM_PARAM_TYPE_REQUEST_OPS_MODE; 5197 param.perf_mode = CAM_PERF_HIGH_PERFORMANCE; 5198 rc = pStream->setParameter(param); 5199 if (rc != NO_ERROR) { 5200 LOGE("%s: setParameter for CAM_PERF_HIGH_PERFORMANCE failed", 5201 __func__); 5202 } 5203 { 5204 Mutex::Autolock lock(mPriorityFramesLock); 5205 mReprocessPerfMode = true; 5206 } 5207 } 5208 5209 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t)); 5210 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS; 5211 param.reprocess.buf_index = buf_idx; 5212 param.reprocess.frame_idx = frame->input_buffer.frame_idx; 5213 param.reprocess.meta_present = 1; 5214 param.reprocess.meta_buf_index = meta_buf_idx; 5215 5216 LOGI("Offline reprocessing id = %d buf Id = %d 
meta index = %d", 5217 param.reprocess.frame_idx, param.reprocess.buf_index, 5218 param.reprocess.meta_buf_index); 5219 rc = pStream->setParameter(param); 5220 if (rc != NO_ERROR) { 5221 LOGE("stream setParameter for reprocess failed"); 5222 resetToCamPerfNormal(frame->frameNumber); 5223 } 5224 } else { 5225 LOGE("Input buffer memory map failed: %d", rc); 5226 } 5227 5228 return rc; 5229} 5230 5231/*=========================================================================== 5232 * FUNCTION : doReprocess 5233 * 5234 * DESCRIPTION: request to do a reprocess on the frame 5235 * 5236 * PARAMETERS : 5237 * @buf_fd : fd to the input buffer that needs reprocess 5238 * @buffer : Buffer ptr 5239 * @buf_lenght : length of the input buffer 5240 * @ret_val : result of reprocess. 5241 * Example: Could be faceID in case of register face image. 5242 * @meta_frame : metadata frame. 5243 * 5244 * RETURN : int32_t type of status 5245 * NO_ERROR -- success 5246 * none-zero failure code 5247 *==========================================================================*/ 5248int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd, void *buffer, size_t buf_length, 5249 int32_t &ret_val, mm_camera_super_buf_t *meta_frame) 5250{ 5251 int32_t rc = 0; 5252 if (m_numStreams < 1) { 5253 LOGE("No reprocess stream is created"); 5254 return -1; 5255 } 5256 if (meta_frame == NULL) { 5257 LOGE("Did not get corresponding metadata in time"); 5258 return -1; 5259 } 5260 5261 uint8_t buf_idx = 0; 5262 for (uint32_t i = 0; i < m_numStreams; i++) { 5263 rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, 5264 buf_idx, -1, 5265 buf_fd, buffer, buf_length); 5266 5267 //Do cache ops before sending for reprocess 5268 if (mMemory != NULL) { 5269 mMemory->cleanInvalidateCache(buf_idx); 5270 } 5271 5272 if (rc == NO_ERROR) { 5273 cam_stream_parm_buffer_t param; 5274 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t)); 5275 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS; 5276 param.reprocess.buf_index = buf_idx; 5277 param.reprocess.meta_present = 1; 5278 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID(); 5279 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx; 5280 5281 LOGI("Online reprocessing id = %d buf Id = %d meta index = %d", 5282 param.reprocess.frame_idx, param.reprocess.buf_index, 5283 param.reprocess.meta_buf_index); 5284 rc = mStreams[i]->setParameter(param); 5285 if (rc == NO_ERROR) { 5286 ret_val = param.reprocess.ret_val; 5287 } 5288 mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, 5289 buf_idx, -1); 5290 } 5291 } 5292 return rc; 5293} 5294 5295/*=========================================================================== 5296 * FUNCTION : addReprocStreamsFromSource 5297 * 5298 * DESCRIPTION: add reprocess streams from input source channel 5299 * 5300 * PARAMETERS : 5301 * @config : pp feature configuration 5302 * @src_config : source reprocess configuration 5303 * @isType : type of image stabilization required on this stream 5304 * @pMetaChannel : ptr to metadata channel to get corresp. 
/*===========================================================================
 * FUNCTION   : addReprocStreamsFromSource
 *
 * DESCRIPTION: add reprocess streams from input source channel
 *
 * PARAMETERS :
 *   @pp_config    : pp feature configuration
 *   @src_config   : source reprocess configuration
 *   @is_type      : type of image stabilization required on this stream
 *   @pMetaChannel : ptr to metadata channel to get corresponding metadata
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config,
        const reprocess_config_t &src_config, cam_is_type_t is_type,
        QCamera3Channel *pMetaChannel)
{
    int32_t rc = 0;
    cam_stream_reproc_config_t reprocess_config;
    cam_stream_type_t streamType;

    cam_dimension_t streamDim = src_config.output_stream_dim;

    if (NULL != src_config.src_channel) {
        QCamera3Stream *pSrcStream = src_config.src_channel->getStreamByIndex(0);
        if (pSrcStream == NULL) {
            LOGE("source channel doesn't have a stream");
            return BAD_VALUE;
        }
        mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle();
    }

    streamType = CAM_STREAM_TYPE_OFFLINE_PROC;
    reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE;

    reprocess_config.offline.input_fmt = src_config.stream_format;
    reprocess_config.offline.input_dim = src_config.input_stream_dim;
    reprocess_config.offline.input_buf_planes.plane_info =
            src_config.input_stream_plane_info.plane_info;
    reprocess_config.offline.num_of_bufs = (uint8_t)mNumBuffers;
    reprocess_config.offline.input_type = src_config.stream_type;

    reprocess_config.pp_feature_config = pp_config;
    QCamera3Stream *pStream = new QCamera3Stream(m_camHandle,
            m_handle,
            m_camOps,
            &mPaddingInfo,
            (QCamera3Channel*)this,
            false/*mapStreamBuffers*/);
    if (pStream == NULL) {
        LOGE("No mem for Stream");
        return NO_MEMORY;
    }

    rc = pStream->init(streamType, src_config.stream_format,
            streamDim, ROTATE_0, &reprocess_config,
            (uint8_t)mNumBuffers,
            reprocess_config.pp_feature_config.feature_mask,
            is_type,
            0,/* batchSize */
            QCamera3Channel::streamCbRoutine, this);

    if (rc == 0) {
        mStreams[m_numStreams] = pStream;
        m_numStreams++;
    } else {
        LOGE("failed to create reprocess stream");
        delete pStream;
    }

    if (rc == NO_ERROR) {
        m_pSrcChannel = src_config.src_channel;
        m_pMetaChannel = pMetaChannel;
        mReprocessType = src_config.reprocess_type;
        LOGD("mReprocessType is %d", mReprocessType);
    }
    mm_camera_req_buf_t buf;
    memset(&buf, 0x0, sizeof(buf));
    buf.type = MM_CAMERA_REQ_SUPER_BUF;
    buf.num_buf_requested = 1;
    if (m_camOps->request_super_buf(m_camHandle, m_handle, &buf) < 0) {
        LOGE("Request for super buffer failed");
    }
    return rc;
}

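/*===========================================================================
 * USAGE SKETCH (illustrative only, not part of the HAL):
 *
 * addReprocStreamsFromSource() above builds one offline-processing stream
 * from a source channel description. A minimal sketch of how a caller might
 * fill the two configuration structures; `reprocChannel`, `srcChannel`,
 * `metaChannel`, `inputDim`, `outputDim`, `planeInfo` and `fmt` are
 * hypothetical values taken from an existing source stream, and the enum
 * choices marked "assumed" are examples rather than requirements:
 *
 *   reprocess_config_t srcCfg;
 *   memset(&srcCfg, 0, sizeof(srcCfg));
 *   srcCfg.src_channel = srcChannel;
 *   srcCfg.stream_type = CAM_STREAM_TYPE_SNAPSHOT;  // assumed source type
 *   srcCfg.stream_format = fmt;
 *   srcCfg.input_stream_dim = inputDim;
 *   srcCfg.input_stream_plane_info = planeInfo;
 *   srcCfg.output_stream_dim = outputDim;
 *   srcCfg.reprocess_type = REPROCESS_TYPE_JPEG;    // assumed reprocess type
 *
 *   cam_pp_feature_config_t ppCfg;
 *   memset(&ppCfg, 0, sizeof(ppCfg));
 *   ppCfg.feature_mask = pp_feature_mask;           // assumed caller-chosen mask
 *
 *   int32_t rc = reprocChannel->addReprocStreamsFromSource(ppCfg, srcCfg,
 *           IS_TYPE_NONE, metaChannel);
 *==========================================================================*/
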
/* QCamera3SupportChannel methods */

cam_dimension_t QCamera3SupportChannel::kDim = {640, 480};

QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle,
        uint32_t channel_handle,
        mm_camera_ops_t *cam_ops,
        cam_padding_info_t *paddingInfo,
        cam_feature_mask_t postprocess_mask,
        cam_stream_type_t streamType,
        cam_dimension_t *dim,
        cam_format_t streamFormat,
        uint8_t hw_analysis_supported,
        cam_color_filter_arrangement_t color_arrangement,
        void *userData, uint32_t numBuffers) :
            QCamera3Channel(cam_handle, channel_handle, cam_ops,
                    NULL, NULL, paddingInfo, postprocess_mask,
                    userData, numBuffers),
            mMemory(NULL),
            mHwAnalysisSupported(hw_analysis_supported),
            mColorArrangement(color_arrangement)
{
    memcpy(&mDim, dim, sizeof(cam_dimension_t));
    mStreamType = streamType;
    mStreamFormat = streamFormat;
}

QCamera3SupportChannel::~QCamera3SupportChannel()
{
    destroy();

    if (mMemory) {
        mMemory->deallocate();
        delete mMemory;
        mMemory = NULL;
    }
}

int32_t QCamera3SupportChannel::initialize(cam_is_type_t isType)
{
    int32_t rc;

    if (mMemory || m_numStreams > 0) {
        LOGE("support channel already initialized");
        return -EINVAL;
    }

    mIsType = isType;
    // Make Analysis same as Preview format
    if (!mHwAnalysisSupported && mStreamType == CAM_STREAM_TYPE_ANALYSIS &&
            mColorArrangement != CAM_FILTER_ARRANGEMENT_Y) {
        mStreamFormat = getStreamDefaultFormat(CAM_STREAM_TYPE_PREVIEW,
                mDim.width, mDim.height, m_bUBWCenable, mIsType);
    }

    rc = QCamera3Channel::addStream(mStreamType,
            mStreamFormat, mDim, ROTATE_0, MIN_STREAMING_BUFFER_NUM,
            mPostProcMask, mIsType);
    if (rc < 0) {
        LOGE("addStream failed");
    }
    return rc;
}

int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/,
        uint32_t /*frameNumber*/,
        int & /*indexUsed*/)
{
    return NO_ERROR;
}

void QCamera3SupportChannel::streamCbRoutine(
        mm_camera_super_buf_t *super_frame,
        QCamera3Stream * /*stream*/)
{
    if (super_frame == NULL || super_frame->num_bufs != 1) {
        LOGE("super_frame is not valid");
        return;
    }
    bufDone(super_frame);
    free(super_frame);
}

QCamera3StreamMem* QCamera3SupportChannel::getStreamBufs(uint32_t len)
{
    int rc;
    mMemory = new QCamera3StreamMem(mNumBuffers);
    if (!mMemory) {
        LOGE("unable to create heap memory");
        return NULL;
    }
    rc = mMemory->allocateAll(len);
    if (rc < 0) {
        LOGE("unable to allocate heap memory");
        delete mMemory;
        mMemory = NULL;
        return NULL;
    }
    return mMemory;
}

void QCamera3SupportChannel::putStreamBufs()
{
    mMemory->deallocate();
    delete mMemory;
    mMemory = NULL;
}

QCamera3DepthChannel::~QCamera3DepthChannel() {
    unmapAllBuffers();
}

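/*===========================================================================
 * USAGE SKETCH (illustrative only, not part of the HAL):
 *
 * The QCamera3DepthChannel helpers below keep a gralloc-backed map from
 * frame numbers to depth output buffers. A hedged outline of the expected
 * per-request sequence, where `depthChannel`, `outBuffer`, `depthData` and
 * `frameNumber` are hypothetical caller-owned values:
 *
 *   // 1. Register the framework buffer for this request.
 *   depthChannel->mapBuffer(outBuffer, frameNumber);
 *
 *   // 2. When the depth payload arrives, copy it into the mapped buffer
 *   //    together with the trailing blob header.
 *   depthChannel->populateDepthData(depthData, frameNumber);
 *
 *   // 3. Release the mapping once the buffer is returned to the framework.
 *   depthChannel->unmapBuffer(frameNumber);
 *
 * unmapAllBuffers() (called from the channel destructor above) drops any
 * mappings that are still registered at teardown.
 *==========================================================================*/
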
/*===========================================================================
 * FUNCTION   : mapBuffer
 *
 * DESCRIPTION: Maps stream depth buffer
 *
 * PARAMETERS :
 *   @buffer      : Depth buffer
 *   @frameNumber : Frame number
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3DepthChannel::mapBuffer(buffer_handle_t *buffer,
        uint32_t frameNumber) {
    int32_t rc = NO_ERROR;

    int32_t index = mGrallocMem.getMatchBufIndex((void*)buffer);
    if (0 > index) {
        rc = mGrallocMem.registerBuffer(buffer, CAM_STREAM_TYPE_DEFAULT);
        if (NO_ERROR != rc) {
            LOGE("Buffer registration failed %d", rc);
            return rc;
        }

        index = mGrallocMem.getMatchBufIndex((void*)buffer);
        if (index < 0) {
            LOGE("Could not find object among registered buffers");
            return DEAD_OBJECT;
        }
    } else {
        LOGE("Buffer: %p is already present at index: %d!", buffer, index);
        return ALREADY_EXISTS;
    }

    rc = mGrallocMem.markFrameNumber((uint32_t)index, frameNumber);

    return rc;
}

/*===========================================================================
 * FUNCTION   : populateDepthData
 *
 * DESCRIPTION: Copies the incoming depth data in the respective depth buffer
 *
 * PARAMETERS :
 *   @data        : Incoming depth data
 *   @frameNumber : Frame number of incoming depth data
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3DepthChannel::populateDepthData(const cam_depth_data_t &data,
        uint32_t frameNumber) {
    if (nullptr == mStream) {
        LOGE("Invalid depth stream!");
        return BAD_VALUE;
    }

    ssize_t length = data.length;
    int32_t index = mGrallocMem.getBufferIndex(frameNumber);
    if (0 > index) {
        LOGE("Frame number: %u not present!", frameNumber);
        return BAD_VALUE;
    }

    void *dst = mGrallocMem.getPtr(index);
    if (nullptr == dst) {
        LOGE("Invalid mapped buffer");
        return BAD_VALUE;
    }

    camera3_jpeg_blob_t jpegHeader;
    ssize_t headerSize = sizeof jpegHeader;
    buffer_handle_t *blobBufferHandle = static_cast<buffer_handle_t *>
            (mGrallocMem.getBufferHandle(index));
    ssize_t maxBlobSize;
    if (nullptr != blobBufferHandle) {
        maxBlobSize = ((private_handle_t*)(*blobBufferHandle))->width;
    } else {
        LOGE("Couldn't query buffer handle!");
        return BAD_VALUE;
    }

    if ((length + headerSize) > maxBlobSize) {
        LOGE("Depth buffer size mismatch expected: %zd actual: %zd",
                (length + headerSize), maxBlobSize);
        return BAD_VALUE;
    }

    if (0 < length) {
        memcpy(dst, data.depth_data, length);
    }

    memset(&jpegHeader, 0, headerSize);
    jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
    jpegHeader.jpeg_size = length;
    size_t jpeg_eof_offset = static_cast<size_t> (maxBlobSize - headerSize);
    uint8_t *jpegBuffer = static_cast<uint8_t *> (dst);
    uint8_t *jpegEOF = &jpegBuffer[jpeg_eof_offset];
    memcpy(jpegEOF, &jpegHeader, headerSize);

    return NO_ERROR;
}

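/*===========================================================================
 * NOTE / SKETCH (illustrative only, not part of the HAL):
 *
 * populateDepthData() above reuses the JPEG blob convention for the depth
 * buffer: the payload starts at offset 0 and a camera3_jpeg_blob_t trailer
 * is written at the very end of the gralloc allocation, whose usable size
 * is read from the handle's width field. The resulting layout is:
 *
 *   [0 .................. length)            depth payload (data.depth_data)
 *   [length ............. maxBlobSize - hdr) unused / undefined
 *   [maxBlobSize - hdr .. maxBlobSize)       camera3_jpeg_blob_t
 *                                            { CAMERA3_JPEG_BLOB_ID, length }
 *
 * so a consumer can recover the payload size by reading the trailer at
 * maxBlobSize - sizeof(camera3_jpeg_blob_t), the same way it would for a
 * regular BLOB/JPEG stream.
 *==========================================================================*/
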
/*===========================================================================
 * FUNCTION   : getOldestFrame
 *
 * DESCRIPTION: Return oldest mapped buffer
 *
 * PARAMETERS :
 *   @frameNumber : Sets oldest frame number if present
 *
 * RETURN     : buffer_handle_t pointer
 *              NULL in case of error
 *==========================================================================*/
buffer_handle_t *QCamera3DepthChannel::getOldestFrame(uint32_t &frameNumber) {
    uint32_t oldestIndex = UINT32_MAX;
    int32_t frameNumberResult = mGrallocMem.getOldestFrameNumber(oldestIndex);
    if (0 > frameNumberResult) {
        LOGD("Invalid frame number!");
        return nullptr;
    }
    frameNumber = static_cast<uint32_t> (frameNumberResult);

    buffer_handle_t *ret = static_cast<buffer_handle_t *>
            (mGrallocMem.getBufferHandle(oldestIndex));
    if (nullptr == ret) {
        LOGE("Invalid buffer handle!");
        return nullptr;
    }

    return ret;
}

/*===========================================================================
 * FUNCTION   : unmapBuffer
 *
 * DESCRIPTION: Unmap a single buffer
 *
 * PARAMETERS :
 *   @frameNumber : Frame number of buffer that should get unmapped
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3DepthChannel::unmapBuffer(uint32_t frameNumber) {
    int32_t index = mGrallocMem.getBufferIndex(frameNumber);
    if (0 > index) {
        LOGE("Frame number: %u not present!", frameNumber);
        return BAD_VALUE;
    }

    return mGrallocMem.unregisterBuffer(index);
}

/*===========================================================================
 * FUNCTION   : unmapAllBuffers
 *
 * DESCRIPTION: Unmap all currently mapped buffers
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int32_t QCamera3DepthChannel::unmapAllBuffers() {
    mGrallocMem.unregisterBuffers();

    return NO_ERROR;
}

}; // namespace qcamera