QCamera3Channel.cpp revision 24ec300b427e94714369447e316bfdcf58df9522
1/* Copyright (c) 2012-2015, The Linux Foundataion. All rights reserved. 2* 3* Redistribution and use in source and binary forms, with or without 4* modification, are permitted provided that the following conditions are 5* met: 6* * Redistributions of source code must retain the above copyright 7* notice, this list of conditions and the following disclaimer. 8* * Redistributions in binary form must reproduce the above 9* copyright notice, this list of conditions and the following 10* disclaimer in the documentation and/or other materials provided 11* with the distribution. 12* * Neither the name of The Linux Foundation nor the names of its 13* contributors may be used to endorse or promote products derived 14* from this software without specific prior written permission. 15* 16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED 17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT 19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
27* 28*/ 29 30#define ATRACE_TAG ATRACE_TAG_CAMERA 31#define LOG_TAG "QCamera3Channel" 32//#define LOG_NDEBUG 0 33#include <fcntl.h> 34#include <stdlib.h> 35#include <cstdlib> 36#include <stdio.h> 37#include <string.h> 38#include <hardware/camera3.h> 39#include <system/camera_metadata.h> 40#include <gralloc_priv.h> 41#include <utils/Log.h> 42#include <utils/Errors.h> 43#include <utils/Trace.h> 44#include <cutils/properties.h> 45#include "QCamera3Channel.h" 46#include "QCamera3HWI.h" 47 48using namespace android; 49 50 51namespace qcamera { 52static const char ExifAsciiPrefix[] = 53 { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 }; // "ASCII\0\0\0" 54static const char ExifUndefinedPrefix[] = 55 { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; // "\0\0\0\0\0\0\0\0" 56 57#define EXIF_ASCII_PREFIX_SIZE 8 //(sizeof(ExifAsciiPrefix)) 58#define FOCAL_LENGTH_DECIMAL_PRECISION 100 59 60#define VIDEO_FORMAT CAM_FORMAT_YUV_420_NV12 61#define SNAPSHOT_FORMAT CAM_FORMAT_YUV_420_NV21 62#define PREVIEW_FORMAT CAM_FORMAT_YUV_420_NV21 63#define DEFAULT_FORMAT CAM_FORMAT_YUV_420_NV21 64#define CALLBACK_FORMAT CAM_FORMAT_YUV_420_NV21 65#define RAW_FORMAT CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG 66 67/*=========================================================================== 68 * FUNCTION : QCamera3Channel 69 * 70 * DESCRIPTION: constrcutor of QCamera3Channel 71 * 72 * PARAMETERS : 73 * @cam_handle : camera handle 74 * @cam_ops : ptr to camera ops table 75 * 76 * RETURN : none 77 *==========================================================================*/ 78QCamera3Channel::QCamera3Channel(uint32_t cam_handle, 79 mm_camera_ops_t *cam_ops, 80 channel_cb_routine cb_routine, 81 cam_padding_info_t *paddingInfo, 82 uint32_t postprocess_mask, 83 void *userData, uint32_t numBuffers) 84{ 85 m_camHandle = cam_handle; 86 m_camOps = cam_ops; 87 m_bIsActive = false; 88 89 m_handle = 0; 90 m_numStreams = 0; 91 memset(mStreams, 0, sizeof(mStreams)); 92 mUserData = userData; 93 94 mStreamInfoBuf = 
NULL; 95 mChannelCB = cb_routine; 96 mPaddingInfo = paddingInfo; 97 98 mPostProcMask = postprocess_mask; 99 100 char prop[PROPERTY_VALUE_MAX]; 101 property_get("persist.camera.yuv.dump", prop, "0"); 102 mYUVDump = (uint8_t) atoi(prop); 103 mIsType = IS_TYPE_NONE; 104 mNumBuffers = numBuffers; 105} 106 107/*=========================================================================== 108 * FUNCTION : QCamera3Channel 109 * 110 * DESCRIPTION: default constrcutor of QCamera3Channel 111 * 112 * PARAMETERS : none 113 * 114 * RETURN : none 115 *==========================================================================*/ 116QCamera3Channel::QCamera3Channel() 117{ 118 m_camHandle = 0; 119 m_camOps = NULL; 120 m_bIsActive = false; 121 122 m_handle = 0; 123 m_numStreams = 0; 124 memset(mStreams, 0, sizeof(mStreams)); 125 mUserData = NULL; 126 127 mStreamInfoBuf = NULL; 128 mChannelCB = NULL; 129 mPaddingInfo = NULL; 130 131 mPostProcMask = 0; 132} 133 134/*=========================================================================== 135 * FUNCTION : ~QCamera3Channel 136 * 137 * DESCRIPTION: destructor of QCamera3Channel 138 * 139 * PARAMETERS : none 140 * 141 * RETURN : none 142 *==========================================================================*/ 143QCamera3Channel::~QCamera3Channel() 144{ 145 if (m_bIsActive) 146 stop(); 147 148 for (uint32_t i = 0; i < m_numStreams; i++) { 149 if (mStreams[i] != NULL) { 150 delete mStreams[i]; 151 mStreams[i] = 0; 152 } 153 } 154 if (m_handle) { 155 m_camOps->delete_channel(m_camHandle, m_handle); 156 ALOGE("%s: deleting channel %d", __func__, m_handle); 157 m_handle = 0; 158 } 159 m_numStreams = 0; 160} 161 162/*=========================================================================== 163 * FUNCTION : init 164 * 165 * DESCRIPTION: initialization of channel 166 * 167 * PARAMETERS : 168 * @attr : channel bundle attribute setting 169 * @dataCB : data notify callback 170 * @userData: user data ptr 171 * 172 * RETURN : int32_t type of 
status 173 * NO_ERROR -- success 174 * none-zero failure code 175 *==========================================================================*/ 176int32_t QCamera3Channel::init(mm_camera_channel_attr_t *attr, 177 mm_camera_buf_notify_t dataCB) 178{ 179 m_handle = m_camOps->add_channel(m_camHandle, 180 attr, 181 dataCB, 182 this); 183 if (m_handle == 0) { 184 ALOGE("%s: Add channel failed", __func__); 185 return UNKNOWN_ERROR; 186 } 187 return NO_ERROR; 188} 189 190/*=========================================================================== 191 * FUNCTION : addStream 192 * 193 * DESCRIPTION: add a stream into channel 194 * 195 * PARAMETERS : 196 * @streamType : stream type 197 * @streamFormat : stream format 198 * @streamDim : stream dimension 199 * @minStreamBufNum : minimal buffer count for particular stream type 200 * @postprocessMask : post-proccess feature mask 201 * @isType : type of image stabilization required on the stream 202 * 203 * RETURN : int32_t type of status 204 * NO_ERROR -- success 205 * none-zero failure code 206 *==========================================================================*/ 207int32_t QCamera3Channel::addStream(cam_stream_type_t streamType, 208 cam_format_t streamFormat, 209 cam_dimension_t streamDim, 210 uint8_t minStreamBufNum, 211 uint32_t postprocessMask, 212 cam_is_type_t isType) 213{ 214 int32_t rc = NO_ERROR; 215 216 if (m_numStreams >= 1) { 217 ALOGE("%s: Only one stream per channel supported in v3 Hal", __func__); 218 return BAD_VALUE; 219 } 220 221 if (m_numStreams >= MAX_STREAM_NUM_IN_BUNDLE) { 222 ALOGE("%s: stream number (%d) exceeds max limit (%d)", 223 __func__, m_numStreams, MAX_STREAM_NUM_IN_BUNDLE); 224 return BAD_VALUE; 225 } 226 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle, 227 m_handle, 228 m_camOps, 229 mPaddingInfo, 230 this); 231 if (pStream == NULL) { 232 ALOGE("%s: No mem for Stream", __func__); 233 return NO_MEMORY; 234 } 235 236 rc = pStream->init(streamType, streamFormat, streamDim, NULL, 
minStreamBufNum, 237 postprocessMask, isType, streamCbRoutine, this); 238 if (rc == 0) { 239 mStreams[m_numStreams] = pStream; 240 m_numStreams++; 241 } else { 242 delete pStream; 243 } 244 return rc; 245} 246 247/*=========================================================================== 248 * FUNCTION : start 249 * 250 * DESCRIPTION: start channel, which will start all streams belong to this channel 251 * 252 * PARAMETERS : 253 * 254 * RETURN : int32_t type of status 255 * NO_ERROR -- success 256 * none-zero failure code 257 *==========================================================================*/ 258int32_t QCamera3Channel::start() 259{ 260 ATRACE_CALL(); 261 int32_t rc = NO_ERROR; 262 263 if (m_numStreams > 1) { 264 ALOGE("%s: bundle not supported", __func__); 265 } else if (m_numStreams == 0) { 266 return NO_INIT; 267 } 268 269 if(m_bIsActive) { 270 ALOGD("%s: Attempt to start active channel", __func__); 271 return rc; 272 } 273 274 for (uint32_t i = 0; i < m_numStreams; i++) { 275 if (mStreams[i] != NULL) { 276 mStreams[i]->start(); 277 } 278 } 279 rc = m_camOps->start_channel(m_camHandle, m_handle); 280 281 if (rc != NO_ERROR) { 282 for (uint32_t i = 0; i < m_numStreams; i++) { 283 if (mStreams[i] != NULL) { 284 mStreams[i]->stop(); 285 } 286 } 287 } else { 288 m_bIsActive = true; 289 } 290 291 return rc; 292} 293 294/*=========================================================================== 295 * FUNCTION : stop 296 * 297 * DESCRIPTION: stop a channel, which will stop all streams belong to this channel 298 * 299 * PARAMETERS : none 300 * 301 * RETURN : int32_t type of status 302 * NO_ERROR -- success 303 * none-zero failure code 304 *==========================================================================*/ 305int32_t QCamera3Channel::stop() 306{ 307 ATRACE_CALL(); 308 int32_t rc = NO_ERROR; 309 if(!m_bIsActive) { 310 ALOGE("%s: Attempt to stop inactive channel", __func__); 311 return rc; 312 } 313 314 for (uint32_t i = 0; i < m_numStreams; i++) { 
315 if (mStreams[i] != NULL) { 316 mStreams[i]->stop(); 317 } 318 } 319 320 rc = m_camOps->stop_channel(m_camHandle, m_handle); 321 322 m_bIsActive = false; 323 return rc; 324} 325 326/*=========================================================================== 327 * FUNCTION : bufDone 328 * 329 * DESCRIPTION: return a stream buf back to kernel 330 * 331 * PARAMETERS : 332 * @recvd_frame : stream buf frame to be returned 333 * 334 * RETURN : int32_t type of status 335 * NO_ERROR -- success 336 * none-zero failure code 337 *==========================================================================*/ 338int32_t QCamera3Channel::bufDone(mm_camera_super_buf_t *recvd_frame) 339{ 340 int32_t rc = NO_ERROR; 341 for (uint32_t i = 0; i < recvd_frame->num_bufs; i++) { 342 if (recvd_frame->bufs[i] != NULL) { 343 for (uint32_t j = 0; j < m_numStreams; j++) { 344 if (mStreams[j] != NULL && 345 mStreams[j]->getMyHandle() == recvd_frame->bufs[i]->stream_id) { 346 rc = mStreams[j]->bufDone(recvd_frame->bufs[i]->buf_idx); 347 break; // break loop j 348 } 349 } 350 } 351 } 352 353 return rc; 354} 355 356/*=========================================================================== 357 * FUNCTION : getStreamTypeMask 358 * 359 * DESCRIPTION: Get bit mask of all stream types in this channel 360 * 361 * PARAMETERS : None 362 * 363 * RETURN : Bit mask of all stream types in this channel 364 *==========================================================================*/ 365uint32_t QCamera3Channel::getStreamTypeMask() 366{ 367 uint32_t mask = 0; 368 for (uint32_t i = 0; i < m_numStreams; i++) { 369 mask |= (1U << mStreams[i]->getMyType()); 370 } 371 return mask; 372} 373 374/*=========================================================================== 375 * FUNCTION : getStreamID 376 * 377 * DESCRIPTION: Get StreamID of requested stream type 378 * 379 * PARAMETERS : streamMask 380 * 381 * RETURN : Stream ID 382 *==========================================================================*/ 
383uint32_t QCamera3Channel::getStreamID(uint32_t streamMask) 384{ 385 uint32_t streamID = 0; 386 for (uint32_t i = 0; i < m_numStreams; i++) { 387 if (streamMask == (uint32_t )(0x1 << mStreams[i]->getMyType())) { 388 streamID = mStreams[i]->getMyServerID(); 389 break; 390 } 391 } 392 return streamID; 393} 394 395/*=========================================================================== 396 * FUNCTION : getStreamByHandle 397 * 398 * DESCRIPTION: return stream object by stream handle 399 * 400 * PARAMETERS : 401 * @streamHandle : stream handle 402 * 403 * RETURN : stream object. NULL if not found 404 *==========================================================================*/ 405QCamera3Stream *QCamera3Channel::getStreamByHandle(uint32_t streamHandle) 406{ 407 for (uint32_t i = 0; i < m_numStreams; i++) { 408 if (mStreams[i] != NULL && mStreams[i]->getMyHandle() == streamHandle) { 409 return mStreams[i]; 410 } 411 } 412 return NULL; 413} 414 415/*=========================================================================== 416 * FUNCTION : getStreamByIndex 417 * 418 * DESCRIPTION: return stream object by index 419 * 420 * PARAMETERS : 421 * @streamHandle : stream handle 422 * 423 * RETURN : stream object. NULL if not found 424 *==========================================================================*/ 425QCamera3Stream *QCamera3Channel::getStreamByIndex(uint32_t index) 426{ 427 if (index < m_numStreams) { 428 return mStreams[index]; 429 } 430 return NULL; 431} 432 433/*=========================================================================== 434 * FUNCTION : streamCbRoutine 435 * 436 * DESCRIPTION: callback routine for stream 437 * 438 * PARAMETERS : 439 * @streamHandle : stream handle 440 * 441 * RETURN : stream object. 
NULL if not found 442 *==========================================================================*/ 443void QCamera3Channel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 444 QCamera3Stream *stream, void *userdata) 445{ 446 QCamera3Channel *channel = (QCamera3Channel *)userdata; 447 if (channel == NULL) { 448 ALOGE("%s: invalid channel pointer", __func__); 449 return; 450 } 451 channel->streamCbRoutine(super_frame, stream); 452} 453 454/*=========================================================================== 455 * FUNCTION : dumpYUV 456 * 457 * DESCRIPTION: function to dump the YUV data from ISP/pproc 458 * 459 * PARAMETERS : 460 * @frame : frame to be dumped 461 * @dim : dimension of the stream 462 * @offset : offset of the data 463 * @name : 1 if it is ISP output/pproc input, 2 if it is pproc output 464 * 465 * RETURN : 466 *==========================================================================*/ 467void QCamera3Channel::dumpYUV(mm_camera_buf_def_t *frame, cam_dimension_t dim, 468 cam_frame_len_offset_t offset, uint8_t name) 469{ 470 char buf[FILENAME_MAX]; 471 memset(buf, 0, sizeof(buf)); 472 static int counter = 0; 473 /* Note that the image dimension will be the unrotated stream dimension. 
474 * If you feel that the image would have been rotated during reprocess 475 * then swap the dimensions while opening the file 476 * */ 477 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"%d_%d_%d_%dx%d.yuv", 478 name, counter, frame->frame_idx, dim.width, dim.height); 479 counter++; 480 int file_fd = open(buf, O_RDWR | O_CREAT, 0644); 481 if (file_fd >= 0) { 482 ssize_t written_len = write(file_fd, frame->buffer, offset.frame_len); 483 ALOGE("%s: written number of bytes %d", __func__, written_len); 484 close(file_fd); 485 } else { 486 ALOGE("%s: failed to open file to dump image", __func__); 487 } 488} 489 490/*=========================================================================== 491 * FUNCTION : QCamera3RegularChannel 492 * 493 * DESCRIPTION: constructor of QCamera3RegularChannel 494 * 495 * PARAMETERS : 496 * @cam_handle : camera handle 497 * @cam_ops : ptr to camera ops table 498 * @cb_routine : callback routine to frame aggregator 499 * @stream : camera3_stream_t structure 500 * @stream_type: Channel stream type 501 * @postprocess_mask: feature mask for postprocessing 502 * @numBuffers : number of max dequeued buffers 503 * 504 * RETURN : none 505 *==========================================================================*/ 506QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle, 507 mm_camera_ops_t *cam_ops, 508 channel_cb_routine cb_routine, 509 cam_padding_info_t *paddingInfo, 510 void *userData, 511 camera3_stream_t *stream, 512 cam_stream_type_t stream_type, 513 uint32_t postprocess_mask, 514 uint32_t numBuffers) : 515 QCamera3Channel(cam_handle, cam_ops, cb_routine, 516 paddingInfo, postprocess_mask, userData, 517 numBuffers), 518 mCamera3Stream(stream), 519 mNumBufs(0), 520 mStreamType(stream_type), 521 mWidth(stream->width), 522 mHeight(stream->height) 523{ 524} 525 526/*=========================================================================== 527 * FUNCTION : QCamera3RegularChannel 528 * 529 * DESCRIPTION: constructor of 
QCamera3RegularChannel 530 * 531 * PARAMETERS : 532 * @cam_handle : camera handle 533 * @cam_ops : ptr to camera ops table 534 * @cb_routine : callback routine to frame aggregator 535 * @padding_info: padding information for stream 536 * @userData : pointer to hal object 537 * @stream : camera3_stream_t structure 538 * @stream_type : Channel stream type 539 * @postprocess_mask: bit mask for postprocessing 540 * @width : width overriding camera3_stream_t::width 541 * @height : height overriding camera3_stream_t::height 542 * @numBuffers : number of maximum dequeued buffers` 543 * 544 * RETURN : none 545 *==========================================================================*/ 546QCamera3RegularChannel::QCamera3RegularChannel(uint32_t cam_handle, 547 mm_camera_ops_t *cam_ops, 548 channel_cb_routine cb_routine, 549 cam_padding_info_t *paddingInfo, 550 void *userData, 551 camera3_stream_t *stream, 552 cam_stream_type_t stream_type, 553 uint32_t postprocess_mask, 554 uint32_t width, uint32_t height, 555 uint32_t numBuffers) : 556 QCamera3Channel(cam_handle, cam_ops, cb_routine, 557 paddingInfo, postprocess_mask, userData, 558 numBuffers), 559 mCamera3Stream(stream), 560 mNumBufs(0), 561 mStreamType(stream_type), 562 mWidth(width), 563 mHeight(height) 564{ 565} 566 567/*=========================================================================== 568 * FUNCTION : ~QCamera3RegularChannel 569 * 570 * DESCRIPTION: destructor of QCamera3RegularChannel 571 * 572 * PARAMETERS : none 573 * 574 * RETURN : none 575 *==========================================================================*/ 576QCamera3RegularChannel::~QCamera3RegularChannel() 577{ 578} 579 580/*=========================================================================== 581 * FUNCTION : initialize 582 * 583 * DESCRIPTION: Initialize and add camera channel & stream 584 * 585 * PARAMETERS : 586 * @isType : type of image stabilization required on this stream 587 * 588 * RETURN : int32_t type of status 589 * 
NO_ERROR -- success 590 * none-zero failure code 591 *==========================================================================*/ 592 593int32_t QCamera3RawChannel::initialize(cam_is_type_t isType) 594{ 595 return QCamera3RegularChannel::initialize(isType); 596} 597int32_t QCamera3RegularChannel::initialize(cam_is_type_t isType) 598{ 599 ATRACE_CALL(); 600 int32_t rc = NO_ERROR; 601 cam_format_t streamFormat; 602 cam_dimension_t streamDim; 603 604 if (NULL == mCamera3Stream) { 605 ALOGE("%s: Camera stream uninitialized", __func__); 606 return NO_INIT; 607 } 608 609 if (1 <= m_numStreams) { 610 // Only one stream per channel supported in v3 Hal 611 return NO_ERROR; 612 } 613 614 rc = init(NULL, NULL); 615 if (rc < 0) { 616 ALOGE("%s: init failed", __func__); 617 return rc; 618 } 619 620 mNumBufs = CAM_MAX_NUM_BUFS_PER_STREAM; 621 mIsType = isType; 622 623 if (mCamera3Stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) { 624 if (mStreamType == CAM_STREAM_TYPE_VIDEO) { 625 streamFormat = VIDEO_FORMAT; 626 } else if (mStreamType == CAM_STREAM_TYPE_PREVIEW) { 627 streamFormat = PREVIEW_FORMAT; 628 } else { 629 //TODO: Add a new flag in libgralloc for ZSL buffers, and its size needs 630 // to be properly aligned and padded. 631 streamFormat = DEFAULT_FORMAT; 632 } 633 } else if(mCamera3Stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) { 634 streamFormat = CALLBACK_FORMAT; 635 } else if (mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE || 636 mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW10 || 637 mCamera3Stream->format == HAL_PIXEL_FORMAT_RAW16) { 638 // Bayer pattern doesn't matter here. 639 // All CAMIF raw format uses 10bit. 
640 streamFormat = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG; 641 } else { 642 //TODO: Fail for other types of streams for now 643 ALOGE("%s: format is not IMPLEMENTATION_DEFINED or flexible", __func__); 644 return -EINVAL; 645 } 646 647 streamDim.width = (int32_t)mWidth; 648 streamDim.height = (int32_t)mHeight; 649 650 rc = QCamera3Channel::addStream(mStreamType, 651 streamFormat, 652 streamDim, 653 mNumBufs, 654 mPostProcMask, 655 mIsType); 656 657 return rc; 658} 659 660/*=========================================================================== 661* FUNCTION : start 662* 663* DESCRIPTION: start a regular channel 664* 665* PARAMETERS : 666* 667* RETURN : int32_t type of status 668* NO_ERROR -- success 669* none-zero failure code 670*==========================================================================*/ 671int32_t QCamera3RegularChannel::start() 672{ 673 ATRACE_CALL(); 674 int32_t rc = NO_ERROR; 675 676 if (0 < mMemory.getCnt()) { 677 rc = QCamera3Channel::start(); 678 } 679 return rc; 680} 681 682/*=========================================================================== 683 * FUNCTION : request 684 * 685 * DESCRIPTION: process a request from camera service. Stream on if ncessary. 686 * 687 * PARAMETERS : 688 * @buffer : buffer to be filled for this request 689 * 690 * RETURN : 0 on a success start of capture 691 * -EINVAL on invalid input 692 * -ENODEV on serious error 693 *==========================================================================*/ 694int32_t QCamera3RegularChannel::request(buffer_handle_t *buffer, uint32_t frameNumber) 695{ 696 ATRACE_CALL(); 697 //FIX ME: Return buffer back in case of failures below. 
698 699 int32_t rc = NO_ERROR; 700 int index; 701 702 if (NULL == buffer) { 703 ALOGE("%s: Invalid buffer in channel request", __func__); 704 return BAD_VALUE; 705 } 706 707 if(!m_bIsActive) { 708 rc = registerBuffer(buffer, mIsType); 709 if (NO_ERROR != rc) { 710 ALOGE("%s: On-the-fly buffer registration failed %d", 711 __func__, rc); 712 return rc; 713 } 714 715 rc = start(); 716 if (NO_ERROR != rc) { 717 return rc; 718 } 719 } else { 720 CDBG("%s: Request on an existing stream",__func__); 721 } 722 723 index = mMemory.getMatchBufIndex((void*)buffer); 724 if(index < 0) { 725 rc = registerBuffer(buffer, mIsType); 726 if (NO_ERROR != rc) { 727 ALOGE("%s: On-the-fly buffer registration failed %d", 728 __func__, rc); 729 return rc; 730 } 731 732 index = mMemory.getMatchBufIndex((void*)buffer); 733 if (index < 0) { 734 ALOGE("%s: Could not find object among registered buffers", 735 __func__); 736 return DEAD_OBJECT; 737 } 738 } 739 740 rc = mStreams[0]->bufDone((uint32_t)index); 741 if(rc != NO_ERROR) { 742 ALOGE("%s: Failed to Q new buffer to stream",__func__); 743 return rc; 744 } 745 746 rc = mMemory.markFrameNumber((uint32_t)index, frameNumber); 747 return rc; 748} 749 750/*=========================================================================== 751 * FUNCTION : registerBuffer 752 * 753 * DESCRIPTION: register streaming buffer to the channel object 754 * 755 * PARAMETERS : 756 * @buffer : buffer to be registered 757 * @isType : type of image stabilization required on this stream 758 * 759 * RETURN : int32_t type of status 760 * NO_ERROR -- success 761 * none-zero failure code 762 *==========================================================================*/ 763int32_t QCamera3RegularChannel::registerBuffer(buffer_handle_t *buffer, cam_is_type_t isType) 764{ 765 ATRACE_CALL(); 766 int rc = 0; 767 mIsType = isType; 768 cam_stream_type_t streamType; 769 770 if (0 == m_numStreams) { 771 rc = initialize(mIsType); 772 if (rc != NO_ERROR) { 773 ALOGE("%s: Couldn't 
initialize camera stream %d", 774 __func__, rc); 775 return rc; 776 } 777 } 778 779 if (((uint32_t)mMemory.getCnt() + 1) > mNumBufs) { 780 ALOGE("%s: Trying to register more buffers than initially requested", 781 __func__); 782 return BAD_VALUE; 783 } 784 785 streamType = mStreams[0]->getMyType(); 786 rc = mMemory.registerBuffer(buffer, streamType); 787 if (ALREADY_EXISTS == rc) { 788 return NO_ERROR; 789 } else if (NO_ERROR != rc) { 790 ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc); 791 return rc; 792 } 793 794 return rc; 795} 796 797void QCamera3RegularChannel::streamCbRoutine( 798 mm_camera_super_buf_t *super_frame, 799 QCamera3Stream *stream) 800{ 801 ATRACE_CALL(); 802 //FIXME Q Buf back in case of error? 803 uint8_t frameIndex; 804 buffer_handle_t *resultBuffer; 805 int32_t resultFrameNumber; 806 camera3_stream_buffer_t result; 807 808 if (NULL == stream) { 809 ALOGE("%s: Invalid stream", __func__); 810 return; 811 } 812 813 if(!super_frame) { 814 ALOGE("%s: Invalid Super buffer",__func__); 815 return; 816 } 817 818 if(super_frame->num_bufs != 1) { 819 ALOGE("%s: Multiple streams are not supported",__func__); 820 return; 821 } 822 if(super_frame->bufs[0] == NULL ) { 823 ALOGE("%s: Error, Super buffer frame does not contain valid buffer", 824 __func__); 825 return; 826 } 827 828 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx; 829 if(frameIndex >= mNumBufs) { 830 ALOGE("%s: Error, Invalid index for buffer",__func__); 831 stream->bufDone(frameIndex); 832 return; 833 } 834 835 ////Use below data to issue framework callback 836 resultBuffer = (buffer_handle_t *)mMemory.getBufferHandle(frameIndex); 837 resultFrameNumber = mMemory.getFrameNumber(frameIndex); 838 839 result.stream = mCamera3Stream; 840 result.buffer = resultBuffer; 841 result.status = CAMERA3_BUFFER_STATUS_OK; 842 result.acquire_fence = -1; 843 result.release_fence = -1; 844 int32_t rc = stream->bufRelease(frameIndex); 845 if (NO_ERROR != rc) { 846 ALOGE("%s: Error %d 
releasing stream buffer %d", 847 __func__, rc, frameIndex); 848 } 849 850 rc = mMemory.unregisterBuffer(frameIndex); 851 if (NO_ERROR != rc) { 852 ALOGE("%s: Error %d unregistering stream buffer %d", 853 __func__, rc, frameIndex); 854 } 855 856 if (0 <= resultFrameNumber) { 857 mChannelCB(NULL, &result, (uint32_t)resultFrameNumber, mUserData); 858 } else { 859 ALOGE("%s: Bad brame number", __func__); 860 } 861 free(super_frame); 862 return; 863} 864 865QCamera3Memory* QCamera3RegularChannel::getStreamBufs(uint32_t /*len*/) 866{ 867 return &mMemory; 868} 869 870void QCamera3RegularChannel::putStreamBufs() 871{ 872 mMemory.unregisterBuffers(); 873} 874 875QCamera3MetadataChannel::QCamera3MetadataChannel(uint32_t cam_handle, 876 mm_camera_ops_t *cam_ops, 877 channel_cb_routine cb_routine, 878 cam_padding_info_t *paddingInfo, 879 uint32_t postprocess_mask, 880 void *userData, uint32_t numBuffers) : 881 QCamera3Channel(cam_handle, cam_ops, 882 cb_routine, paddingInfo, postprocess_mask, 883 userData, numBuffers), 884 mMemory(NULL) 885{ 886} 887 888QCamera3MetadataChannel::~QCamera3MetadataChannel() 889{ 890 if (m_bIsActive) 891 stop(); 892 893 if (mMemory) { 894 mMemory->deallocate(); 895 delete mMemory; 896 mMemory = NULL; 897 } 898} 899 900int32_t QCamera3MetadataChannel::initialize(cam_is_type_t isType) 901{ 902 ATRACE_CALL(); 903 int32_t rc; 904 cam_dimension_t streamDim; 905 906 if (mMemory || m_numStreams > 0) { 907 ALOGE("%s: metadata channel already initialized", __func__); 908 return -EINVAL; 909 } 910 911 rc = init(NULL, NULL); 912 if (rc < 0) { 913 ALOGE("%s: init failed", __func__); 914 return rc; 915 } 916 917 streamDim.width = (int32_t)sizeof(metadata_buffer_t), 918 streamDim.height = 1; 919 920 mIsType = isType; 921 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_METADATA, CAM_FORMAT_MAX, 922 streamDim, (uint8_t)mNumBuffers, mPostProcMask, mIsType); 923 if (rc < 0) { 924 ALOGE("%s: addStream failed", __func__); 925 } 926 return rc; 927} 928 929int32_t 
QCamera3MetadataChannel::request(buffer_handle_t * /*buffer*/, 930 uint32_t /*frameNumber*/) 931{ 932 if (!m_bIsActive) { 933 return start(); 934 } 935 else 936 return 0; 937} 938 939void QCamera3MetadataChannel::streamCbRoutine( 940 mm_camera_super_buf_t *super_frame, 941 QCamera3Stream * /*stream*/) 942{ 943 ATRACE_CALL(); 944 uint32_t requestNumber = 0; 945 if (super_frame == NULL || super_frame->num_bufs != 1) { 946 ALOGE("%s: super_frame is not valid", __func__); 947 return; 948 } 949 mChannelCB(super_frame, NULL, requestNumber, mUserData); 950} 951 952QCamera3Memory* QCamera3MetadataChannel::getStreamBufs(uint32_t len) 953{ 954 int rc; 955 if (len < sizeof(metadata_buffer_t)) { 956 ALOGE("%s: Metadata buffer size less than structure %d vs %d", 957 __func__, 958 len, 959 sizeof(metadata_buffer_t)); 960 return NULL; 961 } 962 mMemory = new QCamera3HeapMemory(); 963 if (!mMemory) { 964 ALOGE("%s: unable to create metadata memory", __func__); 965 return NULL; 966 } 967 rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true); 968 if (rc < 0) { 969 ALOGE("%s: unable to allocate metadata memory", __func__); 970 delete mMemory; 971 mMemory = NULL; 972 return NULL; 973 } 974 clear_metadata_buffer((metadata_buffer_t*)mMemory->getPtr(0)); 975 return mMemory; 976} 977 978void QCamera3MetadataChannel::putStreamBufs() 979{ 980 mMemory->deallocate(); 981 delete mMemory; 982 mMemory = NULL; 983} 984/*************************************************************************************/ 985// RAW Channel related functions 986QCamera3RawChannel::QCamera3RawChannel(uint32_t cam_handle, 987 mm_camera_ops_t *cam_ops, 988 channel_cb_routine cb_routine, 989 cam_padding_info_t *paddingInfo, 990 void *userData, 991 camera3_stream_t *stream, 992 uint32_t postprocess_mask, 993 bool raw_16, uint32_t numBuffers) : 994 QCamera3RegularChannel(cam_handle, cam_ops, 995 cb_routine, paddingInfo, userData, stream, 996 CAM_STREAM_TYPE_RAW, postprocess_mask, numBuffers), 997 mIsRaw16(raw_16) 
998{ 999 char prop[PROPERTY_VALUE_MAX]; 1000 property_get("persist.camera.raw.debug.dump", prop, "0"); 1001 mRawDump = atoi(prop); 1002} 1003 1004QCamera3RawChannel::~QCamera3RawChannel() 1005{ 1006} 1007 1008void QCamera3RawChannel::streamCbRoutine( 1009 mm_camera_super_buf_t *super_frame, 1010 QCamera3Stream * stream) 1011{ 1012 ATRACE_CALL(); 1013 /* Move this back down once verified */ 1014 if (mRawDump) 1015 dumpRawSnapshot(super_frame->bufs[0]); 1016 1017 if (mIsRaw16) { 1018 if (RAW_FORMAT == CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG) 1019 convertMipiToRaw16(super_frame->bufs[0]); 1020 else 1021 convertLegacyToRaw16(super_frame->bufs[0]); 1022 } 1023 1024 //Make sure cache coherence because extra processing is done 1025 mMemory.cleanInvalidateCache(super_frame->bufs[0]->buf_idx); 1026 1027 QCamera3RegularChannel::streamCbRoutine(super_frame, stream); 1028 return; 1029} 1030 1031void QCamera3RawChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame) 1032{ 1033 QCamera3Stream *stream = getStreamByIndex(0); 1034 if (stream != NULL) { 1035 char buf[FILENAME_MAX]; 1036 memset(buf, 0, sizeof(buf)); 1037 cam_dimension_t dim; 1038 memset(&dim, 0, sizeof(dim)); 1039 stream->getFrameDimension(dim); 1040 1041 cam_frame_len_offset_t offset; 1042 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 1043 stream->getFrameOffset(offset); 1044 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"r_%d_%dx%d.raw", 1045 frame->frame_idx, offset.mp[0].stride, offset.mp[0].scanline); 1046 1047 int file_fd = open(buf, O_RDWR| O_CREAT, 0644); 1048 if (file_fd >= 0) { 1049 ssize_t written_len = write(file_fd, frame->buffer, frame->frame_len); 1050 ALOGE("%s: written number of bytes %zd", __func__, written_len); 1051 close(file_fd); 1052 } else { 1053 ALOGE("%s: failed to open file to dump image", __func__); 1054 } 1055 } else { 1056 ALOGE("%s: Could not find stream", __func__); 1057 } 1058 1059} 1060 1061void QCamera3RawChannel::convertLegacyToRaw16(mm_camera_buf_def_t *frame) 1062{ 1063 // 
Convert image buffer from Opaque raw format to RAW16 format 1064 // 10bit Opaque raw is stored in the format of: 1065 // 0000 - p5 - p4 - p3 - p2 - p1 - p0 1066 // where p0 to p5 are 6 pixels (each is 10bit)_and most significant 1067 // 4 bits are 0s. Each 64bit word contains 6 pixels. 1068 1069 QCamera3Stream *stream = getStreamByIndex(0); 1070 if (stream != NULL) { 1071 cam_dimension_t dim; 1072 memset(&dim, 0, sizeof(dim)); 1073 stream->getFrameDimension(dim); 1074 1075 cam_frame_len_offset_t offset; 1076 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 1077 stream->getFrameOffset(offset); 1078 1079 uint32_t raw16_stride = (uint32_t)PAD_TO_SIZE(dim.width, 32); 1080 uint16_t* raw16_buffer = (uint16_t *)frame->buffer; 1081 1082 // In-place format conversion. 1083 // Raw16 format always occupy more memory than opaque raw10. 1084 // Convert to Raw16 by iterating through all pixels from bottom-right 1085 // to top-left of the image. 1086 // One special notes: 1087 // 1. Cross-platform raw16's stride is 16 pixels. 1088 // 2. Opaque raw10's stride is 6 pixels, and aligned to 16 bytes. 
1089 for (int32_t ys = dim.height - 1; ys >= 0; ys--) { 1090 uint32_t y = (uint32_t)ys; 1091 uint64_t* row_start = (uint64_t *)frame->buffer + 1092 y * (uint32_t)offset.mp[0].stride_in_bytes / 8; 1093 for (int32_t xs = dim.width - 1; xs >= 0; xs--) { 1094 uint32_t x = (uint32_t)xs; 1095 uint16_t raw16_pixel = 0x3FF & (row_start[x/6] >> (10*(x%6))); 1096 raw16_buffer[y*raw16_stride+x] = raw16_pixel; 1097 } 1098 } 1099 } else { 1100 ALOGE("%s: Could not find stream", __func__); 1101 } 1102 1103} 1104 1105void QCamera3RawChannel::convertMipiToRaw16(mm_camera_buf_def_t *frame) 1106{ 1107 // Convert image buffer from mipi10 raw format to RAW16 format 1108 // mipi10 opaque raw is stored in the format of: 1109 // P3(1:0) P2(1:0) P1(1:0) P0(1:0) P3(9:2) P2(9:2) P1(9:2) P0(9:2) 1110 // 4 pixels occupy 5 bytes, no padding needed 1111 1112 QCamera3Stream *stream = getStreamByIndex(0); 1113 if (stream != NULL) { 1114 cam_dimension_t dim; 1115 memset(&dim, 0, sizeof(dim)); 1116 stream->getFrameDimension(dim); 1117 1118 cam_frame_len_offset_t offset; 1119 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 1120 stream->getFrameOffset(offset); 1121 1122 uint32_t raw16_stride = (uint32_t)PAD_TO_SIZE(dim.width, 32); 1123 uint16_t* raw16_buffer = (uint16_t *)frame->buffer; 1124 1125 // In-place format conversion. 1126 // Raw16 format always occupy more memory than opaque raw10. 1127 // Convert to Raw16 by iterating through all pixels from bottom-right 1128 // to top-left of the image. 1129 // One special notes: 1130 // 1. Cross-platform raw16's stride is 16 pixels. 1131 // 2. mipi raw10's stride is 4 pixels, and aligned to 16 bytes. 
1132 for (int32_t ys = dim.height - 1; ys >= 0; ys--) { 1133 uint32_t y = (uint32_t)ys; 1134 uint8_t* row_start = (uint8_t *)frame->buffer + 1135 y * (uint32_t)offset.mp[0].stride_in_bytes; 1136 for (int32_t xs = dim.width - 1; xs >= 0; xs--) { 1137 uint32_t x = (uint32_t)xs; 1138 uint8_t upper_8bit = row_start[5*(x/4)+x%4]; 1139 uint8_t lower_2bit = ((row_start[5*(x/4)+4] >> (x%4)) & 0x3); 1140 uint16_t raw16_pixel = 1141 (uint16_t)(((uint16_t)upper_8bit)<<2 | 1142 (uint16_t)lower_2bit); 1143 raw16_buffer[y*raw16_stride+x] = raw16_pixel; 1144 } 1145 } 1146 } else { 1147 ALOGE("%s: Could not find stream", __func__); 1148 } 1149 1150} 1151 1152 1153/*************************************************************************************/ 1154// RAW Dump Channel related functions 1155 1156/*=========================================================================== 1157 * FUNCTION : QCamera3RawDumpChannel 1158 * 1159 * DESCRIPTION: Constructor for RawDumpChannel 1160 * 1161 * PARAMETERS : 1162 * @cam_handle : Handle for Camera 1163 * @cam_ops : Function pointer table 1164 * @rawDumpSize : Dimensions for the Raw stream 1165 * @paddinginfo : Padding information for stream 1166 * @userData : Cookie for parent 1167 * @pp mask : PP feature mask for this stream 1168 * @numBuffers : number of max dequeued buffers 1169 * 1170 * RETURN : NA 1171 *==========================================================================*/ 1172QCamera3RawDumpChannel::QCamera3RawDumpChannel(uint32_t cam_handle, 1173 mm_camera_ops_t *cam_ops, 1174 cam_dimension_t rawDumpSize, 1175 cam_padding_info_t *paddingInfo, 1176 void *userData, 1177 uint32_t postprocess_mask, uint32_t numBuffers) : 1178 QCamera3Channel(cam_handle, cam_ops, NULL, 1179 paddingInfo, postprocess_mask, 1180 userData, numBuffers), 1181 mDim(rawDumpSize), 1182 mMemory(NULL) 1183{ 1184 char prop[PROPERTY_VALUE_MAX]; 1185 property_get("persist.camera.raw.dump", prop, "0"); 1186 mRawDump = atoi(prop); 1187} 1188 
1189/*=========================================================================== 1190 * FUNCTION : QCamera3RawDumpChannel 1191 * 1192 * DESCRIPTION: Destructor for RawDumpChannel 1193 * 1194 * PARAMETERS : 1195 * 1196 * RETURN : NA 1197 *==========================================================================*/ 1198 1199QCamera3RawDumpChannel::~QCamera3RawDumpChannel() 1200{ 1201} 1202 1203/*=========================================================================== 1204 * FUNCTION : dumpRawSnapshot 1205 * 1206 * DESCRIPTION: Helper function to dump Raw frames 1207 * 1208 * PARAMETERS : 1209 * @frame : stream buf frame to be dumped 1210 * 1211 * RETURN : NA 1212 *==========================================================================*/ 1213void QCamera3RawDumpChannel::dumpRawSnapshot(mm_camera_buf_def_t *frame) 1214{ 1215 QCamera3Stream *stream = getStreamByIndex(0); 1216 if (stream != NULL) { 1217 char buf[FILENAME_MAX]; 1218 struct timeval tv; 1219 struct tm timeinfo_data; 1220 struct tm *timeinfo; 1221 1222 cam_dimension_t dim; 1223 memset(&dim, 0, sizeof(dim)); 1224 stream->getFrameDimension(dim); 1225 1226 cam_frame_len_offset_t offset; 1227 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 1228 stream->getFrameOffset(offset); 1229 1230 gettimeofday(&tv, NULL); 1231 timeinfo = localtime_r(&tv.tv_sec, &timeinfo_data); 1232 1233 if (NULL != timeinfo) { 1234 memset(buf, 0, sizeof(buf)); 1235 snprintf(buf, sizeof(buf), 1236 QCAMERA_DUMP_FRM_LOCATION 1237 "%04d-%02d-%02d-%02d-%02d-%02d-%06ld_%d_%dx%d.raw", 1238 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1, 1239 timeinfo->tm_mday, timeinfo->tm_hour, 1240 timeinfo->tm_min, timeinfo->tm_sec,tv.tv_usec, 1241 frame->frame_idx, dim.width, dim.height); 1242 1243 int file_fd = open(buf, O_RDWR| O_CREAT, 0777); 1244 if (file_fd >= 0) { 1245 ssize_t written_len = 1246 write(file_fd, frame->buffer, offset.frame_len); 1247 CDBG("%s: written number of bytes %zd", __func__, written_len); 1248 close(file_fd); 1249 } 
else { 1250 ALOGE("%s: failed to open file to dump image", __func__); 1251 } 1252 } else { 1253 ALOGE("%s: localtime_r() error", __func__); 1254 } 1255 } else { 1256 ALOGE("%s: Could not find stream", __func__); 1257 } 1258 1259} 1260 1261/*=========================================================================== 1262 * FUNCTION : streamCbRoutine 1263 * 1264 * DESCRIPTION: Callback routine invoked for each frame generated for 1265 * Rawdump channel 1266 * 1267 * PARAMETERS : 1268 * @super_frame : stream buf frame generated 1269 * @stream : Underlying Stream object cookie 1270 * 1271 * RETURN : NA 1272 *==========================================================================*/ 1273void QCamera3RawDumpChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 1274 QCamera3Stream *stream) 1275{ 1276 CDBG("%s: E",__func__); 1277 if (super_frame == NULL || super_frame->num_bufs != 1) { 1278 ALOGE("%s: super_frame is not valid", __func__); 1279 return; 1280 } 1281 1282 if (mRawDump) 1283 dumpRawSnapshot(super_frame->bufs[0]); 1284 1285 bufDone(super_frame); 1286 free(super_frame); 1287} 1288 1289/*=========================================================================== 1290 * FUNCTION : getStreamBufs 1291 * 1292 * DESCRIPTION: Callback function provided to interface to get buffers. 
1293 * 1294 * PARAMETERS : 1295 * @len : Length of each buffer to be allocated 1296 * 1297 * RETURN : NULL on buffer allocation failure 1298 * QCamera3Memory object on sucess 1299 *==========================================================================*/ 1300QCamera3Memory* QCamera3RawDumpChannel::getStreamBufs(uint32_t len) 1301{ 1302 int rc; 1303 mMemory = new QCamera3HeapMemory(); 1304 1305 if (!mMemory) { 1306 ALOGE("%s: unable to create heap memory", __func__); 1307 return NULL; 1308 } 1309 rc = mMemory->allocate(mNumBuffers, (size_t)len, true); 1310 if (rc < 0) { 1311 ALOGE("%s: unable to allocate heap memory", __func__); 1312 delete mMemory; 1313 mMemory = NULL; 1314 return NULL; 1315 } 1316 return mMemory; 1317} 1318 1319/*=========================================================================== 1320 * FUNCTION : putStreamBufs 1321 * 1322 * DESCRIPTION: Callback function provided to interface to return buffers. 1323 * Although no handles are actually returned, implicitl assumption 1324 * that interface will no longer use buffers and channel can 1325 * deallocated if necessary. 
1326 * 1327 * PARAMETERS : NA 1328 * 1329 * RETURN : NA 1330 *==========================================================================*/ 1331void QCamera3RawDumpChannel::putStreamBufs() 1332{ 1333 mMemory->deallocate(); 1334 delete mMemory; 1335 mMemory = NULL; 1336} 1337 1338/*=========================================================================== 1339 * FUNCTION : request 1340 * 1341 * DESCRIPTION: Request function used as trigger 1342 * 1343 * PARAMETERS : 1344 * @recvd_frame : buffer- this will be NULL since this is internal channel 1345 * @frameNumber : Undefined again since this is internal stream 1346 * 1347 * RETURN : int32_t type of status 1348 * NO_ERROR -- success 1349 * none-zero failure code 1350 *==========================================================================*/ 1351int32_t QCamera3RawDumpChannel::request(buffer_handle_t * /*buffer*/, 1352 uint32_t /*frameNumber*/) 1353{ 1354 if (!m_bIsActive) { 1355 return QCamera3Channel::start(); 1356 } 1357 else 1358 return 0; 1359} 1360 1361/*=========================================================================== 1362 * FUNCTION : intialize 1363 * 1364 * DESCRIPTION: Initializes channel params and creates underlying stream 1365 * 1366 * PARAMETERS : 1367 * @isType : type of image stabilization required on this stream 1368 * 1369 * RETURN : int32_t type of status 1370 * NO_ERROR -- success 1371 * none-zero failure code 1372 *==========================================================================*/ 1373int32_t QCamera3RawDumpChannel::initialize(cam_is_type_t isType) 1374{ 1375 int32_t rc; 1376 1377 rc = init(NULL, NULL); 1378 if (rc < 0) { 1379 ALOGE("%s: init failed", __func__); 1380 return rc; 1381 } 1382 mIsType = isType; 1383 rc = QCamera3Channel::addStream(CAM_STREAM_TYPE_RAW, 1384 CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG, mDim, (uint8_t)mNumBuffers, 1385 mPostProcMask, mIsType); 1386 if (rc < 0) { 1387 ALOGE("%s: addStream failed", __func__); 1388 } 1389 return rc; 1390} 
/*************************************************************************************/

/*===========================================================================
 * FUNCTION   : jpegEvtHandle
 *
 * DESCRIPTION: Function registerd to mm-jpeg-interface to handle jpeg events.
        Construct result payload and call mChannelCb to deliver buffer
        to framework.
 *
 * PARAMETERS :
 *   @status    : status of jpeg job
 *   @client_hdl: jpeg client handle
 *   @jobId     : jpeg job Id
 *   @p_ouput   : ptr to jpeg output result struct
 *   @userdata  : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3PicChannel::jpegEvtHandle(jpeg_job_status_t status,
                                              uint32_t /*client_hdl*/,
                                              uint32_t jobId,
                                              mm_jpeg_output_t *p_output,
                                              void *userdata)
{
    ATRACE_CALL();
    buffer_handle_t *resultBuffer = NULL;
    buffer_handle_t *jpegBufferHandle = NULL;
    int resultStatus = CAMERA3_BUFFER_STATUS_OK;
    camera3_stream_buffer_t result;
    // Transient blob header appended at the end of the jpeg buffer so the
    // framework can locate the encoded size; only filled in on success.
    camera3_jpeg_blob_t jpegHeader;

    QCamera3PicChannel *obj = (QCamera3PicChannel *)userdata;
    if (obj) {
        //Construct payload for process_capture_result. Call mChannelCb

        // Look up the jpeg job context created when the request was queued.
        qcamera_hal3_jpeg_data_t *job = obj->m_postprocessor.findJpegJobByJobId(jobId);

        if ((job == NULL) || (status == JPEG_JOB_STATUS_ERROR)) {
            ALOGE("%s: Error in jobId: (%d) with status: %d", __func__, jobId, status);
            resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
        }

        if (NULL != job) {
            uint32_t bufIdx = (uint32_t)job->jpeg_settings->out_buf_index;
            CDBG("%s: jpeg out_buf_index: %d", __func__, bufIdx);

            //Construct jpeg transient header of type camera3_jpeg_blob_t
            //Append at the end of jpeg image of buf_filled_len size

            jpegHeader.jpeg_blob_id = CAMERA3_JPEG_BLOB_ID;
            if (JPEG_JOB_STATUS_DONE == status) {
                jpegHeader.jpeg_size = (uint32_t)p_output->buf_filled_len;
                char* jpeg_buf = (char *)p_output->buf_vaddr;

                ssize_t maxJpegSize = -1;

                // Gralloc buffer may have additional padding for 4K page size
                // Follow size guidelines based on spec since framework relies
                // on that to reach end of buffer and with it the header

                //Handle same as resultBuffer, but for readablity
                jpegBufferHandle =
                        (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);

                if (NULL != jpegBufferHandle) {
                    // NOTE(review): for BLOB streams the gralloc "width" appears
                    // to carry the allocated byte size — confirm against the
                    // gralloc implementation; clamped to the mapped size below.
                    maxJpegSize = ((private_handle_t*)(*jpegBufferHandle))->width;
                    if (maxJpegSize > obj->mMemory.getSize(bufIdx)) {
                        maxJpegSize = obj->mMemory.getSize(bufIdx);
                    }

                    // Blob header lives in the last sizeof(jpegHeader) bytes.
                    size_t jpeg_eof_offset =
                            (size_t)(maxJpegSize - (ssize_t)sizeof(jpegHeader));
                    char *jpeg_eof = &jpeg_buf[jpeg_eof_offset];
                    memcpy(jpeg_eof, &jpegHeader, sizeof(jpegHeader));
                    obj->mMemory.cleanInvalidateCache(bufIdx);
                } else {
                    ALOGE("%s: JPEG buffer not found and index: %d",
                            __func__,
                            bufIdx);
                    resultStatus = CAMERA3_BUFFER_STATUS_ERROR;
                }
            }

            ////Use below data to issue framework callback
            resultBuffer =
                    (buffer_handle_t *)obj->mMemory.getBufferHandle(bufIdx);
            // Capture the frame number before unregistering, which drops the
            // buffer's bookkeeping entry.
            int32_t resultFrameNumber = obj->mMemory.getFrameNumber(bufIdx);
            int32_t rc = obj->mMemory.unregisterBuffer(bufIdx);
            if (NO_ERROR != rc) {
                ALOGE("%s: Error %d unregistering stream buffer %d",
                    __func__, rc, bufIdx);
            }

            result.stream = obj->mCamera3Stream;
            result.buffer = resultBuffer;
            result.status = resultStatus;
            result.acquire_fence = -1;
            result.release_fence = -1;

            // Release any snapshot buffers before calling
            // the user callback. The callback can potentially
            // unblock pending requests to snapshot stream.
            int32_t snapshotIdx = -1;
            mm_camera_super_buf_t* src_frame = NULL;

            // Prefer the reprocess source frame when present (reprocess path),
            // otherwise the regular capture source frame.
            if (job->src_reproc_frame)
                src_frame = job->src_reproc_frame;
            else
                src_frame = job->src_frame;

            if (src_frame) {
                if (obj->mStreams[0]->getMyHandle() ==
                        src_frame->bufs[0]->stream_id) {
                    snapshotIdx = (int32_t)src_frame->bufs[0]->buf_idx;
                } else {
                    ALOGE("%s: Snapshot stream id %d and source frame %d don't match!",
                            __func__, obj->mStreams[0]->getMyHandle(),
                            src_frame->bufs[0]->stream_id);
                }
            }
            if (0 <= snapshotIdx) {
                Mutex::Autolock lock(obj->mFreeBuffersLock);
                obj->mFreeBufferList.push_back((uint32_t)snapshotIdx);
            } else {
                ALOGE("%s: Snapshot buffer not found!", __func__);
            }

            CDBG("%s: Issue Callback", __func__);
            obj->mChannelCB(NULL,
                    &result,
                    (uint32_t)resultFrameNumber,
                    obj->mUserData);

            // release internal data for jpeg job
            if ((NULL != job->fwk_frame) || (NULL != job->fwk_src_buffer)) {
                obj->mOfflineMetaMemory.deallocate();
                obj->mOfflineMemory.unregisterBuffers();
            }
            obj->m_postprocessor.releaseOfflineBuffers();
            obj->m_postprocessor.releaseJpegJobData(job);
            free(job);
        }

        return;
        // }
    } else {
        ALOGE("%s: Null userdata in jpeg callback", __func__);
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3PicChannel
 *
 * DESCRIPTION: Constructor for the snapshot (JPEG) channel. Caches stream
 *              dimensions/format and initializes the postprocessor with
 *              mMemory as the jpeg destination and jpegEvtHandle as the
 *              completion callback.
 *
 * PARAMETERS :
 *   @cam_handle      : camera handle
 *   @cam_ops         : ptr to camera ops table
 *   @cb_routine      : channel callback into HWI
 *   @paddingInfo     : stream padding info
 *   @userData        : cookie (QCamera3HardwareInterface*)
 *   @stream          : framework camera3 stream
 *   @postprocess_mask: PP feature mask
 *   @is4KVideo       : true when snapshot shares format with 4K video
 *   @metadataChannel : companion metadata channel
 *   @numBuffers      : max dequeued buffers
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3PicChannel::QCamera3PicChannel(uint32_t cam_handle,
                    mm_camera_ops_t *cam_ops,
                    channel_cb_routine cb_routine,
                    cam_padding_info_t *paddingInfo,
                    void *userData,
                    camera3_stream_t *stream,
                    uint32_t postprocess_mask,
                    bool is4KVideo,
                    QCamera3Channel *metadataChannel,
                    uint32_t numBuffers) :
                        QCamera3Channel(cam_handle, cam_ops, cb_routine,
                        paddingInfo, postprocess_mask, userData, numBuffers),
                        m_postprocessor(this),
                        mCamera3Stream(stream),
                        mNumBufsRegistered(CAM_MAX_NUM_BUFS_PER_STREAM),
                        mNumSnapshotBufs(0),
                        mCurrentBufIndex(0U),
                        mPostProcStarted(false),
                        mInputBufferConfig(false),
                        mYuvMemory(NULL),
                        m_pMetaChannel(metadataChannel),
                        mMetaFrame(NULL)
{
    QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
    m_max_pic_dim = hal_obj->calcMaxJpegDim();
    mYuvWidth = stream->width;
    mYuvHeight = stream->height;
    mStreamType = CAM_STREAM_TYPE_SNAPSHOT;
    // Use same pixelformat for 4K video case
    mStreamFormat = is4KVideo ? VIDEO_FORMAT : SNAPSHOT_FORMAT;
    int32_t rc = m_postprocessor.init(&mMemory, jpegEvtHandle, mPostProcMask,
            this);
    if (rc != 0) {
        ALOGE("Init Postprocessor failed");
    }
}

/*===========================================================================
 * FUNCTION   : stop
 *
 * DESCRIPTION: stop pic channel, which will stop all streams within, including
 *              the reprocessing channel in postprocessor and YUV stream.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3PicChannel::stop()
{
    int32_t rc = NO_ERROR;
    if(!m_bIsActive) {
        ALOGE("%s: Attempt to stop inactive channel",__func__);
        return rc;
    }

    // Stop the jpeg postprocessor first so no callbacks arrive while the
    // underlying streams are being torn down.
    m_postprocessor.stop();
    mPostProcStarted = false;
    rc |= QCamera3Channel::stop();
    return rc;
}

// Destructor: stops the channel, deinitializes the postprocessor, and
// releases any offline (framework reprocess) buffers still held.
QCamera3PicChannel::~QCamera3PicChannel()
{
   stop();

   int32_t rc = m_postprocessor.deinit();
   if (rc != 0) {
       ALOGE("De-init Postprocessor failed");
   }

   if (0 < mOfflineMetaMemory.getCnt()) {
       mOfflineMetaMemory.deallocate();
   }
   if (0 < mOfflineMemory.getCnt()) {
       mOfflineMemory.unregisterBuffers();
   }
}

// Creates the single snapshot stream (lazy — first registerBuffer/request
// triggers it) and primes the free-buffer list with all buffer indices.
int32_t QCamera3PicChannel::initialize(cam_is_type_t isType)
{
    int32_t rc = NO_ERROR;
    cam_dimension_t streamDim;
    cam_stream_type_t streamType;
    cam_format_t streamFormat;
    mm_camera_channel_attr_t attr;

    if (NULL == mCamera3Stream) {
        ALOGE("%s: Camera stream uninitialized", __func__);
        return NO_INIT;
    }

    if (1 <= m_numStreams) {
        // Only one stream per channel supported in v3 Hal
        return NO_ERROR;
    }

    // Burst-mode notify: deliver matched super-buffers as they complete.
    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
    attr.look_back = 1;
    attr.post_frame_skip = 1;
    attr.water_mark = 1;
    attr.max_unmatched_frames = 1;

    rc = init(&attr, NULL);
    if (rc < 0) {
        ALOGE("%s: init failed", __func__);
        return rc;
    }
    mIsType = isType;
    streamType = mStreamType;
    streamFormat = mStreamFormat;
    streamDim.width = (int32_t)mYuvWidth;
    streamDim.height = (int32_t)mYuvHeight;

    mNumSnapshotBufs = mCamera3Stream->max_buffers;
    rc = QCamera3Channel::addStream(streamType, streamFormat, streamDim,
            (uint8_t)mCamera3Stream->max_buffers, mPostProcMask, mIsType);

    Mutex::Autolock lock(mFreeBuffersLock);
    mFreeBufferList.clear();
    for (uint32_t i = 0; i < mCamera3Stream->max_buffers; i++) {
        mFreeBufferList.push_back(i);
    }

    return rc;
}

// Queues one snapshot request. When pInputBuffer is NULL this is a regular
// capture (a free YUV buffer is handed to the stream); otherwise it is a
// framework reprocess request and the input buffer plus a copy of the
// metadata are packaged for the postprocessor.
int32_t QCamera3PicChannel::request(buffer_handle_t *buffer,
        uint32_t frameNumber,
        camera3_stream_buffer_t *pInputBuffer,
        metadata_buffer_t *metadata)
{
    ATRACE_CALL();
    //FIX ME: Return buffer back in case of failures below.

    int32_t rc = NO_ERROR;

    reprocess_config_t reproc_cfg;
    memset(&reproc_cfg, 0, sizeof(reprocess_config_t));
    reproc_cfg.padding = mPaddingInfo;
    //to ensure a big enough buffer size set the height and width
    //padding to max(height padding, width padding)
    // NOTE(review): this writes through the shared mPaddingInfo pointer, so
    // the channel's padding info is permanently raised to the max — confirm
    // this side effect is intended.
    if (reproc_cfg.padding->height_padding > reproc_cfg.padding->width_padding) {
       reproc_cfg.padding->width_padding = reproc_cfg.padding->height_padding;
    } else {
       reproc_cfg.padding->height_padding = reproc_cfg.padding->width_padding;
    }

    reproc_cfg.input_stream_dim.width = (int32_t)mYuvWidth;
    reproc_cfg.input_stream_dim.height = (int32_t)mYuvHeight;
    if (NULL == pInputBuffer)
        reproc_cfg.src_channel = this;

    reproc_cfg.output_stream_dim.width = (int32_t)mCamera3Stream->width;
    reproc_cfg.output_stream_dim.height = (int32_t)mCamera3Stream->height;
    reproc_cfg.stream_type = mStreamType;
    reproc_cfg.stream_format = mStreamFormat;
    rc = mm_stream_calc_offset_snapshot(mStreamFormat, &reproc_cfg.input_stream_dim,
            reproc_cfg.padding, &reproc_cfg.input_stream_plane_info);
    if (rc != 0) {
        ALOGE("%s: Snapshot stream plane info calculation failed!", __func__);
        return rc;
    }

    // Picture stream has already been started before any request comes in
    if (!m_bIsActive) {
        ALOGE("%s: Channel not started!!", __func__);
        return NO_INIT;
    }

    int index = mMemory.getMatchBufIndex((void*)buffer);

    if(index < 0) {
        // Output buffer not seen before: register it on the fly.
        rc = registerBuffer(buffer, mIsType);
        if (NO_ERROR != rc) {
            ALOGE("%s: On-the-fly buffer registration failed %d",
                    __func__, rc);
            return rc;
        }

        index = mMemory.getMatchBufIndex((void*)buffer);
        if (index < 0) {
            ALOGE("%s: Could not find object among registered buffers",__func__);
            return DEAD_OBJECT;
        }
    }
    CDBG("%s: buffer index %d, frameNumber: %u", __func__, index, frameNumber);

    rc = mMemory.markFrameNumber((uint32_t)index, frameNumber);

    //Start the postprocessor for jpeg encoding. Pass mMemory as destination buffer
    mCurrentBufIndex = (uint32_t)index;

    // Start postprocessor
    // This component needs to be re-configured
    // once we switch from input(framework) buffer
    // reprocess to standard capture!
    bool restartNeeded = ((!mInputBufferConfig) != (NULL != pInputBuffer));
    if((!mPostProcStarted) || restartNeeded) {
        m_postprocessor.start(reproc_cfg, metadata);
        mPostProcStarted = true;
        mInputBufferConfig = (NULL == pInputBuffer);
    }

    // Queue jpeg settings
    rc = queueJpegSetting((uint32_t)index, metadata);

    if (pInputBuffer == NULL) {
        // Regular capture: hand one free snapshot buffer to the stream.
        Mutex::Autolock lock(mFreeBuffersLock);
        if (!mFreeBufferList.empty()) {
            List<uint32_t>::iterator it = mFreeBufferList.begin();
            uint32_t freeBuffer = *it;
            mStreams[0]->bufDone(freeBuffer);
            mFreeBufferList.erase(it);
        } else {
            ALOGE("%s: No snapshot buffers available!", __func__);
            rc = NOT_ENOUGH_DATA;
        }
    } else {
        // Reprocess path: drop any previous offline buffers before
        // registering the new framework input buffer.
        if (0 < mOfflineMetaMemory.getCnt()) {
            mOfflineMetaMemory.deallocate();
        }
        if (0 < mOfflineMemory.getCnt()) {
            mOfflineMemory.unregisterBuffers();
        }

        int input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
        if(input_index < 0) {
            rc = mOfflineMemory.registerBuffer(pInputBuffer->buffer, mStreamType);
            if (NO_ERROR != rc) {
                ALOGE("%s: On-the-fly input buffer registration failed %d",
                        __func__, rc);
                return rc;
            }

            input_index = mOfflineMemory.getMatchBufIndex((void*)pInputBuffer->buffer);
            if (input_index < 0) {
                ALOGE("%s: Could not find object among registered buffers",__func__);
                return DEAD_OBJECT;
            }
        }
        qcamera_fwk_input_pp_data_t *src_frame = NULL;
        src_frame = (qcamera_fwk_input_pp_data_t *)malloc(
                sizeof(qcamera_fwk_input_pp_data_t));
        if (src_frame == NULL) {
            ALOGE("%s: No memory for src frame", __func__);
            return NO_MEMORY;
        }
        memset(src_frame, 0, sizeof(qcamera_fwk_input_pp_data_t));
        src_frame->src_frame = *pInputBuffer;
        rc = mOfflineMemory.getBufDef(reproc_cfg.input_stream_plane_info.plane_info,
                src_frame->input_buffer, (uint32_t)input_index);
        if (rc != 0) {
            free(src_frame);
            return rc;
        }
        if (mYUVDump) {
           dumpYUV(&src_frame->input_buffer, reproc_cfg.input_stream_dim,
                   reproc_cfg.input_stream_plane_info.plane_info, 1);
        }
        // Metadata travels as a one-line "image" whose width is the struct size.
        cam_dimension_t dim = {(int)sizeof(metadata_buffer_t), 1};
        cam_stream_buf_plane_info_t meta_planes;
        rc = mm_stream_calc_offset_metadata(&dim, mPaddingInfo, &meta_planes);
        if (rc != 0) {
            ALOGE("%s: Metadata stream plane info calculation failed!", __func__);
            free(src_frame);
            return rc;
        }

        rc = mOfflineMetaMemory.allocate(1, sizeof(metadata_buffer_t), false);
        if (NO_ERROR != rc) {
            ALOGE("%s: Couldn't allocate offline metadata buffer!", __func__);
            free(src_frame);
            return rc;
        }
        mm_camera_buf_def_t meta_buf;
        cam_frame_len_offset_t offset = meta_planes.plane_info;
        rc = mOfflineMetaMemory.getBufDef(offset, meta_buf, 0);
        if (NO_ERROR != rc) {
            free(src_frame);
            return rc;
        }
        // Snapshot the caller's metadata; the postprocessor owns the copy.
        memcpy(meta_buf.buffer, metadata, sizeof(metadata_buffer_t));
        src_frame->metadata_buffer = meta_buf;
        src_frame->reproc_config = reproc_cfg;

        CDBG_HIGH("%s: Post-process started", __func__);
        CDBG_HIGH("%s: Issue call to reprocess", __func__);

        m_postprocessor.processData(src_frame);
    }
    return rc;
}


/*===========================================================================
 * FUNCTION   : metadataBufDone
 *
 * DESCRIPTION: Buffer done method for a metadata buffer
 *
 * PARAMETERS :
 * @recvd_frame : received metadata frame
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3PicChannel::metadataBufDone(mm_camera_super_buf_t *recvd_frame)
{
    int32_t rc = NO_ERROR;;
    if ((NULL == m_pMetaChannel) || (NULL == recvd_frame)) {
        ALOGE("%s: Metadata channel or metadata buffer invalid", __func__);
        return BAD_VALUE;
    }

    // Delegate to the owning metadata channel; this channel only borrows
    // the metadata frames.
    rc = ((QCamera3MetadataChannel*)m_pMetaChannel)->bufDone(recvd_frame);

    return rc;
}

/*===========================================================================
 * FUNCTION   : dataNotifyCB
 *
 * DESCRIPTION: Channel Level callback used for super buffer data notify.
1867 * This function is registered with mm-camera-interface to handle 1868 * data notify 1869 * 1870 * PARAMETERS : 1871 * @recvd_frame : stream frame received 1872 * userdata : user data ptr 1873 * 1874 * RETURN : none 1875 *==========================================================================*/ 1876void QCamera3PicChannel::dataNotifyCB(mm_camera_super_buf_t *recvd_frame, 1877 void *userdata) 1878{ 1879 ATRACE_CALL(); 1880 CDBG("%s: E\n", __func__); 1881 QCamera3PicChannel *channel = (QCamera3PicChannel *)userdata; 1882 1883 if (channel == NULL) { 1884 ALOGE("%s: invalid channel pointer", __func__); 1885 return; 1886 } 1887 1888 if(channel->m_numStreams != 1) { 1889 ALOGE("%s: Error: Bug: This callback assumes one stream per channel",__func__); 1890 return; 1891 } 1892 1893 1894 if(channel->mStreams[0] == NULL) { 1895 ALOGE("%s: Error: Invalid Stream object",__func__); 1896 return; 1897 } 1898 1899 channel->QCamera3PicChannel::streamCbRoutine(recvd_frame, channel->mStreams[0]); 1900 1901 CDBG("%s: X\n", __func__); 1902 return; 1903} 1904 1905/*=========================================================================== 1906 * FUNCTION : registerBuffer 1907 * 1908 * DESCRIPTION: register streaming buffer to the channel object 1909 * 1910 * PARAMETERS : 1911 * @buffer : buffer to be registered 1912 * @isType : type of image stabilization required on this channel 1913 * 1914 * RETURN : int32_t type of status 1915 * NO_ERROR -- success 1916 * none-zero failure code 1917 *==========================================================================*/ 1918int32_t QCamera3PicChannel::registerBuffer(buffer_handle_t *buffer, cam_is_type_t isType) 1919{ 1920 int rc = 0; 1921 mIsType = isType; 1922 if ((uint32_t)mMemory.getCnt() > (mNumBufsRegistered - 1)) { 1923 ALOGE("%s: Trying to register more buffers than initially requested", 1924 __func__); 1925 return BAD_VALUE; 1926 } 1927 1928 if (0 == m_numStreams) { 1929 rc = initialize(mIsType); 1930 if (rc != NO_ERROR) { 1931 
ALOGE("%s: Couldn't initialize camera stream %d", 1932 __func__, rc); 1933 return rc; 1934 } 1935 } 1936 1937 rc = mMemory.registerBuffer(buffer, mStreamType); 1938 if (ALREADY_EXISTS == rc) { 1939 return NO_ERROR; 1940 } else if (NO_ERROR != rc) { 1941 ALOGE("%s: Buffer %p couldn't be registered %d", __func__, buffer, rc); 1942 return rc; 1943 } 1944 1945 CDBG("%s: X",__func__); 1946 1947 return rc; 1948} 1949 1950void QCamera3PicChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame, 1951 QCamera3Stream *stream) 1952{ 1953 ATRACE_CALL(); 1954 //TODO 1955 //Used only for getting YUV. Jpeg callback will be sent back from channel 1956 //directly to HWI. Refer to func jpegEvtHandle 1957 1958 //Got the yuv callback. Calling yuv callback handler in PostProc 1959 uint8_t frameIndex; 1960 mm_camera_super_buf_t* frame = NULL; 1961 if(!super_frame) { 1962 ALOGE("%s: Invalid Super buffer",__func__); 1963 return; 1964 } 1965 1966 if(super_frame->num_bufs != 1) { 1967 ALOGE("%s: Multiple streams are not supported",__func__); 1968 return; 1969 } 1970 if(super_frame->bufs[0] == NULL ) { 1971 ALOGE("%s: Error, Super buffer frame does not contain valid buffer", 1972 __func__); 1973 return; 1974 } 1975 1976 frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx; 1977 CDBG("%s: recvd buf_idx: %u for further processing", 1978 __func__, (uint32_t)frameIndex); 1979 if(frameIndex >= mNumSnapshotBufs) { 1980 ALOGE("%s: Error, Invalid index for buffer",__func__); 1981 if(stream) { 1982 Mutex::Autolock lock(mFreeBuffersLock); 1983 mFreeBufferList.push_back(frameIndex); 1984 stream->bufDone(frameIndex); 1985 } 1986 return; 1987 } 1988 1989 frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t)); 1990 if (frame == NULL) { 1991 ALOGE("%s: Error allocating memory to save received_frame structure.", 1992 __func__); 1993 if(stream) { 1994 Mutex::Autolock lock(mFreeBuffersLock); 1995 mFreeBufferList.push_back(frameIndex); 1996 stream->bufDone(frameIndex); 1997 } 1998 return; 
1999 } 2000 *frame = *super_frame; 2001 2002 if (mYUVDump) { 2003 cam_dimension_t dim; 2004 memset(&dim, 0, sizeof(dim)); 2005 stream->getFrameDimension(dim); 2006 cam_frame_len_offset_t offset; 2007 memset(&offset, 0, sizeof(cam_frame_len_offset_t)); 2008 stream->getFrameOffset(offset); 2009 dumpYUV(frame->bufs[0], dim, offset, 1); 2010 } 2011 2012 m_postprocessor.processData(frame); 2013 free(super_frame); 2014 return; 2015} 2016 2017QCamera3Memory* QCamera3PicChannel::getStreamBufs(uint32_t len) 2018{ 2019 int rc = 0; 2020 2021 mYuvMemory = new QCamera3HeapMemory(); 2022 if (!mYuvMemory) { 2023 ALOGE("%s: unable to create metadata memory", __func__); 2024 return NULL; 2025 } 2026 2027 //Queue YUV buffers in the beginning mQueueAll = true 2028 rc = mYuvMemory->allocate(mCamera3Stream->max_buffers, len, false); 2029 if (rc < 0) { 2030 ALOGE("%s: unable to allocate metadata memory", __func__); 2031 delete mYuvMemory; 2032 mYuvMemory = NULL; 2033 return NULL; 2034 } 2035 return mYuvMemory; 2036} 2037 2038void QCamera3PicChannel::putStreamBufs() 2039{ 2040 mMemory.unregisterBuffers(); 2041 2042 mYuvMemory->deallocate(); 2043 delete mYuvMemory; 2044 mYuvMemory = NULL; 2045} 2046 2047int32_t QCamera3PicChannel::queueReprocMetadata(mm_camera_super_buf_t *metadata) 2048{ 2049 return m_postprocessor.processPPMetadata(metadata); 2050} 2051 2052int32_t QCamera3PicChannel::queueJpegSetting(uint32_t index, metadata_buffer_t *metadata) 2053{ 2054 jpeg_settings_t *settings = 2055 (jpeg_settings_t *)malloc(sizeof(jpeg_settings_t)); 2056 2057 if (!settings) { 2058 ALOGE("%s: out of memory allocating jpeg_settings", __func__); 2059 return -ENOMEM; 2060 } 2061 2062 memset(settings, 0, sizeof(jpeg_settings_t)); 2063 2064 settings->out_buf_index = index; 2065 2066 settings->jpeg_orientation = 0; 2067 IF_META_AVAILABLE(int32_t, orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) { 2068 settings->jpeg_orientation = *orientation; 2069 } 2070 2071 settings->jpeg_quality = 85; 2072 
IF_META_AVAILABLE(uint32_t, quality1, CAM_INTF_META_JPEG_QUALITY, metadata) { 2073 settings->jpeg_quality = (uint8_t) *quality1; 2074 } 2075 2076 IF_META_AVAILABLE(uint32_t, quality2, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) { 2077 settings->jpeg_thumb_quality = (uint8_t) *quality2; 2078 } 2079 2080 IF_META_AVAILABLE(cam_dimension_t, dimension, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) { 2081 settings->thumbnail_size = *dimension; 2082 } 2083 2084 settings->gps_timestamp_valid = 0; 2085 IF_META_AVAILABLE(int64_t, timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) { 2086 settings->gps_timestamp = *timestamp; 2087 settings->gps_timestamp_valid = 1; 2088 } 2089 2090 settings->gps_coordinates_valid = 0; 2091 IF_META_AVAILABLE(double, coordinates, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) { 2092 memcpy(settings->gps_coordinates, coordinates, 3*sizeof(double)); 2093 settings->gps_coordinates_valid = 1; 2094 } 2095 2096 IF_META_AVAILABLE(uint8_t, proc_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) { 2097 memset(settings->gps_processing_method, 0, 2098 sizeof(settings->gps_processing_method)); 2099 strlcpy(settings->gps_processing_method, (const char *)proc_methods, 2100 sizeof(settings->gps_processing_method)+1); 2101 } 2102 2103 return m_postprocessor.processJpegSettingData(settings); 2104} 2105 2106/*=========================================================================== 2107 * FUNCTION : getRational 2108 * 2109 * DESCRIPTION: compose rational struct 2110 * 2111 * PARAMETERS : 2112 * @rat : ptr to struct to store rational info 2113 * @num :num of the rational 2114 * @denom : denom of the rational 2115 * 2116 * RETURN : int32_t type of status 2117 * NO_ERROR -- success 2118 * none-zero failure code 2119 *==========================================================================*/ 2120int32_t getRational(rat_t *rat, int num, int denom) 2121{ 2122 if ((0 > num) || (0 > denom)) { 2123 ALOGE("%s: Negative values", __func__); 2124 return BAD_VALUE; 2125 
} 2126 if (NULL == rat) { 2127 ALOGE("%s: NULL rat input", __func__); 2128 return BAD_VALUE; 2129 } 2130 rat->num = (uint32_t)num; 2131 rat->denom = (uint32_t)denom; 2132 return NO_ERROR; 2133} 2134 2135/*=========================================================================== 2136 * FUNCTION : parseGPSCoordinate 2137 * 2138 * DESCRIPTION: parse GPS coordinate string 2139 * 2140 * PARAMETERS : 2141 * @coord_str : [input] coordinate string 2142 * @coord : [output] ptr to struct to store coordinate 2143 * 2144 * RETURN : int32_t type of status 2145 * NO_ERROR -- success 2146 * none-zero failure code 2147 *==========================================================================*/ 2148int parseGPSCoordinate(const char *coord_str, rat_t* coord) 2149{ 2150 if(coord == NULL) { 2151 ALOGE("%s: error, invalid argument coord == NULL", __func__); 2152 return BAD_VALUE; 2153 } 2154 double degF = atof(coord_str); 2155 if (degF < 0) { 2156 degF = -degF; 2157 } 2158 double minF = (degF - (int) degF) * 60; 2159 double secF = (minF - (int) minF) * 60; 2160 2161 getRational(&coord[0], (int)degF, 1); 2162 getRational(&coord[1], (int)minF, 1); 2163 getRational(&coord[2], (int)(secF * 10000), 10000); 2164 return NO_ERROR; 2165} 2166 2167/*=========================================================================== 2168 * FUNCTION : getExifDateTime 2169 * 2170 * DESCRIPTION: query exif date time 2171 * 2172 * PARAMETERS : 2173 * @dateTime : string to store exif date time 2174 * @subsecTime : string to store exif subsec time 2175 * 2176 * RETURN : int32_t type of status 2177 * NO_ERROR -- success 2178 * none-zero failure code 2179 *==========================================================================*/ 2180int32_t getExifDateTime(String8 &dateTime, String8 &subsecTime) 2181{ 2182 int32_t ret = NO_ERROR; 2183 2184 //get time and date from system 2185 struct timeval tv; 2186 struct tm timeinfo_data; 2187 2188 int res = gettimeofday(&tv, NULL); 2189 if (0 == res) { 2190 struct tm 
*timeinfo = localtime_r(&tv.tv_sec, &timeinfo_data); 2191 if (NULL != timeinfo) { 2192 //Write datetime according to EXIF Spec 2193 //"YYYY:MM:DD HH:MM:SS" (20 chars including \0) 2194 dateTime = String8::format("%04d:%02d:%02d %02d:%02d:%02d", 2195 timeinfo->tm_year + 1900, timeinfo->tm_mon + 1, 2196 timeinfo->tm_mday, timeinfo->tm_hour, 2197 timeinfo->tm_min, timeinfo->tm_sec); 2198 //Write subsec according to EXIF Sepc 2199 subsecTime = String8::format("%06ld", tv.tv_usec); 2200 } else { 2201 ALOGE("%s: localtime_r() error", __func__); 2202 ret = UNKNOWN_ERROR; 2203 } 2204 } else if (-1 == res) { 2205 ALOGE("%s: gettimeofday() error: %s", __func__, strerror(errno)); 2206 ret = UNKNOWN_ERROR; 2207 } else { 2208 ALOGE("%s: gettimeofday() unexpected return code: %d", __func__, res); 2209 ret = UNKNOWN_ERROR; 2210 } 2211 2212 return ret; 2213} 2214 2215/*=========================================================================== 2216 * FUNCTION : getExifFocalLength 2217 * 2218 * DESCRIPTION: get exif focal lenght 2219 * 2220 * PARAMETERS : 2221 * @focalLength : ptr to rational strcut to store focal lenght 2222 * 2223 * RETURN : int32_t type of status 2224 * NO_ERROR -- success 2225 * none-zero failure code 2226 *==========================================================================*/ 2227int32_t getExifFocalLength(rat_t *focalLength, float value) 2228{ 2229 int focalLengthValue = 2230 (int)(value * FOCAL_LENGTH_DECIMAL_PRECISION); 2231 return getRational(focalLength, focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION); 2232} 2233 2234/*=========================================================================== 2235 * FUNCTION : getExifExpTimeInfo 2236 * 2237 * DESCRIPTION: get exif exposure time information 2238 * 2239 * PARAMETERS : 2240 * @expoTimeInfo : expousure time value 2241 * RETURN : nt32_t type of status 2242 * NO_ERROR -- success 2243 * none-zero failure code 2244 *==========================================================================*/ 2245int32_t 
getExifExpTimeInfo(rat_t *expoTimeInfo, int64_t value) 2246{ 2247 2248 int64_t cal_exposureTime; 2249 if (value != 0) 2250 cal_exposureTime = value; 2251 else 2252 cal_exposureTime = 60; 2253 2254 return getRational(expoTimeInfo, 1, (int)cal_exposureTime); 2255} 2256 2257/*=========================================================================== 2258 * FUNCTION : getExifGpsProcessingMethod 2259 * 2260 * DESCRIPTION: get GPS processing method 2261 * 2262 * PARAMETERS : 2263 * @gpsProcessingMethod : string to store GPS process method 2264 * @count : lenght of the string 2265 * 2266 * RETURN : int32_t type of status 2267 * NO_ERROR -- success 2268 * none-zero failure code 2269 *==========================================================================*/ 2270int32_t getExifGpsProcessingMethod(char *gpsProcessingMethod, 2271 uint32_t &count, char* value) 2272{ 2273 if(value != NULL) { 2274 memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE); 2275 count = EXIF_ASCII_PREFIX_SIZE; 2276 strlcpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, 2277 value, 2278 strlen(value)+1); 2279 count += (uint32_t)strlen(value); 2280 gpsProcessingMethod[count++] = '\0'; // increase 1 for the last NULL char 2281 return NO_ERROR; 2282 } else { 2283 return BAD_VALUE; 2284 } 2285} 2286 2287/*=========================================================================== 2288 * FUNCTION : getExifLatitude 2289 * 2290 * DESCRIPTION: get exif latitude 2291 * 2292 * PARAMETERS : 2293 * @latitude : ptr to rational struct to store latitude info 2294 * @ladRef : charater to indicate latitude reference 2295 * 2296 * RETURN : int32_t type of status 2297 * NO_ERROR -- success 2298 * none-zero failure code 2299 *==========================================================================*/ 2300int32_t getExifLatitude(rat_t *latitude, 2301 char *latRef, double value) 2302{ 2303 char str[30]; 2304 snprintf(str, sizeof(str), "%f", value); 2305 if(str != NULL) { 2306 parseGPSCoordinate(str, 
latitude); 2307 2308 //set Latitude Ref 2309 float latitudeValue = strtof(str, 0); 2310 if(latitudeValue < 0.0f) { 2311 latRef[0] = 'S'; 2312 } else { 2313 latRef[0] = 'N'; 2314 } 2315 latRef[1] = '\0'; 2316 return NO_ERROR; 2317 }else{ 2318 return BAD_VALUE; 2319 } 2320} 2321 2322/*=========================================================================== 2323 * FUNCTION : getExifLongitude 2324 * 2325 * DESCRIPTION: get exif longitude 2326 * 2327 * PARAMETERS : 2328 * @longitude : ptr to rational struct to store longitude info 2329 * @lonRef : charater to indicate longitude reference 2330 * 2331 * RETURN : int32_t type of status 2332 * NO_ERROR -- success 2333 * none-zero failure code 2334 *==========================================================================*/ 2335int32_t getExifLongitude(rat_t *longitude, 2336 char *lonRef, double value) 2337{ 2338 char str[30]; 2339 snprintf(str, sizeof(str), "%f", value); 2340 if(str != NULL) { 2341 parseGPSCoordinate(str, longitude); 2342 2343 //set Longitude Ref 2344 float longitudeValue = strtof(str, 0); 2345 if(longitudeValue < 0.0f) { 2346 lonRef[0] = 'W'; 2347 } else { 2348 lonRef[0] = 'E'; 2349 } 2350 lonRef[1] = '\0'; 2351 return NO_ERROR; 2352 }else{ 2353 return BAD_VALUE; 2354 } 2355} 2356 2357/*=========================================================================== 2358 * FUNCTION : getExifAltitude 2359 * 2360 * DESCRIPTION: get exif altitude 2361 * 2362 * PARAMETERS : 2363 * @altitude : ptr to rational struct to store altitude info 2364 * @altRef : charater to indicate altitude reference 2365 * @argValue : altitude value 2366 * 2367 * RETURN : int32_t type of status 2368 * NO_ERROR -- success 2369 * none-zero failure code 2370 *==========================================================================*/ 2371int32_t getExifAltitude(rat_t *altitude, char *altRef, double argValue) 2372{ 2373 char str[30]; 2374 snprintf(str, sizeof(str), "%f", argValue); 2375 if (str != NULL) { 2376 double value = atof(str); 
2377 *altRef = 0; 2378 if(value < 0){ 2379 *altRef = 1; 2380 value = -value; 2381 } 2382 return getRational(altitude, (int)(value * 1000), 1000); 2383 } else { 2384 return BAD_VALUE; 2385 } 2386} 2387 2388/*=========================================================================== 2389 * FUNCTION : getExifGpsDateTimeStamp 2390 * 2391 * DESCRIPTION: get exif GPS date time stamp 2392 * 2393 * PARAMETERS : 2394 * @gpsDateStamp : GPS date time stamp string 2395 * @bufLen : length of the string 2396 * @gpsTimeStamp : ptr to rational struct to store time stamp info 2397 * 2398 * RETURN : int32_t type of status 2399 * NO_ERROR -- success 2400 * none-zero failure code 2401 *==========================================================================*/ 2402int32_t getExifGpsDateTimeStamp(char *gpsDateStamp, 2403 uint32_t bufLen, 2404 rat_t *gpsTimeStamp, int64_t value) 2405{ 2406 char str[30]; 2407 snprintf(str, sizeof(str), "%lld", (long long int)value); 2408 if(str != NULL) { 2409 time_t unixTime = (time_t)atol(str); 2410 struct tm *UTCTimestamp = gmtime(&unixTime); 2411 if (UTCTimestamp != NULL) { 2412 strftime(gpsDateStamp, bufLen, "%Y:%m:%d", UTCTimestamp); 2413 2414 getRational(&gpsTimeStamp[0], UTCTimestamp->tm_hour, 1); 2415 getRational(&gpsTimeStamp[1], UTCTimestamp->tm_min, 1); 2416 getRational(&gpsTimeStamp[2], UTCTimestamp->tm_sec, 1); 2417 return NO_ERROR; 2418 } else { 2419 ALOGE("%s: Could not get the timestamp", __func__); 2420 return BAD_VALUE; 2421 } 2422 } else { 2423 return BAD_VALUE; 2424 } 2425} 2426 2427int32_t getExifExposureValue(srat_t* exposure_val, int32_t exposure_comp, 2428 cam_rational_type_t step) 2429{ 2430 exposure_val->num = exposure_comp * step.numerator; 2431 exposure_val->denom = step.denominator; 2432 return 0; 2433} 2434/*=========================================================================== 2435 * FUNCTION : getExifData 2436 * 2437 * DESCRIPTION: get exif data to be passed into jpeg encoding 2438 * 2439 * PARAMETERS : none 2440 * 
 * RETURN     : exif data from user setting and GPS
 *==========================================================================*/
QCamera3Exif *QCamera3PicChannel::getExifData(metadata_buffer_t *metadata,
        jpeg_settings_t *jpeg_settings)
{
    QCamera3Exif *exif = new QCamera3Exif();
    // NOTE(review): plain operator new throws on failure, so this NULL
    // check can never fire — confirm whether new (std::nothrow) was
    // intended here.
    if (exif == NULL) {
        ALOGE("%s: No memory for QCamera3Exif", __func__);
        return NULL;
    }

    int32_t rc = NO_ERROR;
    uint32_t count = 0;

    // Date/time entries are always attempted, even without metadata;
    // failures are logged and the remaining tags are still populated.
    String8 dateTime;
    String8 subsecTime;
    rc = getExifDateTime(dateTime, subsecTime);
    if (rc == NO_ERROR) {
        exif->addEntry(EXIFTAGID_DATE_TIME, EXIF_ASCII,
                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
        exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
        exif->addEntry(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED, EXIF_ASCII,
                (uint32_t)(dateTime.length() + 1), (void *)dateTime.string());
        exif->addEntry(EXIFTAGID_SUBSEC_TIME, EXIF_ASCII,
                (uint32_t)(subsecTime.length() + 1), (void *)subsecTime.string());
        exif->addEntry(EXIFTAGID_SUBSEC_TIME_ORIGINAL, EXIF_ASCII,
                (uint32_t)(subsecTime.length() + 1), (void *)subsecTime.string());
        exif->addEntry(EXIFTAGID_SUBSEC_TIME_DIGITIZED, EXIF_ASCII,
                (uint32_t)(subsecTime.length() + 1), (void *)subsecTime.string());
    } else {
        ALOGE("%s: getExifDateTime failed", __func__);
    }


    if (metadata != NULL) {
        // Focal length, ISO and exposure time come from request metadata.
        IF_META_AVAILABLE(float, focal_length, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
            rat_t focalLength;
            rc = getExifFocalLength(&focalLength, *focal_length);
            if (rc == NO_ERROR) {
                exif->addEntry(EXIFTAGID_FOCAL_LENGTH,
                        EXIF_RATIONAL,
                        1,
                        (void *)&(focalLength));
            } else {
                ALOGE("%s: getExifFocalLength failed", __func__);
            }
        }

        IF_META_AVAILABLE(int32_t, isoSpeed, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
            // EXIF ISOSpeedRatings is a SHORT; narrow from int32_t.
            int16_t fwk_isoSpeed = (int16_t) *isoSpeed;
            exif->addEntry(EXIFTAGID_ISO_SPEED_RATING, EXIF_SHORT, 1, (void *) &(fwk_isoSpeed));
        }


        IF_META_AVAILABLE(int64_t, sensor_exposure_time,
                CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
            rat_t sensorExpTime;
            rc = getExifExpTimeInfo(&sensorExpTime, *sensor_exposure_time);
            if (rc == NO_ERROR){
                exif->addEntry(EXIFTAGID_EXPOSURE_TIME,
                        EXIF_RATIONAL,
                        1,
                        (void *)&(sensorExpTime));
            } else {
                ALOGE("%s: getExifExpTimeInfo failed", __func__);
            }
        }

        // GPS tags come from the per-request jpeg_settings, not metadata.
        char* jpeg_gps_processing_method = jpeg_settings->gps_processing_method;
        if (strlen(jpeg_gps_processing_method) > 0) {
            char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE +
                    GPS_PROCESSING_METHOD_SIZE];
            count = 0;
            rc = getExifGpsProcessingMethod(gpsProcessingMethod,
                    count,
                    jpeg_gps_processing_method);
            if(rc == NO_ERROR) {
                exif->addEntry(EXIFTAGID_GPS_PROCESSINGMETHOD,
                        EXIF_ASCII,
                        count,
                        (void *)gpsProcessingMethod);
            } else {
                ALOGE("%s: getExifGpsProcessingMethod failed", __func__);
            }
        }

        if (jpeg_settings->gps_coordinates_valid) {

            //latitude
            rat_t latitude[3];
            char latRef[2];
            rc = getExifLatitude(latitude, latRef,
                    jpeg_settings->gps_coordinates[0]);
            if(rc == NO_ERROR) {
                exif->addEntry(EXIFTAGID_GPS_LATITUDE,
                        EXIF_RATIONAL,
                        3,
                        (void *)latitude);
                exif->addEntry(EXIFTAGID_GPS_LATITUDE_REF,
                        EXIF_ASCII,
                        2,
                        (void *)latRef);
            } else {
                ALOGE("%s: getExifLatitude failed", __func__);
            }

            //longitude
            rat_t longitude[3];
            char lonRef[2];
            rc = getExifLongitude(longitude, lonRef,
                    jpeg_settings->gps_coordinates[1]);
            if(rc == NO_ERROR) {
                exif->addEntry(EXIFTAGID_GPS_LONGITUDE,
                        EXIF_RATIONAL,
                        3,
                        (void *)longitude);

                exif->addEntry(EXIFTAGID_GPS_LONGITUDE_REF,
                        EXIF_ASCII,
                        2,
                        (void *)lonRef);
            } else {
                ALOGE("%s: getExifLongitude failed", __func__);
            }

            //altitude
            rat_t altitude;
            char altRef;
            rc = getExifAltitude(&altitude, &altRef,
                    jpeg_settings->gps_coordinates[2]);
            if(rc == NO_ERROR) {
                exif->addEntry(EXIFTAGID_GPS_ALTITUDE,
                        EXIF_RATIONAL,
                        1,
                        (void *)&(altitude));

                exif->addEntry(EXIFTAGID_GPS_ALTITUDE_REF,
                        EXIF_BYTE,
                        1,
                        (void *)&altRef);
            } else {
                ALOGE("%s: getExifAltitude failed", __func__);
            }
        }

        if (jpeg_settings->gps_timestamp_valid) {

            char gpsDateStamp[20];
            rat_t gpsTimeStamp[3];
            rc = getExifGpsDateTimeStamp(gpsDateStamp, 20, gpsTimeStamp,
                    jpeg_settings->gps_timestamp);
            if(rc == NO_ERROR) {
                exif->addEntry(EXIFTAGID_GPS_DATESTAMP, EXIF_ASCII,
                        (uint32_t)(strlen(gpsDateStamp) + 1),
                        (void *)gpsDateStamp);

                exif->addEntry(EXIFTAGID_GPS_TIMESTAMP,
                        EXIF_RATIONAL,
                        3,
                        (void *)gpsTimeStamp);
            } else {
                ALOGE("%s: getExifGpsDataTimeStamp failed", __func__);
            }
        }

        // Exposure bias requires both the compensation index and the EV
        // step rational from metadata.
        IF_META_AVAILABLE(int32_t, exposure_comp, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
            IF_META_AVAILABLE(cam_rational_type_t, comp_step, CAM_INTF_PARM_EV_STEP, metadata) {
                srat_t exposure_val;
                rc = getExifExposureValue(&exposure_val, *exposure_comp, *comp_step);
                if(rc == NO_ERROR) {
                    exif->addEntry(EXIFTAGID_EXPOSURE_BIAS_VALUE,
                            EXIF_SRATIONAL,
                            1,
                            (void *)(&exposure_val));
                } else {
                    ALOGE("%s: getExifExposureValue failed ", __func__);
                }
            }
        }
    } else {
        ALOGE("%s: no metadata provided ", __func__);
    }

    // Make/model/software come from Android system properties, with
    // QCOM defaults when a property is unset.
    char value[PROPERTY_VALUE_MAX];
    if (property_get("ro.product.manufacturer", value, "QCOM-AA") > 0) {
        exif->addEntry(EXIFTAGID_MAKE, EXIF_ASCII,
                (uint32_t)(strlen(value) + 1), (void *)value);
    } else {
        ALOGE("%s: getExifMaker failed", __func__);
    }

    if (property_get("ro.product.model", value, "QCAM-AA") > 0) {
        exif->addEntry(EXIFTAGID_MODEL, EXIF_ASCII,
                (uint32_t)(strlen(value) + 1), (void *)value);
    } else {
        ALOGE("%s: getExifModel failed", __func__);
    }

    if (property_get("ro.build.description", value, "QCAM-AA") > 0) {
        exif->addEntry(EXIFTAGID_SOFTWARE, EXIF_ASCII,
                (uint32_t)(strlen(value) + 1), (void *)value);
    } else {
        ALOGE("%s: getExifSoftware failed", __func__);
    }

    return exif;
}

/*===========================================================================
 * FUNCTION   : overrideYuvSize
 *
 * DESCRIPTION: override the YUV dimensions used by the snapshot stream
 *
 * PARAMETERS :
 *   @width  : new YUV width
 *   @height : new YUV height
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3PicChannel::overrideYuvSize(uint32_t width, uint32_t height)
{
   mYuvWidth = width;
   mYuvHeight = height;
}

/*===========================================================================
 * FUNCTION   : QCamera3ReprocessChannel
 *
 * DESCRIPTION: constructor of QCamera3ReprocessChannel
 *
 * PARAMETERS :
 *   @cam_handle : camera handle
 *   @cam_ops    : ptr to camera ops table
 *   @pp_mask    : post-proccess feature mask
 *   @ch_hdl     : owning QCamera3PicChannel (buffer count is inherited
 *                 from it)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3ReprocessChannel::QCamera3ReprocessChannel(uint32_t cam_handle,
                                                 mm_camera_ops_t *cam_ops,
                                                 channel_cb_routine cb_routine,
                                                 cam_padding_info_t *paddingInfo,
                                                 uint32_t postprocess_mask,
                                                 void *userData, void *ch_hdl) :
    QCamera3Channel(cam_handle, cam_ops, cb_routine, paddingInfo, postprocess_mask,
                    userData, ((QCamera3PicChannel *)ch_hdl)->getNumBuffers()),
    picChHandle(ch_hdl),
    mOfflineBuffersIndex(-1),
    m_pSrcChannel(NULL),
    m_pMetaChannel(NULL),
    mMemory(NULL)
{
    memset(mSrcStreamHandles, 0, sizeof(mSrcStreamHandles));
    // The last buffer slot is reserved for the offline metadata mapping.
    mOfflineMetaIndex = (int32_t) (mNumBuffers -1);
}


/*===========================================================================
 * FUNCTION   : QCamera3ReprocessChannel
 *
 * DESCRIPTION: constructor of QCamera3ReprocessChannel
 *
 *
PARAMETERS :
 *   @isType : image stabilization type for the channel
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::initialize(cam_is_type_t isType)
{
    int32_t rc = NO_ERROR;
    mm_camera_channel_attr_t attr;

    memset(&attr, 0, sizeof(mm_camera_channel_attr_t));
    // Continuous notify: deliver every super buffer as it arrives.
    attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS;
    attr.max_unmatched_frames = 1;

    rc = init(&attr, NULL);
    if (rc < 0) {
        ALOGE("%s: init failed", __func__);
    }
    mIsType = isType;
    return rc;
}


/*===========================================================================
 * FUNCTION   : streamCbRoutine
 *
 * DESCRIPTION: stream data callback for the reprocess channel; forwards
 *              the post-processed frame to JPEG encoding. (The original
 *              header here was a copy-paste of the constructor's.)
 *
 * PARAMETERS :
 *   @super_frame : super buffer from mm-camera; must carry exactly one
 *                  valid buffer
 *   @stream      : reprocess stream the buffer came from
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3ReprocessChannel::streamCbRoutine(mm_camera_super_buf_t *super_frame,
                                  QCamera3Stream *stream)
{
    //Got the pproc data callback. Now send to jpeg encoding
    uint8_t frameIndex;
    mm_camera_super_buf_t* frame = NULL;
    QCamera3PicChannel *obj = (QCamera3PicChannel *)picChHandle;

    if(!super_frame) {
        ALOGE("%s: Invalid Super buffer",__func__);
        return;
    }

    if(super_frame->num_bufs != 1) {
        ALOGE("%s: Multiple streams are not supported",__func__);
        return;
    }
    if(super_frame->bufs[0] == NULL ) {
        ALOGE("%s: Error, Super buffer frame does not contain valid buffer",
                __func__);
        return;
    }

    frameIndex = (uint8_t)super_frame->bufs[0]->buf_idx;
    // Heap copy of the descriptor: the postprocessor takes ownership of
    // `frame`, the callback's own `super_frame` is freed below.
    frame = (mm_camera_super_buf_t *)malloc(sizeof(mm_camera_super_buf_t));
    if (frame == NULL) {
       ALOGE("%s: Error allocating memory to save received_frame structure.",
                __func__);
       if(stream) {
           stream->bufDone(frameIndex);
       }
       return;
    }
    CDBG("%s: bufIndex: %u recvd from post proc",
            __func__, (uint32_t)frameIndex);
    *frame = *super_frame;
    if (mYUVDump) {
        cam_dimension_t dim;
        memset(&dim, 0, sizeof(dim));
        stream->getFrameDimension(dim);
        cam_frame_len_offset_t offset;
        memset(&offset, 0, sizeof(cam_frame_len_offset_t));
        stream->getFrameOffset(offset);
        dumpYUV(frame->bufs[0], dim, offset, 2);
    }
    // JPEG encoding is driven by the owning pic channel's postprocessor.
    obj->m_postprocessor.processPPData(frame);
    free(super_frame);
    return;
}

/*===========================================================================
 * FUNCTION   : QCamera3ReprocessChannel
 *
 * DESCRIPTION: default constructor of QCamera3ReprocessChannel
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3ReprocessChannel::QCamera3ReprocessChannel() :
    m_pSrcChannel(NULL),
    m_pMetaChannel(NULL)
{
}

/*===========================================================================
 * FUNCTION   : getStreamBufs
 *
 * DESCRIPTION:
register the buffers of the reprocess channel 2799 * 2800 * PARAMETERS : none 2801 * 2802 * RETURN : QCamera3Memory * 2803 *==========================================================================*/ 2804QCamera3Memory* QCamera3ReprocessChannel::getStreamBufs(uint32_t len) 2805{ 2806 int rc = 0; 2807 2808 mMemory = new QCamera3HeapMemory(); 2809 if (!mMemory) { 2810 ALOGE("%s: unable to create reproc memory", __func__); 2811 return NULL; 2812 } 2813 2814 rc = mMemory->allocate(mNumBuffers, len, true); 2815 if (rc < 0) { 2816 ALOGE("%s: unable to allocate reproc memory", __func__); 2817 delete mMemory; 2818 mMemory = NULL; 2819 return NULL; 2820 } 2821 return mMemory; 2822} 2823 2824/*=========================================================================== 2825 * FUNCTION : getStreamBufs 2826 * 2827 * DESCRIPTION: register the buffers of the reprocess channel 2828 * 2829 * PARAMETERS : none 2830 * 2831 * RETURN : 2832 *==========================================================================*/ 2833void QCamera3ReprocessChannel::putStreamBufs() 2834{ 2835 mMemory->deallocate(); 2836 delete mMemory; 2837 mMemory = NULL; 2838} 2839 2840/*=========================================================================== 2841 * FUNCTION : ~QCamera3ReprocessChannel 2842 * 2843 * DESCRIPTION: destructor of QCamera3ReprocessChannel 2844 * 2845 * PARAMETERS : none 2846 * 2847 * RETURN : none 2848 *==========================================================================*/ 2849QCamera3ReprocessChannel::~QCamera3ReprocessChannel() 2850{ 2851} 2852 2853/*=========================================================================== 2854 * FUNCTION : getStreamBySrcHandle 2855 * 2856 * DESCRIPTION: find reprocess stream by its source stream handle 2857 * 2858 * PARAMETERS : 2859 * @srcHandle : source stream handle 2860 * 2861 * RETURN : ptr to reprocess stream if found. 
NULL if not found 2862 *==========================================================================*/ 2863QCamera3Stream * QCamera3ReprocessChannel::getStreamBySrcHandle(uint32_t srcHandle) 2864{ 2865 QCamera3Stream *pStream = NULL; 2866 2867 for (uint32_t i = 0; i < m_numStreams; i++) { 2868 if (mSrcStreamHandles[i] == srcHandle) { 2869 pStream = mStreams[i]; 2870 break; 2871 } 2872 } 2873 return pStream; 2874} 2875 2876/*=========================================================================== 2877 * FUNCTION : getSrcStreamBySrcHandle 2878 * 2879 * DESCRIPTION: find source stream by source stream handle 2880 * 2881 * PARAMETERS : 2882 * @srcHandle : source stream handle 2883 * 2884 * RETURN : ptr to reprocess stream if found. NULL if not found 2885 *==========================================================================*/ 2886QCamera3Stream * QCamera3ReprocessChannel::getSrcStreamBySrcHandle(uint32_t srcHandle) 2887{ 2888 QCamera3Stream *pStream = NULL; 2889 2890 if (NULL == m_pSrcChannel) { 2891 return NULL; 2892 } 2893 2894 for (uint32_t i = 0; i < m_numStreams; i++) { 2895 if (mSrcStreamHandles[i] == srcHandle) { 2896 pStream = m_pSrcChannel->getStreamByIndex(i); 2897 break; 2898 } 2899 } 2900 return pStream; 2901} 2902 2903/*=========================================================================== 2904 * FUNCTION : stop 2905 * 2906 * DESCRIPTION: stop channel 2907 * 2908 * PARAMETERS : none 2909 * 2910 * RETURN : int32_t type of status 2911 * NO_ERROR -- success 2912 * none-zero failure code 2913 *==========================================================================*/ 2914int32_t QCamera3ReprocessChannel::stop() 2915{ 2916 unmapOfflineBuffers(true); 2917 2918 return QCamera3Channel::stop(); 2919} 2920 2921/*=========================================================================== 2922 * FUNCTION : unmapOfflineBuffers 2923 * 2924 * DESCRIPTION: Unmaps offline buffers 2925 * 2926 * PARAMETERS : none 2927 * 2928 * RETURN : int32_t type of status 
 *              NO_ERROR -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::unmapOfflineBuffers(bool all)
{
    int rc = NO_ERROR;
    // Unmap frame buffers first, then metadata buffers. With all==false
    // only the oldest entry of each list is unmapped and erased; with
    // all==true every entry is unmapped and the lists are cleared.
    if (!mOfflineBuffers.empty()) {
        QCamera3Stream *stream = NULL;
        List<OfflineBuffer>::iterator it = mOfflineBuffers.begin();
        for (; it != mOfflineBuffers.end(); it++) {
           stream = (*it).stream;
           if (NULL != stream) {
               rc = stream->unmapBuf((*it).type,
                                     (*it).index,
                                        -1);
               if (NO_ERROR != rc) {
                   // Keep going; rc carries the last failure out.
                   ALOGE("%s: Error during offline buffer unmap %d",
                         __func__, rc);
               }
               CDBG("%s: Unmapped buffer with index %d", __func__, (*it).index);
           }
           if (!all) {
               // Erase-then-break: the invalidated iterator is never
               // reused, so this is safe.
               mOfflineBuffers.erase(it);
               break;
           }
        }
        if (all) {
           mOfflineBuffers.clear();
        }
    }

    if (!mOfflineMetaBuffers.empty()) {
        QCamera3Stream *stream = NULL;
        List<OfflineBuffer>::iterator it = mOfflineMetaBuffers.begin();
        for (; it != mOfflineMetaBuffers.end(); it++) {
           stream = (*it).stream;
           if (NULL != stream) {
               rc = stream->unmapBuf((*it).type,
                                     (*it).index,
                                        -1);
               if (NO_ERROR != rc) {
                   ALOGE("%s: Error during offline buffer unmap %d",
                         __func__, rc);
               }
               CDBG("%s: Unmapped meta buffer with index %d", __func__, (*it).index);
           }
           if (!all) {
               mOfflineMetaBuffers.erase(it);
               break;
           }
        }
        if (all) {
           mOfflineMetaBuffers.clear();
        }
    }
    return rc;
}


/*===========================================================================
 * FUNCTION   : extractFrameAndRotation
 *
 * DESCRIPTION: Extract output rotation and frame data if present
 *
 * PARAMETERS :
 *   @frame      : input frame from source stream
 *   meta_buffer: metadata buffer
 *   @metadata  : corresponding metadata
 *   @fwk_frame :
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR --
success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3ReprocessChannel::extractFrameCropAndRotation(mm_camera_super_buf_t *frame,
        mm_camera_buf_def_t *meta_buffer, jpeg_settings_t *jpeg_settings,
        qcamera_fwk_input_pp_data_t &fwk_frame)
{
    int32_t rc = NO_ERROR;
    QCamera3HardwareInterface* hal_obj = (QCamera3HardwareInterface*)mUserData;
    if ((NULL == meta_buffer) || (NULL == frame) || (NULL == jpeg_settings) ||
            (NULL == hal_obj)) {
        return BAD_VALUE;
    }

    metadata_buffer_t *meta = (metadata_buffer_t *)meta_buffer->buffer;
    if (NULL == meta) {
        return BAD_VALUE;
    }

    // Only the first buffer with a valid reprocess/source stream pair is
    // used; the loop breaks after populating fwk_frame from it.
    for (uint32_t i = 0; i < frame->num_bufs; i++) {
        QCamera3Stream *pStream = getStreamBySrcHandle(frame->bufs[i]->stream_id);
        QCamera3Stream *pSrcStream = getSrcStreamBySrcHandle(frame->bufs[i]->stream_id);

        if (pStream != NULL && pSrcStream != NULL) {
            // Find rotation info for reprocess stream: translate the JPEG
            // orientation (degrees) into the HAL rotation enum. Values
            // other than 0/90/180/270 leave rotation at the memset default.
            cam_rotation_info_t rotation_info;
            memset(&rotation_info, 0, sizeof(rotation_info));
            if (jpeg_settings->jpeg_orientation == 0) {
               rotation_info.rotation = ROTATE_0;
            } else if (jpeg_settings->jpeg_orientation == 90) {
               rotation_info.rotation = ROTATE_90;
            } else if (jpeg_settings->jpeg_orientation == 180) {
               rotation_info.rotation = ROTATE_180;
            } else if (jpeg_settings->jpeg_orientation == 270) {
               rotation_info.rotation = ROTATE_270;
            }
            rotation_info.streamId = mStreams[0]->getMyServerID();
            ADD_SET_PARAM_ENTRY_TO_BATCH(meta, CAM_INTF_PARM_ROTATION, rotation_info);

            // Find and insert crop info for reprocess stream: copy the
            // source stream's crop/roi entry into a new slot tagged with
            // the reprocess stream's server id.
            IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) {
                if (MAX_NUM_STREAMS > crop_data->num_of_streams) {
                    for (int j = 0; j < crop_data->num_of_streams; j++) {
                        if (crop_data->crop_info[j].stream_id ==
                                pSrcStream->getMyServerID()) {

                            // Store crop/roi information for offline reprocess
                            // in the reprocess stream slot
                            crop_data->crop_info[crop_data->num_of_streams].crop =
                                    crop_data->crop_info[j].crop;
                            crop_data->crop_info[crop_data->num_of_streams].roi_map =
                                    crop_data->crop_info[j].roi_map;
                            crop_data->crop_info[crop_data->num_of_streams].stream_id =
                                    mStreams[0]->getMyServerID();
                            crop_data->num_of_streams++;

                            CDBG("%s: Reprocess stream server id: %d",
                                    __func__, mStreams[0]->getMyServerID());
                            CDBG("%s: Found offline reprocess crop %dx%d %dx%d",
                                    __func__,
                                    crop_data->crop_info[j].crop.left,
                                    crop_data->crop_info[j].crop.top,
                                    crop_data->crop_info[j].crop.width,
                                    crop_data->crop_info[j].crop.height);
                            CDBG("%s: Found offline reprocess roimap %dx%d %dx%d",
                                    __func__,
                                    crop_data->crop_info[j].roi_map.left,
                                    crop_data->crop_info[j].roi_map.top,
                                    crop_data->crop_info[j].roi_map.width,
                                    crop_data->crop_info[j].roi_map.height);

                            break;
                        }
                    }
                } else {
                    ALOGE("%s: No space to add reprocess stream crop/roi information",
                            __func__);
                }
            }

            fwk_frame.input_buffer = *frame->bufs[i];
            fwk_frame.metadata_buffer = *meta_buffer;
            break;
        } else {
            ALOGE("%s: Source/Re-process streams are invalid", __func__);
            rc |= BAD_VALUE;
        }
    }

    return rc;
}

/*===========================================================================
* FUNCTION : extractCrop
*
* DESCRIPTION: Extract framework output crop if present
*
* PARAMETERS :
* @frame : input frame for reprocessing
*
* RETURN : int32_t type of status
*              NO_ERROR -- success
*              none-zero failure code
*==========================================================================*/
int32_t QCamera3ReprocessChannel::extractCrop(qcamera_fwk_input_pp_data_t *frame)
{
    if (NULL == frame) {
        ALOGE("%s: Incorrect input frame", __func__);
        return BAD_VALUE;
    }


    if (NULL == frame->metadata_buffer.buffer) {
        ALOGE("%s: No metadata available", __func__);
        return BAD_VALUE;
    }

    // Find and insert crop info for reprocess stream. Framework input is
    // expected to carry exactly one crop entry (the input stream's);
    // duplicate it into a second slot tagged with the reprocess stream id.
    metadata_buffer_t *meta = (metadata_buffer_t *) frame->metadata_buffer.buffer;
    IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, meta) {
        if (1 == crop_data->num_of_streams) {
            // Store crop/roi information for offline reprocess
            // in the reprocess stream slot
            crop_data->crop_info[crop_data->num_of_streams].crop =
                    crop_data->crop_info[0].crop;
            crop_data->crop_info[crop_data->num_of_streams].roi_map =
                    crop_data->crop_info[0].roi_map;
            crop_data->crop_info[crop_data->num_of_streams].stream_id =
                    mStreams[0]->getMyServerID();
            crop_data->num_of_streams++;

            CDBG("%s: Reprocess stream server id: %d",
                    __func__, mStreams[0]->getMyServerID());
            CDBG("%s: Found offline reprocess crop %dx%d %dx%d", __func__,
                    crop_data->crop_info[0].crop.left,
                    crop_data->crop_info[0].crop.top,
                    crop_data->crop_info[0].crop.width,
                    crop_data->crop_info[0].crop.height);
            CDBG("%s: Found offline reprocess roi map %dx%d %dx%d", __func__,
                    crop_data->crop_info[0].roi_map.left,
                    crop_data->crop_info[0].roi_map.top,
                    crop_data->crop_info[0].roi_map.width,
                    crop_data->crop_info[0].roi_map.height);
        } else {
            ALOGE("%s: Incorrect number of offline crop data entries %d",
                    __func__,
                    crop_data->num_of_streams);
            return BAD_VALUE;
        }
    } else {
        CDBG_HIGH("%s: Crop data not present", __func__);
    }

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : doReprocessOffline
 *
 * DESCRIPTION: request to do a reprocess on the frame
 *
 * PARAMETERS :
 *   @frame  : input
frame for reprocessing 3163 * 3164 * RETURN : int32_t type of status 3165 * NO_ERROR -- success 3166 * none-zero failure code 3167 *==========================================================================*/ 3168 int32_t QCamera3ReprocessChannel::doReprocessOffline(qcamera_fwk_input_pp_data_t *frame) 3169{ 3170 int32_t rc = 0; 3171 OfflineBuffer mappedBuffer; 3172 3173 if (m_numStreams < 1) { 3174 ALOGE("%s: No reprocess stream is created", __func__); 3175 return -1; 3176 } 3177 3178 if (NULL == frame) { 3179 ALOGE("%s: Incorrect input frame", __func__); 3180 return BAD_VALUE; 3181 } 3182 3183 if (NULL == frame->metadata_buffer.buffer) { 3184 ALOGE("%s: No metadata available", __func__); 3185 return BAD_VALUE; 3186 } 3187 3188 if (NULL == frame->input_buffer.buffer) { 3189 ALOGE("%s: No input buffer available", __func__); 3190 return BAD_VALUE; 3191 } 3192 3193 if ((0 == m_numStreams) || (NULL == mStreams[0])) { 3194 ALOGE("%s: Reprocess stream not initialized!", __func__); 3195 return NO_INIT; 3196 } 3197 3198 QCamera3Stream *pStream = mStreams[0]; 3199 int32_t max_idx = (int32_t) (mNumBuffers - 1); 3200 //loop back the indices if max burst count reached 3201 if (mOfflineBuffersIndex == max_idx) { 3202 mOfflineBuffersIndex = -1; 3203 } 3204 uint32_t buf_idx = (uint32_t)(mOfflineBuffersIndex + 1); 3205 rc = pStream->mapBuf( 3206 CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, 3207 buf_idx, -1, 3208 frame->input_buffer.fd, frame->input_buffer.frame_len); 3209 if (NO_ERROR == rc) { 3210 mappedBuffer.index = buf_idx; 3211 mappedBuffer.stream = pStream; 3212 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF; 3213 mOfflineBuffers.push_back(mappedBuffer); 3214 mOfflineBuffersIndex = (int32_t)buf_idx; 3215 CDBG("%s: Mapped buffer with index %d", __func__, mOfflineBuffersIndex); 3216 } 3217 3218 max_idx = (int32_t) ((mNumBuffers * 2) - 1); 3219 //loop back the indices if max burst count reached 3220 if (mOfflineMetaIndex == max_idx) { 3221 mOfflineMetaIndex = (int32_t) 
(mNumBuffers - 1); 3222 } 3223 uint32_t meta_buf_idx = (uint32_t)(mOfflineMetaIndex + 1); 3224 rc |= pStream->mapBuf( 3225 CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF, 3226 meta_buf_idx, -1, 3227 frame->metadata_buffer.fd, frame->metadata_buffer.frame_len); 3228 if (NO_ERROR == rc) { 3229 mappedBuffer.index = meta_buf_idx; 3230 mappedBuffer.stream = pStream; 3231 mappedBuffer.type = CAM_MAPPING_BUF_TYPE_OFFLINE_META_BUF; 3232 mOfflineMetaBuffers.push_back(mappedBuffer); 3233 mOfflineMetaIndex = (int32_t)meta_buf_idx; 3234 CDBG("%s: Mapped meta buffer with index %d", __func__, mOfflineMetaIndex); 3235 } 3236 3237 if (rc == NO_ERROR) { 3238 cam_stream_parm_buffer_t param; 3239 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t)); 3240 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS; 3241 param.reprocess.buf_index = buf_idx; 3242 param.reprocess.frame_idx = frame->input_buffer.frame_idx; 3243 param.reprocess.meta_present = 1; 3244 param.reprocess.meta_buf_index = meta_buf_idx; 3245 rc = pStream->setParameter(param); 3246 if (rc != NO_ERROR) { 3247 ALOGE("%s: stream setParameter for reprocess failed", __func__); 3248 } 3249 } else { 3250 ALOGE("%s: Input buffer memory map failed: %d", __func__, rc); 3251 } 3252 3253 return rc; 3254} 3255 3256/*=========================================================================== 3257 * FUNCTION : doReprocess 3258 * 3259 * DESCRIPTION: request to do a reprocess on the frame 3260 * 3261 * PARAMETERS : 3262 * @buf_fd : fd to the input buffer that needs reprocess 3263 * @buf_lenght : length of the input buffer 3264 * @ret_val : result of reprocess. 3265 * Example: Could be faceID in case of register face image. 3266 * @meta_frame : metadata frame. 
3267 * 3268 * RETURN : int32_t type of status 3269 * NO_ERROR -- success 3270 * none-zero failure code 3271 *==========================================================================*/ 3272int32_t QCamera3ReprocessChannel::doReprocess(int buf_fd, size_t buf_length, 3273 int32_t &ret_val, mm_camera_super_buf_t *meta_frame) 3274{ 3275 int32_t rc = 0; 3276 if (m_numStreams < 1) { 3277 ALOGE("%s: No reprocess stream is created", __func__); 3278 return -1; 3279 } 3280 if (meta_frame == NULL) { 3281 ALOGE("%s: Did not get corresponding metadata in time", __func__); 3282 return -1; 3283 } 3284 3285 uint8_t buf_idx = 0; 3286 for (uint32_t i = 0; i < m_numStreams; i++) { 3287 rc = mStreams[i]->mapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, 3288 buf_idx, -1, 3289 buf_fd, buf_length); 3290 3291 if (rc == NO_ERROR) { 3292 cam_stream_parm_buffer_t param; 3293 memset(¶m, 0, sizeof(cam_stream_parm_buffer_t)); 3294 param.type = CAM_STREAM_PARAM_TYPE_DO_REPROCESS; 3295 param.reprocess.buf_index = buf_idx; 3296 param.reprocess.meta_present = 1; 3297 param.reprocess.meta_stream_handle = m_pMetaChannel->mStreams[0]->getMyServerID(); 3298 param.reprocess.meta_buf_index = meta_frame->bufs[0]->buf_idx; 3299 rc = mStreams[i]->setParameter(param); 3300 if (rc == NO_ERROR) { 3301 ret_val = param.reprocess.ret_val; 3302 } 3303 mStreams[i]->unmapBuf(CAM_MAPPING_BUF_TYPE_OFFLINE_INPUT_BUF, 3304 buf_idx, -1); 3305 } 3306 } 3307 return rc; 3308} 3309 3310/*=========================================================================== 3311 * FUNCTION : addReprocStreamsFromSource 3312 * 3313 * DESCRIPTION: add reprocess streams from input source channel 3314 * 3315 * PARAMETERS : 3316 * @config : pp feature configuration 3317 * @src_config : source reprocess configuration 3318 * @isType : type of image stabilization required on this stream 3319 * @pMetaChannel : ptr to metadata channel to get corresp. 
metadata 3320 * 3321 * 3322 * RETURN : int32_t type of status 3323 * NO_ERROR -- success 3324 * none-zero failure code 3325 *==========================================================================*/ 3326int32_t QCamera3ReprocessChannel::addReprocStreamsFromSource(cam_pp_feature_config_t &pp_config, 3327 const reprocess_config_t &src_config , cam_is_type_t is_type, 3328 QCamera3Channel *pMetaChannel) 3329{ 3330 int32_t rc = 0; 3331 cam_stream_reproc_config_t reprocess_config; 3332 cam_stream_type_t streamType; 3333 3334 cam_dimension_t streamDim = src_config.output_stream_dim; 3335 3336 if (NULL != src_config.src_channel) { 3337 QCamera3Stream *pSrcStream = src_config.src_channel->getStreamByIndex(0); 3338 if (pSrcStream == NULL) { 3339 ALOGE("%s: source channel doesn't have a stream", __func__); 3340 return BAD_VALUE; 3341 } 3342 mSrcStreamHandles[m_numStreams] = pSrcStream->getMyHandle(); 3343 } 3344 3345 streamType = CAM_STREAM_TYPE_OFFLINE_PROC; 3346 reprocess_config.pp_type = CAM_OFFLINE_REPROCESS_TYPE; 3347 3348 reprocess_config.offline.input_fmt = src_config.stream_format; 3349 reprocess_config.offline.input_dim = src_config.input_stream_dim; 3350 reprocess_config.offline.input_buf_planes.plane_info = 3351 src_config.input_stream_plane_info.plane_info; 3352 reprocess_config.offline.num_of_bufs = (uint8_t)mNumBuffers; 3353 reprocess_config.offline.input_type = src_config.stream_type; 3354 3355 reprocess_config.pp_feature_config = pp_config; 3356 QCamera3Stream *pStream = new QCamera3Stream(m_camHandle, 3357 m_handle, 3358 m_camOps, 3359 mPaddingInfo, 3360 (QCamera3Channel*)this); 3361 if (pStream == NULL) { 3362 ALOGE("%s: No mem for Stream", __func__); 3363 return NO_MEMORY; 3364 } 3365 3366 rc = pStream->init(streamType, src_config.stream_format, 3367 streamDim, &reprocess_config, 3368 (uint8_t)mNumBuffers, 3369 reprocess_config.pp_feature_config.feature_mask, 3370 is_type, 3371 QCamera3Channel::streamCbRoutine, this); 3372 3373 if (rc == 0) { 3374 
mStreams[m_numStreams] = pStream; 3375 m_numStreams++; 3376 } else { 3377 ALOGE("%s: failed to create reprocess stream", __func__); 3378 delete pStream; 3379 } 3380 3381 if (rc == NO_ERROR) { 3382 m_pSrcChannel = src_config.src_channel; 3383 m_pMetaChannel = pMetaChannel; 3384 } 3385 if(m_camOps->request_super_buf(m_camHandle,m_handle,1,0) < 0) { 3386 ALOGE("%s: Request for super buffer failed",__func__); 3387 } 3388 return rc; 3389} 3390 3391cam_dimension_t QCamera3SupportChannel::kDim = {640, 480}; 3392 3393QCamera3SupportChannel::QCamera3SupportChannel(uint32_t cam_handle, 3394 mm_camera_ops_t *cam_ops, 3395 cam_padding_info_t *paddingInfo, 3396 uint32_t postprocess_mask, 3397 cam_stream_type_t streamType, 3398 cam_dimension_t *dim, 3399 void *userData, uint32_t numBuffers) : 3400 QCamera3Channel(cam_handle, cam_ops, 3401 NULL, paddingInfo, postprocess_mask, 3402 userData, numBuffers), 3403 mMemory(NULL) 3404{ 3405 memcpy(&mDim, dim, sizeof(cam_dimension_t)); 3406 mStreamType = streamType; 3407} 3408 3409QCamera3SupportChannel::~QCamera3SupportChannel() 3410{ 3411 if (m_bIsActive) 3412 stop(); 3413 3414 if (mMemory) { 3415 mMemory->deallocate(); 3416 delete mMemory; 3417 mMemory = NULL; 3418 } 3419} 3420 3421int32_t QCamera3SupportChannel::initialize(cam_is_type_t isType) 3422{ 3423 int32_t rc; 3424 3425 if (mMemory || m_numStreams > 0) { 3426 ALOGE("%s: metadata channel already initialized", __func__); 3427 return -EINVAL; 3428 } 3429 3430 rc = init(NULL, NULL); 3431 if (rc < 0) { 3432 ALOGE("%s: init failed", __func__); 3433 return rc; 3434 } 3435 mIsType = isType; 3436 rc = QCamera3Channel::addStream(mStreamType, 3437 CAM_FORMAT_YUV_420_NV21, mDim, MIN_STREAMING_BUFFER_NUM, 3438 mPostProcMask, mIsType); 3439 if (rc < 0) { 3440 ALOGE("%s: addStream failed", __func__); 3441 } 3442 return rc; 3443} 3444 3445int32_t QCamera3SupportChannel::request(buffer_handle_t * /*buffer*/, 3446 uint32_t /*frameNumber*/) 3447{ 3448 return NO_ERROR; 3449} 3450 3451void 
QCamera3SupportChannel::streamCbRoutine( 3452 mm_camera_super_buf_t *super_frame, 3453 QCamera3Stream * /*stream*/) 3454{ 3455 if (super_frame == NULL || super_frame->num_bufs != 1) { 3456 ALOGE("%s: super_frame is not valid", __func__); 3457 return; 3458 } 3459 bufDone(super_frame); 3460 free(super_frame); 3461} 3462 3463QCamera3Memory* QCamera3SupportChannel::getStreamBufs(uint32_t len) 3464{ 3465 int rc; 3466 3467 mMemory = new QCamera3HeapMemory(); 3468 if (!mMemory) { 3469 ALOGE("%s: unable to create heap memory", __func__); 3470 return NULL; 3471 } 3472 rc = mMemory->allocate(MIN_STREAMING_BUFFER_NUM, len, true); 3473 if (rc < 0) { 3474 ALOGE("%s: unable to allocate heap memory", __func__); 3475 delete mMemory; 3476 mMemory = NULL; 3477 return NULL; 3478 } 3479 return mMemory; 3480} 3481 3482void QCamera3SupportChannel::putStreamBufs() 3483{ 3484 mMemory->deallocate(); 3485 delete mMemory; 3486 mMemory = NULL; 3487} 3488 3489}; // namespace qcamera 3490