// QCamera3HWI.cpp @ revision acddfe2d2fd672d8dbf21caedf4e0c64571b049d
14e180b6a0b4720a9b8e9e959a882386f690f08ffTorne (Richard Coles)/* Copyright (c) 2012-2013, The Linux Foundataion. All rights reserved. 25821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* 35821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* Redistribution and use in source and binary forms, with or without 45821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* modification, are permitted provided that the following conditions are 54e180b6a0b4720a9b8e9e959a882386f690f08ffTorne (Richard Coles)* met: 64e180b6a0b4720a9b8e9e959a882386f690f08ffTorne (Richard Coles)* * Redistributions of source code must retain the above copyright 75821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* notice, this list of conditions and the following disclaimer. 85821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* * Redistributions in binary form must reproduce the above 95821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* copyright notice, this list of conditions and the following 105821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* disclaimer in the documentation and/or other materials provided 115821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* with the distribution. 125821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* * Neither the name of The Linux Foundation nor the names of its 132a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)* contributors may be used to endorse or promote products derived 145821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* from this software without specific prior written permission. 
15c2e0dbddbe15c98d52c4786dac06cb8952a8ae6dTorne (Richard Coles)* 164e180b6a0b4720a9b8e9e959a882386f690f08ffTorne (Richard Coles)* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED 175821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF 185821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT 195821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS 205821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 215821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 225821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR 235821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 245821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE 25c2e0dbddbe15c98d52c4786dac06cb8952a8ae6dTorne (Richard Coles)* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN 265821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
275821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)* 285821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)*/ 295821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 305821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#define LOG_TAG "QCamera3HWI" 315821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 325821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#include <cutils/properties.h> 335821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#include <hardware/camera3.h> 345821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#include <camera/CameraMetadata.h> 355821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#include <stdlib.h> 365821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#include <utils/Log.h> 375821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#include <utils/Errors.h> 385821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#include <ui/Fence.h> 39868fa2fe829687343ffae624259930155e16dbd8Torne (Richard Coles)#include <gralloc_priv.h> 402a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)#include "QCamera3HWI.h" 415821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#include "QCamera3Mem.h" 42a3f6a49ab37290eeeb8db0f41ec0f1cb74a68be7Torne (Richard Coles)#include "QCamera3Channel.h" 435821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#include "QCamera3PostProc.h" 445821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 455821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)using namespace android; 46c2e0dbddbe15c98d52c4786dac06cb8952a8ae6dTorne (Richard Coles) 475821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)namespace qcamera { 482a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 495821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS]; 
505821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)parm_buffer_t *prevSettings; 515821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS]; 525821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 5358537e28ecd584eab876aee8be7156509866d23aTorne (Richard Coles)pthread_mutex_t QCamera3HardwareInterface::mCameraSessionLock = 545821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) PTHREAD_MUTEX_INITIALIZER; 554e180b6a0b4720a9b8e9e959a882386f690f08ffTorne (Richard Coles)unsigned int QCamera3HardwareInterface::mCameraSessionActive = 0; 56a3f6a49ab37290eeeb8db0f41ec0f1cb74a68be7Torne (Richard Coles) 575821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::EFFECT_MODES_MAP[] = { 585821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF }, 595821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO }, 605821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE }, 61c2e0dbddbe15c98d52c4786dac06cb8952a8ae6dTorne (Richard Coles) { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE }, 625821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA }, 635821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE }, 645821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD }, 65eb525c5499e34cc9c4b825d6d9e75bb07cc06aceBen Murdoch { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD }, 662a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles) { ANDROID_CONTROL_EFFECT_MODE_AQUA, 
CAM_EFFECT_MODE_AQUA } 675821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)}; 685821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 692a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = { 705821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF }, 713551c9c881056c480085172ff9840cab31610854Torne (Richard Coles) { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO }, 722a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles) { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT }, 735821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT }, 7458537e28ecd584eab876aee8be7156509866d23aTorne (Richard Coles) { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT}, 755821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT }, 762a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles) { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT }, 7758e6fbe4ee35d65e14b626c557d37565bf8ad179Ben Murdoch { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT }, 78a36e5920737c6adbddd3e43b760e5de8431db6e0Torne (Richard Coles) { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE } 795821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)}; 80c2e0dbddbe15c98d52c4786dac06cb8952a8ae6dTorne (Richard Coles) 815821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::SCENE_MODES_MAP[] = { 827d4cd473f85ac64c3747c96c277f9e506a0d2246Torne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION }, 835821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT }, 
845821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE }, 855821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT }, 865821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT }, 87424c4d7b64af9d0d8fd9624f381f469654d5e3d2Torne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE }, 88f2477e01787aa58f445919b809d89e252beef54fTorne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH }, 897dbb3d5cf0c15f500944d211057644d6a2f37371Ben Murdoch { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW }, 905821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET }, 9158537e28ecd584eab876aee8be7156509866d23aTorne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE }, 925821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS }, 932a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS }, 94f2477e01787aa58f445919b809d89e252beef54fTorne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY }, 955821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT }, 965821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE} 97f2477e01787aa58f445919b809d89e252beef54fTorne (Richard Coles)}; 98c2e0dbddbe15c98d52c4786dac06cb8952a8ae6dTorne (Richard Coles) 9990dce4d38c5ff5333bea97d859d4e484e27edf0cTorne (Richard Coles)const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FOCUS_MODES_MAP[] = { 
10058537e28ecd584eab876aee8be7156509866d23aTorne (Richard Coles) { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED }, 1015821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO }, 102a36e5920737c6adbddd3e43b760e5de8431db6e0Torne (Richard Coles) { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO }, 1035821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF }, 1045821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE }, 105c2e0dbddbe15c98d52c4786dac06cb8952a8ae6dTorne (Richard Coles) { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO } 106bb1529ce867d8845a77ec7cdf3e3003ef1771a40Ben Murdoch}; 1075821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 1085821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = { 1095821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF }, 1105821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ }, 1115821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ }, 11258537e28ecd584eab876aee8be7156509866d23aTorne (Richard Coles) { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO } 113868fa2fe829687343ffae624259930155e16dbd8Torne (Richard Coles)}; 1142a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles) 1152a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = { 1165821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AE_MODE_OFF, 
CAM_FLASH_MODE_OFF }, 117f2477e01787aa58f445919b809d89e252beef54fTorne (Richard Coles) { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF }, 1185821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO}, 1192a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles) { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON }, 1205821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO} 1212a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)}; 1227d4cd473f85ac64c3747c96c277f9e506a0d2246Torne (Richard Coles) 1234e180b6a0b4720a9b8e9e959a882386f690f08ffTorne (Richard Coles)const QCamera3HardwareInterface::QCameraMap QCamera3HardwareInterface::FLASH_MODES_MAP[] = { 1245821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF }, 1253551c9c881056c480085172ff9840cab31610854Torne (Richard Coles) { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE }, 1265821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH } 1275821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)}; 128d0247b1b59f9c528cb6df88b4f2b9afaf80d181eTorne (Richard Coles) 1295821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)const int32_t available_thumbnail_sizes[] = {512, 288, 480, 288, 256, 154, 432, 288, 130f2477e01787aa58f445919b809d89e252beef54fTorne (Richard Coles) 320, 240, 176, 144, 0, 0}; 1315821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 1323551c9c881056c480085172ff9840cab31610854Torne (Richard Coles)camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = { 13358537e28ecd584eab876aee8be7156509866d23aTorne (Richard Coles) initialize: QCamera3HardwareInterface::initialize, 1345821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) configure_streams: QCamera3HardwareInterface::configure_streams, 
135f2477e01787aa58f445919b809d89e252beef54fTorne (Richard Coles) register_stream_buffers: QCamera3HardwareInterface::register_stream_buffers, 1365821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) construct_default_request_settings: QCamera3HardwareInterface::construct_default_request_settings, 1372a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles) process_capture_request: QCamera3HardwareInterface::process_capture_request, 1385821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) get_metadata_vendor_tag_ops: QCamera3HardwareInterface::get_metadata_vendor_tag_ops, 1395821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) dump: QCamera3HardwareInterface::dump, 1402a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles)}; 141ca12bfac764ba476d6cd062bf1dde12cc64c3f40Ben Murdoch 142558790d6acca3451cf3a6b497803a5f07d0bec58Ben Murdoch 1435821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)/*=========================================================================== 1445821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * FUNCTION : QCamera3HardwareInterface 145d0247b1b59f9c528cb6df88b4f2b9afaf80d181eTorne (Richard Coles) * 1465821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * DESCRIPTION: constructor of QCamera3HardwareInterface 1475821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * 1485821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * PARAMETERS : 1495821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * @cameraId : camera ID 1505821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * 1515821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * RETURN : none 1522a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles) *==========================================================================*/ 1535821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)QCamera3HardwareInterface::QCamera3HardwareInterface(int cameraId) 
15458537e28ecd584eab876aee8be7156509866d23aTorne (Richard Coles) : mCameraId(cameraId), 155d0247b1b59f9c528cb6df88b4f2b9afaf80d181eTorne (Richard Coles) mCameraHandle(NULL), 1565821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mCameraOpened(false), 157868fa2fe829687343ffae624259930155e16dbd8Torne (Richard Coles) mCameraInitialized(false), 1585821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mCallbackOps(NULL), 1595821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mInputStream(NULL), 1605821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mMetadataChannel(NULL), 1615821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mPictureChannel(NULL), 1621e9bf3e0803691d0a228da41fc608347b6db4340Torne (Richard Coles) mFirstRequest(false), 16368043e1e95eeb07d5cae7aca370b26518b0867d6Torne (Richard Coles) mParamHeap(NULL), 1645821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mParameters(NULL), 1655821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mJpegSettings(NULL), 166f2477e01787aa58f445919b809d89e252beef54fTorne (Richard Coles) mIsZslMode(false), 1679ab5563a3196760eb381d102cbb2bc0f7abc6a50Ben Murdoch m_pPowerModule(NULL) 168a3f7b4e666c476898878fa745f637129375cd889Ben Murdoch{ 169f2477e01787aa58f445919b809d89e252beef54fTorne (Richard Coles) mCameraDevice.common.tag = HARDWARE_DEVICE_TAG; 170bb1529ce867d8845a77ec7cdf3e3003ef1771a40Ben Murdoch mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_0; 171bb1529ce867d8845a77ec7cdf3e3003ef1771a40Ben Murdoch mCameraDevice.common.close = close_camera_device; 172a3f6a49ab37290eeeb8db0f41ec0f1cb74a68be7Torne (Richard Coles) mCameraDevice.ops = &mCameraOps; 1735821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mCameraDevice.priv = this; 1745821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) gCamCapability[cameraId]->version = CAM_HAL_V3; 1755821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) // TODO: hardcode for now until mctl add 
support for min_num_pp_bufs 1765821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3 1775821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) gCamCapability[cameraId]->min_num_pp_bufs = 3; 1785821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 1795821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) pthread_cond_init(&mRequestCond, NULL); 1805821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mPendingRequest = 0; 1815821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mCurrentRequestId = -1; 1825821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) pthread_mutex_init(&mMutex, NULL); 1835821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 1845821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 1855821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mDefaultMetadata[i] = NULL; 1865821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 1875821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#ifdef HAS_MULTIMEDIA_HINTS 1885821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) if (hw_get_module(POWER_HARDWARE_MODULE_ID, (const hw_module_t **)&m_pPowerModule)) { 1895821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) ALOGE("%s: %s module not found", __func__, POWER_HARDWARE_MODULE_ID); 1905821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) } 1915821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#endif 1925821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)} 1935821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 1945821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)/*=========================================================================== 1955821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * FUNCTION : ~QCamera3HardwareInterface 
1965821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * 1975821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * DESCRIPTION: destructor of QCamera3HardwareInterface 1985821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * 1995821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * PARAMETERS : none 2005821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * 2015821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * RETURN : none 2025821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) *==========================================================================*/ 2035821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)QCamera3HardwareInterface::~QCamera3HardwareInterface() 2045821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles){ 2055821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) ALOGV("%s: E", __func__); 2065821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) /* We need to stop all streams before deleting any stream */ 2075821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 2085821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) it != mStreamInfo.end(); it++) { 2095821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 2105821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) if (channel) 2115821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) channel->stop(); 2125821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) } 2132a99a7e74a7f215066514fe81d2bfa6639d9edddTorne (Richard Coles) for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 2145821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) it != mStreamInfo.end(); it++) { 2155821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 
2165821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) if (channel) 2175821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) delete channel; 2185821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) free (*it); 2195821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) } 2205821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2215821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mPictureChannel = NULL; 2225821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2235821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) if (mJpegSettings != NULL) { 2245821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) free(mJpegSettings); 2255821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mJpegSettings = NULL; 2265821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) } 2275821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2285821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) /* Clean up all channels */ 2295821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) if (mCameraInitialized) { 2305821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mMetadataChannel->stop(); 2315821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) delete mMetadataChannel; 2325821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mMetadataChannel = NULL; 2335821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) deinitParameters(); 2345821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) } 2355821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2365821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) if (mCameraOpened) 2375821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) closeCamera(); 2385821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2395821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++) 2405821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard 
Coles) if (mDefaultMetadata[i]) 2415821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) free_camera_metadata(mDefaultMetadata[i]); 2425821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2435821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) pthread_cond_destroy(&mRequestCond); 2445821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2455821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) pthread_mutex_destroy(&mMutex); 2465821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) ALOGV("%s: X", __func__); 2475821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)} 2485821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2495821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)/*=========================================================================== 2505821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * FUNCTION : openCamera 2515821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * 2525821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * DESCRIPTION: open camera 2535821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * 2545821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * PARAMETERS : 2555821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * @hw_device : double ptr for camera device struct 2565821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * 2575821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * RETURN : int32_t type of status 2585821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * NO_ERROR -- success 2595821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) * none-zero failure code 2605821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) *==========================================================================*/ 2615821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device) 
262ca12bfac764ba476d6cd062bf1dde12cc64c3f40Ben Murdoch{ 263ca12bfac764ba476d6cd062bf1dde12cc64c3f40Ben Murdoch int rc = 0; 264ca12bfac764ba476d6cd062bf1dde12cc64c3f40Ben Murdoch pthread_mutex_lock(&mCameraSessionLock); 265ca12bfac764ba476d6cd062bf1dde12cc64c3f40Ben Murdoch if (mCameraSessionActive) { 266ca12bfac764ba476d6cd062bf1dde12cc64c3f40Ben Murdoch ALOGE("%s: multiple simultaneous camera instance not supported", __func__); 2675821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) pthread_mutex_unlock(&mCameraSessionLock); 2685821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) return INVALID_OPERATION; 2695821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) } 2705821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2715821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) if (mCameraOpened) { 2725821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) *hw_device = NULL; 2735821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) return PERMISSION_DENIED; 2745821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) } 2755821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2765821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) rc = openCamera(); 2775821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) if (rc == 0) { 2785821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) *hw_device = &mCameraDevice.common; 2795821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) mCameraSessionActive = 1; 2805821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) } else 2815821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) *hw_device = NULL; 2825821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) 2835821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles)#ifdef HAS_MULTIMEDIA_HINTS 2845821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) if (rc == 0) { 2855821806d5e7f356e8fa4b058a389a808ea183019Torne (Richard Coles) if (m_pPowerModule) { 
            if (m_pPowerModule->powerHint) {
                // Hint the power HAL that a video-encode-class use case is
                // starting ("state=1"); balanced by "state=0" in closeCamera().
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                    (void *)"state=1");
            }
        }
    }
#endif
    pthread_mutex_unlock(&mCameraSessionLock);
    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    // Refuse a second open on the same instance; caller must close first.
    if (mCameraHandle) {
        ALOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }
    mCameraHandle = camera_open(mCameraId);
    if (!mCameraHandle) {
        ALOGE("camera_open failed.");
        return UNKNOWN_ERROR;
    }

    mCameraOpened = true;

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    int rc = NO_ERROR;

    // NOTE(review): mCameraHandle is dereferenced without a NULL check —
    // assumes openCamera() succeeded before closeCamera() is called; confirm
    // the caller guarantees this ordering.
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;
    mCameraOpened = false;

#ifdef HAS_MULTIMEDIA_HINTS
    if (rc == NO_ERROR) {
        if (m_pPowerModule) {
            if (m_pPowerModule->powerHint) {
                // Balance the "state=1" hint sent when the session started.
                m_pPowerModule->powerHint(m_pPowerModule, POWER_HINT_VIDEO_ENCODE,
                        (void *)"state=0");
            }
        }
    }
#endif

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    int rc;

    pthread_mutex_lock(&mMutex);

    rc = initParameters();
    if (rc < 0) {
        ALOGE("%s: initParamters failed %d", __func__, rc);
        goto err1;
    }
    //Create metadata channel and initialize it
    mMetadataChannel = new
QCamera3MetadataChannel(mCameraHandle->camera_handle, 383 mCameraHandle->ops, captureResultCb, 384 &gCamCapability[mCameraId]->padding_info, this); 385 if (mMetadataChannel == NULL) { 386 ALOGE("%s: failed to allocate metadata channel", __func__); 387 rc = -ENOMEM; 388 goto err2; 389 } 390 rc = mMetadataChannel->initialize(); 391 if (rc < 0) { 392 ALOGE("%s: metadata channel initialization failed", __func__); 393 goto err3; 394 } 395 396 mCallbackOps = callback_ops; 397 398 pthread_mutex_unlock(&mMutex); 399 mCameraInitialized = true; 400 return 0; 401 402err3: 403 delete mMetadataChannel; 404 mMetadataChannel = NULL; 405err2: 406 deinitParameters(); 407err1: 408 pthread_mutex_unlock(&mMutex); 409 return rc; 410} 411 412/*=========================================================================== 413 * FUNCTION : configureStreams 414 * 415 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input 416 * and output streams. 417 * 418 * PARAMETERS : 419 * @stream_list : streams to be configured 420 * 421 * RETURN : 422 * 423 *==========================================================================*/ 424int QCamera3HardwareInterface::configureStreams( 425 camera3_stream_configuration_t *streamList) 426{ 427 int rc = 0; 428 pthread_mutex_lock(&mMutex); 429 // Sanity check stream_list 430 if (streamList == NULL) { 431 ALOGE("%s: NULL stream configuration", __func__); 432 pthread_mutex_unlock(&mMutex); 433 return BAD_VALUE; 434 } 435 436 if (streamList->streams == NULL) { 437 ALOGE("%s: NULL stream list", __func__); 438 pthread_mutex_unlock(&mMutex); 439 return BAD_VALUE; 440 } 441 442 if (streamList->num_streams < 1) { 443 ALOGE("%s: Bad number of streams requested: %d", __func__, 444 streamList->num_streams); 445 pthread_mutex_unlock(&mMutex); 446 return BAD_VALUE; 447 } 448 449 camera3_stream_t *inputStream = NULL; 450 camera3_stream_t *jpegStream = NULL; 451 /* first invalidate all the steams in the mStreamList 452 * if they appear again, they 
will be validated */ 453 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 454 it != mStreamInfo.end(); it++) { 455 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv; 456 channel->stop(); 457 (*it)->status = INVALID; 458 } 459 460 for (size_t i = 0; i < streamList->num_streams; i++) { 461 camera3_stream_t *newStream = streamList->streams[i]; 462 ALOGV("%s: newStream type = %d, stream format = %d stream size : %d x %d", 463 __func__, newStream->stream_type, newStream->format, 464 newStream->width, newStream->height); 465 //if the stream is in the mStreamList validate it 466 bool stream_exists = false; 467 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 468 it != mStreamInfo.end(); it++) { 469 if ((*it)->stream == newStream) { 470 QCamera3Channel *channel = 471 (QCamera3Channel*)(*it)->stream->priv; 472 stream_exists = true; 473 (*it)->status = RECONFIGURE; 474 /*delete the channel object associated with the stream because 475 we need to reconfigure*/ 476 delete channel; 477 (*it)->stream->priv = NULL; 478 } 479 } 480 if (!stream_exists) { 481 //new stream 482 stream_info_t* stream_info; 483 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t)); 484 stream_info->stream = newStream; 485 stream_info->status = VALID; 486 stream_info->registered = 0; 487 mStreamInfo.push_back(stream_info); 488 } 489 if (newStream->stream_type == CAMERA3_STREAM_INPUT 490 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) { 491 if (inputStream != NULL) { 492 ALOGE("%s: Multiple input streams requested!", __func__); 493 pthread_mutex_unlock(&mMutex); 494 return BAD_VALUE; 495 } 496 inputStream = newStream; 497 } 498 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) { 499 jpegStream = newStream; 500 } 501 } 502 mInputStream = inputStream; 503 504 /*clean up invalid streams*/ 505 for (List<stream_info_t*>::iterator it=mStreamInfo.begin(); 506 it != mStreamInfo.end();) { 507 if(((*it)->status) == INVALID){ 508 QCamera3Channel *channel = 
(QCamera3Channel*)(*it)->stream->priv; 509 delete channel; 510 delete[] (buffer_handle_t*)(*it)->buffer_set.buffers; 511 free(*it); 512 it = mStreamInfo.erase(it); 513 } else { 514 it++; 515 } 516 } 517 518 //mMetadataChannel->stop(); 519 520 /* Allocate channel objects for the requested streams */ 521 for (size_t i = 0; i < streamList->num_streams; i++) { 522 camera3_stream_t *newStream = streamList->streams[i]; 523 if (newStream->priv == NULL) { 524 //New stream, construct channel 525 switch (newStream->stream_type) { 526 case CAMERA3_STREAM_INPUT: 527 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ; 528 break; 529 case CAMERA3_STREAM_BIDIRECTIONAL: 530 newStream->usage = GRALLOC_USAGE_HW_CAMERA_READ | 531 GRALLOC_USAGE_HW_CAMERA_WRITE; 532 break; 533 case CAMERA3_STREAM_OUTPUT: 534 newStream->usage = GRALLOC_USAGE_HW_CAMERA_WRITE; 535 break; 536 default: 537 ALOGE("%s: Invalid stream_type %d", __func__, newStream->stream_type); 538 break; 539 } 540 541 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT || 542 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) { 543 QCamera3Channel *channel; 544 switch (newStream->format) { 545 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: 546 case HAL_PIXEL_FORMAT_YCbCr_420_888: 547 newStream->max_buffers = QCamera3RegularChannel::kMaxBuffers; 548 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL && 549 jpegStream) { 550 uint32_t width = jpegStream->width; 551 uint32_t height = jpegStream->height; 552 mIsZslMode = true; 553 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle, 554 mCameraHandle->ops, captureResultCb, 555 &gCamCapability[mCameraId]->padding_info, this, newStream, 556 width, height); 557 } else 558 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle, 559 mCameraHandle->ops, captureResultCb, 560 &gCamCapability[mCameraId]->padding_info, this, newStream); 561 if (channel == NULL) { 562 ALOGE("%s: allocation of channel failed", __func__); 563 pthread_mutex_unlock(&mMutex); 
564 return -ENOMEM; 565 } 566 567 newStream->priv = channel; 568 break; 569 case HAL_PIXEL_FORMAT_BLOB: 570 newStream->max_buffers = QCamera3PicChannel::kMaxBuffers; 571 mPictureChannel = new QCamera3PicChannel(mCameraHandle->camera_handle, 572 mCameraHandle->ops, captureResultCb, 573 &gCamCapability[mCameraId]->padding_info, this, newStream); 574 if (mPictureChannel == NULL) { 575 ALOGE("%s: allocation of channel failed", __func__); 576 pthread_mutex_unlock(&mMutex); 577 return -ENOMEM; 578 } 579 newStream->priv = (QCamera3Channel*)mPictureChannel; 580 break; 581 582 //TODO: Add support for app consumed format? 583 default: 584 ALOGE("%s: not a supported format 0x%x", __func__, newStream->format); 585 break; 586 } 587 } 588 } else { 589 // Channel already exists for this stream 590 // Do nothing for now 591 } 592 } 593 /*For the streams to be reconfigured we need to register the buffers 594 since the framework wont*/ 595 for (List<stream_info_t *>::iterator it = mStreamInfo.begin(); 596 it != mStreamInfo.end(); it++) { 597 if ((*it)->status == RECONFIGURE) { 598 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv; 599 /*only register buffers for streams that have already been 600 registered*/ 601 if ((*it)->registered) { 602 rc = channel->registerBuffers((*it)->buffer_set.num_buffers, 603 (*it)->buffer_set.buffers); 604 if (rc != NO_ERROR) { 605 ALOGE("%s: Failed to register the buffers of old stream,\ 606 rc = %d", __func__, rc); 607 } 608 ALOGV("%s: channel %p has %d buffers", 609 __func__, channel, (*it)->buffer_set.num_buffers); 610 } 611 } 612 613 ssize_t index = mPendingBuffersMap.indexOfKey((*it)->stream); 614 if (index == NAME_NOT_FOUND) { 615 mPendingBuffersMap.add((*it)->stream, 0); 616 } else { 617 mPendingBuffersMap.editValueAt(index) = 0; 618 } 619 } 620 621 /* Initialize mPendingRequestInfo and mPendnigBuffersMap */ 622 mPendingRequestsList.clear(); 623 624 //settings/parameters don't carry over for new configureStreams 625 
memset(mParameters, 0, sizeof(parm_buffer_t)); 626 mFirstRequest = true; 627 628 pthread_mutex_unlock(&mMutex); 629 return rc; 630} 631 632/*=========================================================================== 633 * FUNCTION : validateCaptureRequest 634 * 635 * DESCRIPTION: validate a capture request from camera service 636 * 637 * PARAMETERS : 638 * @request : request from framework to process 639 * 640 * RETURN : 641 * 642 *==========================================================================*/ 643int QCamera3HardwareInterface::validateCaptureRequest( 644 camera3_capture_request_t *request) 645{ 646 ssize_t idx = 0; 647 const camera3_stream_buffer_t *b; 648 CameraMetadata meta; 649 650 /* Sanity check the request */ 651 if (request == NULL) { 652 ALOGE("%s: NULL capture request", __func__); 653 return BAD_VALUE; 654 } 655 656 uint32_t frameNumber = request->frame_number; 657 if (request->input_buffer != NULL && 658 request->input_buffer->stream != mInputStream) { 659 ALOGE("%s: Request %d: Input buffer not from input stream!", 660 __FUNCTION__, frameNumber); 661 return BAD_VALUE; 662 } 663 if (request->num_output_buffers < 1 || request->output_buffers == NULL) { 664 ALOGE("%s: Request %d: No output buffers provided!", 665 __FUNCTION__, frameNumber); 666 return BAD_VALUE; 667 } 668 if (request->input_buffer != NULL) { 669 b = request->input_buffer; 670 QCamera3Channel *channel = 671 static_cast<QCamera3Channel*>(b->stream->priv); 672 if (channel == NULL) { 673 ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!", 674 __func__, frameNumber, idx); 675 return BAD_VALUE; 676 } 677 if (b->status != CAMERA3_BUFFER_STATUS_OK) { 678 ALOGE("%s: Request %d: Buffer %d: Status not OK!", 679 __func__, frameNumber, idx); 680 return BAD_VALUE; 681 } 682 if (b->release_fence != -1) { 683 ALOGE("%s: Request %d: Buffer %d: Has a release fence!", 684 __func__, frameNumber, idx); 685 return BAD_VALUE; 686 } 687 if (b->buffer == NULL) { 688 ALOGE("%s: Request %d: Buffer 
%d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
    }

    // Validate all buffers
    b = request->output_buffers;
    do {
        // Every output buffer must target a configured stream, arrive with
        // OK status, no release fence, and a non-NULL handle.
        QCamera3Channel *channel =
            static_cast<QCamera3Channel*>(b->stream->priv);
        if (channel == NULL) {
            ALOGE("%s: Request %d: Buffer %d: Unconfigured stream!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->status != CAMERA3_BUFFER_STATUS_OK) {
            ALOGE("%s: Request %d: Buffer %d: Status not OK!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->release_fence != -1) {
            ALOGE("%s: Request %d: Buffer %d: Has a release fence!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        if (b->buffer == NULL) {
            ALOGE("%s: Request %d: Buffer %d: NULL buffer handle!",
                    __func__, frameNumber, idx);
            return BAD_VALUE;
        }
        idx++;
        b = request->output_buffers + idx;
    } while (idx < (ssize_t)request->num_output_buffers);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : registerStreamBuffers
 *
 * DESCRIPTION: Register buffers for a given stream with the HAL device.
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::registerStreamBuffers(
        const camera3_stream_buffer_set_t *buffer_set)
{
    int rc = 0;

    pthread_mutex_lock(&mMutex);

    if (buffer_set == NULL) {
        ALOGE("%s: Invalid buffer_set parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->stream == NULL) {
        ALOGE("%s: Invalid stream parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->num_buffers < 1) {
        ALOGE("%s: Invalid num_buffers %d.", __func__, buffer_set->num_buffers);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }
    if (buffer_set->buffers == NULL) {
        ALOGE("%s: Invalid buffers parameter.", __func__);
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    camera3_stream_t *stream = buffer_set->stream;
    QCamera3Channel *channel = (QCamera3Channel *)stream->priv;

    //set the buffer_set in the mStreamInfo array
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        if ((*it)->stream == stream) {
            // Keep a HAL-side copy of the framework's buffer handles; this
            // array is released in configureStreams when the stream is
            // invalidated.
            // NOTE(review): if the same stream is registered twice, the
            // previous array appears to leak — confirm framework never
            // re-registers without a reconfigure.
            uint32_t numBuffers = buffer_set->num_buffers;
            (*it)->buffer_set.stream = buffer_set->stream;
            (*it)->buffer_set.num_buffers = numBuffers;
            (*it)->buffer_set.buffers = new buffer_handle_t*[numBuffers];
            if ((*it)->buffer_set.buffers == NULL) {
                ALOGE("%s: Failed to allocate buffer_handle_t*", __func__);
                pthread_mutex_unlock(&mMutex);
                return -ENOMEM;
            }
            for (size_t j = 0; j < numBuffers; j++){
                (*it)->buffer_set.buffers[j] = buffer_set->buffers[j];
            }
            (*it)->registered = 1;
        }
    }
    rc = channel->registerBuffers(buffer_set->num_buffers, buffer_set->buffers);
    if (rc < 0) {
        ALOGE("%s: registerBUffers for stream %p failed", __func__,
stream); 790 pthread_mutex_unlock(&mMutex); 791 return -ENODEV; 792 } 793 794 pthread_mutex_unlock(&mMutex); 795 return NO_ERROR; 796} 797 798/*=========================================================================== 799 * FUNCTION : processCaptureRequest 800 * 801 * DESCRIPTION: process a capture request from camera service 802 * 803 * PARAMETERS : 804 * @request : request from framework to process 805 * 806 * RETURN : 807 * 808 *==========================================================================*/ 809int QCamera3HardwareInterface::processCaptureRequest( 810 camera3_capture_request_t *request) 811{ 812 int rc = NO_ERROR; 813 int32_t request_id; 814 CameraMetadata meta; 815 816 pthread_mutex_lock(&mMutex); 817 818 rc = validateCaptureRequest(request); 819 if (rc != NO_ERROR) { 820 ALOGE("%s: incoming request is not valid", __func__); 821 pthread_mutex_unlock(&mMutex); 822 return rc; 823 } 824 825 uint32_t frameNumber = request->frame_number; 826 rc = setFrameParameters(request->frame_number, request->settings); 827 if (rc < 0) { 828 ALOGE("%s: fail to set frame parameters", __func__); 829 pthread_mutex_unlock(&mMutex); 830 return rc; 831 } 832 833 meta = request->settings; 834 if (meta.exists(ANDROID_REQUEST_ID)) { 835 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0]; 836 mCurrentRequestId = request_id; 837 ALOGV("%s: Received request with id: %d",__func__, request_id); 838 } else if (mFirstRequest || mCurrentRequestId == -1){ 839 ALOGE("%s: Unable to find request id field, \ 840 & no previous id available", __func__); 841 return NAME_NOT_FOUND; 842 } else { 843 ALOGV("%s: Re-using old request id", __func__); 844 request_id = mCurrentRequestId; 845 } 846 847 ALOGV("%s: %d, num_output_buffers = %d input_buffer = %p frame_number = %d", 848 __func__, __LINE__, 849 request->num_output_buffers, 850 request->input_buffer, 851 frameNumber); 852 // Acquire all request buffers first 853 int blob_request = 0; 854 for (size_t i = 0; i < 
request->num_output_buffers; i++) { 855 const camera3_stream_buffer_t& output = request->output_buffers[i]; 856 sp<Fence> acquireFence = new Fence(output.acquire_fence); 857 858 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 859 //Call function to store local copy of jpeg data for encode params. 860 blob_request = 1; 861 rc = getJpegSettings(request->settings); 862 if (rc < 0) { 863 ALOGE("%s: failed to get jpeg parameters", __func__); 864 pthread_mutex_unlock(&mMutex); 865 return rc; 866 } 867 } 868 869 rc = acquireFence->wait(Fence::TIMEOUT_NEVER); 870 if (rc != OK) { 871 ALOGE("%s: fence wait failed %d", __func__, rc); 872 pthread_mutex_unlock(&mMutex); 873 return rc; 874 } 875 } 876 877 /* Update pending request list and pending buffers map */ 878 PendingRequestInfo pendingRequest; 879 pendingRequest.frame_number = frameNumber; 880 pendingRequest.num_buffers = request->num_output_buffers; 881 pendingRequest.request_id = request_id; 882 pendingRequest.blob_request = blob_request; 883 884 for (size_t i = 0; i < request->num_output_buffers; i++) { 885 RequestedBufferInfo requestedBuf; 886 requestedBuf.stream = request->output_buffers[i].stream; 887 requestedBuf.buffer = NULL; 888 pendingRequest.buffers.push_back(requestedBuf); 889 890 mPendingBuffersMap.editValueFor(requestedBuf.stream)++; 891 } 892 mPendingRequestsList.push_back(pendingRequest); 893 894 // Notify metadata channel we receive a request 895 mMetadataChannel->request(NULL, frameNumber); 896 897 // Call request on other streams 898 for (size_t i = 0; i < request->num_output_buffers; i++) { 899 const camera3_stream_buffer_t& output = request->output_buffers[i]; 900 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv; 901 mm_camera_buf_def_t *pInputBuffer = NULL; 902 903 if (channel == NULL) { 904 ALOGE("%s: invalid channel pointer for stream", __func__); 905 continue; 906 } 907 908 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) { 909 QCamera3RegularChannel* inputChannel = 
NULL; 910 if(request->input_buffer != NULL){ 911 912 //Try to get the internal format 913 inputChannel = (QCamera3RegularChannel*) 914 request->input_buffer->stream->priv; 915 if(inputChannel == NULL ){ 916 ALOGE("%s: failed to get input channel handle", __func__); 917 } else { 918 pInputBuffer = 919 inputChannel->getInternalFormatBuffer( 920 request->input_buffer->buffer); 921 ALOGD("%s: Input buffer dump",__func__); 922 ALOGD("Stream id: %d", pInputBuffer->stream_id); 923 ALOGD("streamtype:%d", pInputBuffer->stream_type); 924 ALOGD("frame len:%d", pInputBuffer->frame_len); 925 } 926 } 927 rc = channel->request(output.buffer, frameNumber, mJpegSettings, 928 pInputBuffer,(QCamera3Channel*)inputChannel); 929 } else { 930 ALOGV("%s: %d, request with buffer %p, frame_number %d", __func__, 931 __LINE__, output.buffer, frameNumber); 932 rc = channel->request(output.buffer, frameNumber); 933 } 934 if (rc < 0) 935 ALOGE("%s: request failed", __func__); 936 } 937 938 mFirstRequest = false; 939 940 //Block on conditional variable 941 mPendingRequest = 1; 942 while (mPendingRequest == 1) { 943 pthread_cond_wait(&mRequestCond, &mMutex); 944 } 945 946 pthread_mutex_unlock(&mMutex); 947 return rc; 948} 949 950/*=========================================================================== 951 * FUNCTION : getMetadataVendorTagOps 952 * 953 * DESCRIPTION: 954 * 955 * PARAMETERS : 956 * 957 * 958 * RETURN : 959 *==========================================================================*/ 960void QCamera3HardwareInterface::getMetadataVendorTagOps( 961 vendor_tag_query_ops_t* /*ops*/) 962{ 963 /* Enable locks when we eventually add Vendor Tags */ 964 /* 965 pthread_mutex_lock(&mMutex); 966 967 pthread_mutex_unlock(&mMutex); 968 */ 969 return; 970} 971 972/*=========================================================================== 973 * FUNCTION : dump 974 * 975 * DESCRIPTION: 976 * 977 * PARAMETERS : 978 * 979 * 980 * RETURN : 981 
 *==========================================================================*/
void QCamera3HardwareInterface::dump(int /*fd*/)
{
    /*Enable lock when we implement this function*/
    /*
    pthread_mutex_lock(&mMutex);

    pthread_mutex_unlock(&mMutex);
    */
    return;
}


/*===========================================================================
 * FUNCTION   : captureResultCb
 *
 * DESCRIPTION: Callback handler for all capture result
 *              (streams, as well as metadata)
 *
 * PARAMETERS :
 *   @metadata : metadata information
 *   @buffer   : actual gralloc buffer to be returned to frameworks.
 *               NULL if metadata.
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
                camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    pthread_mutex_lock(&mMutex);

    if (metadata_buf) {
        // Metadata path: flush out every pending request whose frame number
        // is <= the one reported by this metadata buffer.
        metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
        int32_t frame_number_valid = *(int32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
        uint32_t pending_requests = *(uint32_t *)POINTER_OF(
            CAM_INTF_META_PENDING_REQUESTS, metadata);
        // NOTE: this local intentionally shadows the frame_number parameter;
        // on the metadata path the frame number comes from the metadata itself.
        uint32_t frame_number = *(uint32_t *)
            POINTER_OF(CAM_INTF_META_FRAME_NUMBER, metadata);
        const struct timeval *tv = (const struct timeval *)
            POINTER_OF(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
        nsecs_t capture_time = (nsecs_t)tv->tv_sec * NSEC_PER_SEC +
            tv->tv_usec * NSEC_PER_USEC;

        if (!frame_number_valid) {
            ALOGV("%s: Not a valid frame number, used as SOF only", __func__);
            mMetadataChannel->bufDone(metadata_buf);
            goto done_metadata;
        }
        ALOGV("%s: valid frame_number = %d, capture_time = %lld", __func__,
                frame_number, capture_time);

        // Go through the pending requests info and send shutter/results to frameworks
        for (List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
            camera3_capture_result_t result;
            camera3_notify_msg_t notify_msg;
            ALOGV("%s: frame_number in the list is %d", __func__, i->frame_number);

            // Flush out all entries with less or equal frame numbers.

            //TODO: Make sure shutter timestamp really reflects shutter timestamp.
            //Right now it's the same as metadata timestamp

            //TODO: When there is metadata drop, how do we derive the timestamp of
            //dropped frames? For now, we fake the dropped timestamp by substracting
            //from the reported timestamp
            nsecs_t current_capture_time = capture_time -
                (frame_number - i->frame_number) * NSEC_PER_33MSEC;

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = current_capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            ALOGV("%s: notify frame_number = %d, capture_time = %lld", __func__,
                    i->frame_number, capture_time);

            // Send empty metadata with already filled buffers for dropped metadata
            // and send valid metadata with already filled buffers for current metadata
            if (i->frame_number < frame_number) {
                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_SENSOR_TIMESTAMP,
                        &current_capture_time, 1);
                dummyMetadata.update(ANDROID_REQUEST_ID,
                        &(i->request_id), 1);
                result.result = dummyMetadata.release();
            } else {
                result.result = translateCbMetadataToResultMetadata(metadata,
                        current_capture_time, i->request_id);
                if (i->blob_request) {
                    //If it is a blob request then send the metadata to the picture channel
                    // (the picture channel takes ownership of metadata_buf here)
                    mPictureChannel->queueMetadata(metadata_buf);

                } else {
                    // Return metadata buffer
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
            if (!result.result) {
                ALOGE("%s: metadata is NULL", __func__);
            }
            result.frame_number = i->frame_number;
            result.num_output_buffers = 0;
            result.output_buffers = NULL;
            // Count the buffers of this request that have already arrived.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->buffer) {
                    result.num_output_buffers++;
                }
            }

            if (result.num_output_buffers > 0) {
                camera3_stream_buffer_t *result_buffers =
                    new camera3_stream_buffer_t[result.num_output_buffers];
                if (!result_buffers) {
                    ALOGE("%s: Fatal error: out of memory", __func__);
                }
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        free(j->buffer);
                        j->buffer = NULL;
                        mPendingBuffersMap.editValueFor(j->stream)--;
                    }
                }
                result.output_buffers = result_buffers;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            } else {
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                ALOGV("%s: meta frame_number = %d, capture_time = %lld",
                        __func__, result.frame_number, current_capture_time);
                free_camera_metadata((camera_metadata_t *)result.result);
            }
            // erase the element from the list
            i = mPendingRequestsList.erase(i);
        }


done_metadata:
        // Re-check stream buffer occupancy; wake processCaptureRequest()
        // only if no stream is at its max_buffers limit and the backend
        // reports no pending requests.
        bool max_buffers_dequeued = false;
        for (size_t i = 0; i < mPendingBuffersMap.size(); i++) {
            const camera3_stream_t *stream = mPendingBuffersMap.keyAt(i);
            uint32_t queued_buffers = mPendingBuffersMap.valueAt(i);
            if (queued_buffers == stream->max_buffers) {
                max_buffers_dequeued = true;
                break;
            }
        }
        if (!max_buffers_dequeued && !pending_requests) {
            // Unblock process_capture_request
            mPendingRequest = 0;
            pthread_cond_signal(&mRequestCond);
        }
    } else {
        // If the frame number doesn't exist in the pending request list,
        // directly send the buffer to the frameworks, and update pending buffers map
        // Otherwise, book-keep the buffer.
        List<PendingRequestInfo>::iterator i = mPendingRequestsList.begin();
        while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
            i++;
        }
        if (i == mPendingRequestsList.end()) {
            // Verify all pending requests frame_numbers are greater
            for (List<PendingRequestInfo>::iterator j = mPendingRequestsList.begin();
                    j != mPendingRequestsList.end(); j++) {
                if (j->frame_number < frame_number) {
                    ALOGE("%s: Error: pending frame number %d is smaller than %d",
                            __func__, j->frame_number, frame_number);
                }
            }
            camera3_capture_result_t result;
            result.result = NULL;
            result.frame_number = frame_number;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            ALOGV("%s: result frame_number = %d, buffer = %p",
                    __func__, frame_number, buffer);
            mPendingBuffersMap.editValueFor(buffer->stream)--;
            mCallbackOps->process_capture_result(mCallbackOps, &result);
        } else {
            // Metadata for this frame hasn't arrived yet: cache a copy of the
            // buffer on the pending request; it is sent with the metadata.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        ALOGE("%s: Error: buffer is already set", __func__);
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        ALOGV("%s: cache buffer %p at result frame_number %d",
                            __func__, buffer, frame_number);
                    }
                }
            }
        }
    }
    pthread_mutex_unlock(&mMutex);
    return;
}

/*===========================================================================
 * FUNCTION   : translateCbMetadataToResultMetadata
 *
 * DESCRIPTION:
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbMetadataToResultMetadata
                                (metadata_buffer_t *metadata, nsecs_t timestamp,
                                 int32_t request_id)
{
    CameraMetadata camMetadata;
    camera_metadata_t* resultMetadata;

    camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
    camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);

    /*CAM_INTF_META_HISTOGRAM - TODO*/
    /*cam_hist_stats_t  *histogram =
      (cam_hist_stats_t *)POINTER_OF(CAM_INTF_META_HISTOGRAM,
      metadata);*/

    /*face detection*/
    cam_face_detection_data_t *faceDetectionInfo =(cam_face_detection_data_t *)
        POINTER_OF(CAM_INTF_META_FACE_DETECTION, metadata);
    uint8_t numFaces = faceDetectionInfo->num_faces_detected;
    // Flatten per-face data into the parallel arrays the framework expects
    // (4 rectangle coords and 6 landmark coords per face).
    int32_t faceIds[numFaces];
    uint8_t faceScores[numFaces];
    int32_t faceRectangles[numFaces * 4];
    int32_t faceLandmarks[numFaces * 6];
    int j = 0, k = 0;
    for (int i = 0; i < numFaces; i++) {
        faceIds[i] = faceDetectionInfo->faces[i].face_id;
        faceScores[i] = faceDetectionInfo->faces[i].score;
        convertToRegions(faceDetectionInfo->faces[i].face_boundary,
                faceRectangles+j, -1);
        convertLandmarks(faceDetectionInfo->faces[i], faceLandmarks+k);
        j+= 4;
        k+= 6;
    }
    if (numFaces > 0) {
        camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores, numFaces);
        camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
            faceRectangles, numFaces*4);
camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS, 1241 faceLandmarks, numFaces*6); 1242 } 1243 1244 uint8_t *color_correct_mode = 1245 (uint8_t *)POINTER_OF(CAM_INTF_META_COLOR_CORRECT_MODE, metadata); 1246 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, color_correct_mode, 1); 1247 1248 int32_t *ae_precapture_id = 1249 (int32_t *)POINTER_OF(CAM_INTF_META_AEC_PRECAPTURE_ID, metadata); 1250 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID, ae_precapture_id, 1); 1251 1252 /*aec regions*/ 1253 cam_area_t *hAeRegions = 1254 (cam_area_t *)POINTER_OF(CAM_INTF_META_AEC_ROI, metadata); 1255 int32_t aeRegions[5]; 1256 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight); 1257 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions, 5); 1258 if(mIsZslMode) { 1259 uint8_t ae_state = ANDROID_CONTROL_AE_STATE_CONVERGED; 1260 camMetadata.update(ANDROID_CONTROL_AE_STATE, &ae_state, 1); 1261 } else { 1262 uint8_t *ae_state = 1263 (uint8_t *)POINTER_OF(CAM_INTF_META_AEC_STATE, metadata); 1264 camMetadata.update(ANDROID_CONTROL_AE_STATE, ae_state, 1); 1265 } 1266 uint8_t *focusMode = 1267 (uint8_t *)POINTER_OF(CAM_INTF_PARM_FOCUS_MODE, metadata); 1268 camMetadata.update(ANDROID_CONTROL_AF_MODE, focusMode, 1); 1269 1270 /*af regions*/ 1271 cam_area_t *hAfRegions = 1272 (cam_area_t *)POINTER_OF(CAM_INTF_META_AF_ROI, metadata); 1273 int32_t afRegions[5]; 1274 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight); 1275 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions, 5); 1276 1277 uint8_t *afState = (uint8_t *)POINTER_OF(CAM_INTF_META_AF_STATE, metadata); 1278 camMetadata.update(ANDROID_CONTROL_AF_STATE, afState, 1); 1279 1280 int32_t *afTriggerId = 1281 (int32_t *)POINTER_OF(CAM_INTF_META_AF_TRIGGER_ID, metadata); 1282 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, afTriggerId, 1); 1283 1284 uint8_t *whiteBalance = 1285 (uint8_t *)POINTER_OF(CAM_INTF_PARM_WHITE_BALANCE, metadata); 1286 camMetadata.update(ANDROID_CONTROL_AWB_MODE, 
whiteBalance, 1); 1287 1288 /*awb regions*/ 1289 cam_area_t *hAwbRegions = 1290 (cam_area_t *)POINTER_OF(CAM_INTF_META_AWB_REGIONS, metadata); 1291 int32_t awbRegions[5]; 1292 convertToRegions(hAwbRegions->rect, awbRegions, hAwbRegions->weight); 1293 camMetadata.update(ANDROID_CONTROL_AWB_REGIONS, awbRegions, 5); 1294 1295 uint8_t *whiteBalanceState = 1296 (uint8_t *)POINTER_OF(CAM_INTF_META_AWB_STATE, metadata); 1297 camMetadata.update(ANDROID_CONTROL_AWB_STATE, whiteBalanceState, 1); 1298 1299 uint8_t *mode = (uint8_t *)POINTER_OF(CAM_INTF_META_MODE, metadata); 1300 camMetadata.update(ANDROID_CONTROL_MODE, mode, 1); 1301 1302 uint8_t *edgeMode = (uint8_t *)POINTER_OF(CAM_INTF_META_EDGE_MODE, metadata); 1303 camMetadata.update(ANDROID_EDGE_MODE, edgeMode, 1); 1304 1305 uint8_t *flashPower = 1306 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_POWER, metadata); 1307 camMetadata.update(ANDROID_FLASH_FIRING_POWER, flashPower, 1); 1308 1309 int64_t *flashFiringTime = 1310 (int64_t *)POINTER_OF(CAM_INTF_META_FLASH_FIRING_TIME, metadata); 1311 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1); 1312 1313 /*int32_t *ledMode = 1314 (int32_t *)POINTER_OF(CAM_INTF_PARM_LED_MODE, metadata); 1315 camMetadata.update(ANDROID_FLASH_FIRING_TIME, ledMode, 1);*/ 1316 1317 uint8_t *flashState = 1318 (uint8_t *)POINTER_OF(CAM_INTF_META_FLASH_STATE, metadata); 1319 camMetadata.update(ANDROID_FLASH_STATE, flashState, 1); 1320 1321 uint8_t *hotPixelMode = 1322 (uint8_t *)POINTER_OF(CAM_INTF_META_HOTPIXEL_MODE, metadata); 1323 camMetadata.update(ANDROID_HOT_PIXEL_MODE, hotPixelMode, 1); 1324 1325 float *lensAperture = 1326 (float *)POINTER_OF(CAM_INTF_META_LENS_APERTURE, metadata); 1327 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1); 1328 1329 float *filterDensity = 1330 (float *)POINTER_OF(CAM_INTF_META_LENS_FILTERDENSITY, metadata); 1331 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1); 1332 1333 float *focalLength = 1334 (float 
*)POINTER_OF(CAM_INTF_META_LENS_FOCAL_LENGTH, metadata); 1335 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1); 1336 1337 float *focusDistance = 1338 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata); 1339 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1); 1340 1341 float *focusRange = 1342 (float *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_RANGE, metadata); 1343 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 1); 1344 1345 uint8_t *opticalStab = 1346 (uint8_t *)POINTER_OF(CAM_INTF_META_LENS_OPT_STAB_MODE, metadata); 1347 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE ,opticalStab, 1); 1348 1349 /*int32_t *focusState = 1350 (int32_t *)POINTER_OF(CAM_INTF_META_LENS_FOCUS_STATE, metadata); 1351 camMetadata.update(ANDROID_LENS_STATE , focusState, 1); //check */ 1352 1353 uint8_t *noiseRedMode = 1354 (uint8_t *)POINTER_OF(CAM_INTF_META_NOISE_REDUCTION_MODE, metadata); 1355 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE , noiseRedMode, 1); 1356 1357 /*CAM_INTF_META_SCALER_CROP_REGION - check size*/ 1358 1359 cam_crop_region_t *hScalerCropRegion =(cam_crop_region_t *) 1360 POINTER_OF(CAM_INTF_META_SCALER_CROP_REGION, metadata); 1361 int32_t scalerCropRegion[4]; 1362 scalerCropRegion[0] = hScalerCropRegion->left; 1363 scalerCropRegion[1] = hScalerCropRegion->top; 1364 scalerCropRegion[2] = hScalerCropRegion->width; 1365 scalerCropRegion[3] = hScalerCropRegion->height; 1366 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4); 1367 1368 int64_t *sensorExpTime = 1369 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata); 1370 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1); 1371 1372 int64_t *sensorFameDuration = 1373 (int64_t *)POINTER_OF(CAM_INTF_META_SENSOR_FRAME_DURATION, metadata); 1374 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1); 1375 1376 int32_t *sensorSensitivity = 1377 (int32_t *)POINTER_OF(CAM_INTF_META_SENSOR_SENSITIVITY, 
metadata); 1378 mMetadataResponse.iso_speed = *sensorSensitivity; 1379 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1); 1380 1381 uint8_t *shadingMode = 1382 (uint8_t *)POINTER_OF(CAM_INTF_META_SHADING_MODE, metadata); 1383 camMetadata.update(ANDROID_SHADING_MODE, shadingMode, 1); 1384 1385 uint8_t *faceDetectMode = 1386 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_FACEDETECT_MODE, metadata); 1387 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, faceDetectMode, 1); 1388 1389 uint8_t *histogramMode = 1390 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata); 1391 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, histogramMode, 1); 1392 1393 uint8_t *sharpnessMapMode = 1394 (uint8_t *)POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata); 1395 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, 1396 sharpnessMapMode, 1); 1397 1398 /*CAM_INTF_META_STATS_SHARPNESS_MAP - check size*/ 1399 cam_sharpness_map_t *sharpnessMap = (cam_sharpness_map_t *) 1400 POINTER_OF(CAM_INTF_META_STATS_SHARPNESS_MAP, metadata); 1401 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, 1402 (int32_t*)sharpnessMap->sharpness, 1403 CAM_MAX_MAP_WIDTH*CAM_MAX_MAP_HEIGHT); 1404 1405 cam_lens_shading_map_t *lensShadingMap = (cam_lens_shading_map_t *) 1406 POINTER_OF(CAM_INTF_META_LENS_SHADING_MAP, metadata); 1407 int map_height = gCamCapability[mCameraId]->lens_shading_map_size.height; 1408 int map_width = gCamCapability[mCameraId]->lens_shading_map_size.width; 1409 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP, 1410 (float*)lensShadingMap->lens_shading, 1411 4*map_width*map_height); 1412 1413 cam_color_correct_gains_t *colorCorrectionGains = (cam_color_correct_gains_t*) 1414 POINTER_OF(CAM_INTF_META_COLOR_CORRECT_GAINS, metadata); 1415 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains, 4); 1416 1417 cam_color_correct_matrix_t *colorCorrectionMatrix = (cam_color_correct_matrix_t*) 1418 
POINTER_OF(CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata); 1419 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM, 1420 (camera_metadata_rational_t*)colorCorrectionMatrix->transform_matrix, 3*3); 1421 1422 cam_color_correct_gains_t *predColorCorrectionGains = (cam_color_correct_gains_t*) 1423 POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata); 1424 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, 1425 predColorCorrectionGains->gains, 4); 1426 1427 cam_color_correct_matrix_t *predColorCorrectionMatrix = (cam_color_correct_matrix_t*) 1428 POINTER_OF(CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata); 1429 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM, 1430 (camera_metadata_rational_t*)predColorCorrectionMatrix->transform_matrix, 3*3); 1431 1432 uint8_t *blackLevelLock = (uint8_t*) 1433 POINTER_OF(CAM_INTF_META_BLACK_LEVEL_LOCK, metadata); 1434 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, blackLevelLock, 1); 1435 1436 uint8_t *sceneFlicker = (uint8_t*) 1437 POINTER_OF(CAM_INTF_META_SCENE_FLICKER, metadata); 1438 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, sceneFlicker, 1); 1439 1440 1441 resultMetadata = camMetadata.release(); 1442 return resultMetadata; 1443} 1444 1445/*=========================================================================== 1446 * FUNCTION : convertToRegions 1447 * 1448 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array 1449 * 1450 * PARAMETERS : 1451 * @rect : cam_rect_t struct to convert 1452 * @region : int32_t destination array 1453 * @weight : if we are converting from cam_area_t, weight is valid 1454 * else weight = -1 1455 * 1456 *==========================================================================*/ 1457void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect, int32_t* region, int weight){ 1458 region[0] = rect.left; 1459 region[1] = rect.top; 1460 region[2] = rect.left + rect.width; 1461 region[3] = rect.top + rect.height; 1462 if (weight > 
-1) { 1463 region[4] = weight; 1464 } 1465} 1466 1467/*=========================================================================== 1468 * FUNCTION : convertFromRegions 1469 * 1470 * DESCRIPTION: helper method to convert from array to cam_rect_t 1471 * 1472 * PARAMETERS : 1473 * @rect : cam_rect_t struct to convert 1474 * @region : int32_t destination array 1475 * @weight : if we are converting from cam_area_t, weight is valid 1476 * else weight = -1 1477 * 1478 *==========================================================================*/ 1479void QCamera3HardwareInterface::convertFromRegions(cam_area_t* roi, 1480 const camera_metadata_t *settings, 1481 uint32_t tag){ 1482 CameraMetadata frame_settings; 1483 frame_settings = settings; 1484 int32_t x_min = frame_settings.find(tag).data.i32[0]; 1485 int32_t y_min = frame_settings.find(tag).data.i32[1]; 1486 int32_t x_max = frame_settings.find(tag).data.i32[2]; 1487 int32_t y_max = frame_settings.find(tag).data.i32[3]; 1488 roi->weight = frame_settings.find(tag).data.i32[4]; 1489 roi->rect.left = x_min; 1490 roi->rect.top = y_min; 1491 roi->rect.width = x_max - x_min; 1492 roi->rect.height = y_max - y_min; 1493} 1494 1495/*=========================================================================== 1496 * FUNCTION : resetIfNeededROI 1497 * 1498 * DESCRIPTION: helper method to reset the roi if it is greater than scaler 1499 * crop region 1500 * 1501 * PARAMETERS : 1502 * @roi : cam_area_t struct to resize 1503 * @scalerCropRegion : cam_crop_region_t region to compare against 1504 * 1505 * 1506 *==========================================================================*/ 1507bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi, 1508 const cam_crop_region_t* scalerCropRegion) 1509{ 1510 int32_t roi_x_max = roi->rect.width + roi->rect.left; 1511 int32_t roi_y_max = roi->rect.height + roi->rect.top; 1512 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->top; 1513 int32_t crop_y_max = 
scalerCropRegion->height + scalerCropRegion->left; 1514 if ((roi_x_max < scalerCropRegion->left) || 1515 (roi_y_max < scalerCropRegion->top) || 1516 (roi->rect.left > crop_x_max) || 1517 (roi->rect.top > crop_y_max)){ 1518 return false; 1519 } 1520 if (roi->rect.left < scalerCropRegion->left) { 1521 roi->rect.left = scalerCropRegion->left; 1522 } 1523 if (roi->rect.top < scalerCropRegion->top) { 1524 roi->rect.top = scalerCropRegion->top; 1525 } 1526 if (roi_x_max > crop_x_max) { 1527 roi_x_max = crop_x_max; 1528 } 1529 if (roi_y_max > crop_y_max) { 1530 roi_y_max = crop_y_max; 1531 } 1532 roi->rect.width = roi_x_max - roi->rect.left; 1533 roi->rect.height = roi_y_max - roi->rect.top; 1534 return true; 1535} 1536 1537/*=========================================================================== 1538 * FUNCTION : convertLandmarks 1539 * 1540 * DESCRIPTION: helper method to extract the landmarks from face detection info 1541 * 1542 * PARAMETERS : 1543 * @face : cam_rect_t struct to convert 1544 * @landmarks : int32_t destination array 1545 * 1546 * 1547 *==========================================================================*/ 1548void QCamera3HardwareInterface::convertLandmarks(cam_face_detection_info_t face, int32_t* landmarks) 1549{ 1550 landmarks[0] = face.left_eye_center.x; 1551 landmarks[1] = face.left_eye_center.y; 1552 landmarks[2] = face.right_eye_center.y; 1553 landmarks[3] = face.right_eye_center.y; 1554 landmarks[4] = face.mouth_center.x; 1555 landmarks[5] = face.mouth_center.y; 1556} 1557 1558#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX ) 1559/*=========================================================================== 1560 * FUNCTION : initCapabilities 1561 * 1562 * DESCRIPTION: initialize camera capabilities in static data struct 1563 * 1564 * PARAMETERS : 1565 * @cameraId : camera Id 1566 * 1567 * RETURN : int32_t type of status 1568 * NO_ERROR -- success 1569 * none-zero failure code 1570 
*==========================================================================*/ 1571int QCamera3HardwareInterface::initCapabilities(int cameraId) 1572{ 1573 int rc = 0; 1574 mm_camera_vtbl_t *cameraHandle = NULL; 1575 QCamera3HeapMemory *capabilityHeap = NULL; 1576 1577 cameraHandle = camera_open(cameraId); 1578 if (!cameraHandle) { 1579 ALOGE("%s: camera_open failed", __func__); 1580 rc = -1; 1581 goto open_failed; 1582 } 1583 1584 capabilityHeap = new QCamera3HeapMemory(); 1585 if (capabilityHeap == NULL) { 1586 ALOGE("%s: creation of capabilityHeap failed", __func__); 1587 goto heap_creation_failed; 1588 } 1589 /* Allocate memory for capability buffer */ 1590 rc = capabilityHeap->allocate(1, sizeof(cam_capability_t), false); 1591 if(rc != OK) { 1592 ALOGE("%s: No memory for cappability", __func__); 1593 goto allocate_failed; 1594 } 1595 1596 /* Map memory for capability buffer */ 1597 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t)); 1598 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle, 1599 CAM_MAPPING_BUF_TYPE_CAPABILITY, 1600 capabilityHeap->getFd(0), 1601 sizeof(cam_capability_t)); 1602 if(rc < 0) { 1603 ALOGE("%s: failed to map capability buffer", __func__); 1604 goto map_failed; 1605 } 1606 1607 /* Query Capability */ 1608 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle); 1609 if(rc < 0) { 1610 ALOGE("%s: failed to query capability",__func__); 1611 goto query_failed; 1612 } 1613 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t)); 1614 if (!gCamCapability[cameraId]) { 1615 ALOGE("%s: out of memory", __func__); 1616 goto query_failed; 1617 } 1618 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0), 1619 sizeof(cam_capability_t)); 1620 rc = 0; 1621 1622query_failed: 1623 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle, 1624 CAM_MAPPING_BUF_TYPE_CAPABILITY); 1625map_failed: 1626 capabilityHeap->deallocate(); 1627allocate_failed: 1628 delete capabilityHeap; 
1629heap_creation_failed: 1630 cameraHandle->ops->close_camera(cameraHandle->camera_handle); 1631 cameraHandle = NULL; 1632open_failed: 1633 return rc; 1634} 1635 1636/*=========================================================================== 1637 * FUNCTION : initParameters 1638 * 1639 * DESCRIPTION: initialize camera parameters 1640 * 1641 * PARAMETERS : 1642 * 1643 * RETURN : int32_t type of status 1644 * NO_ERROR -- success 1645 * none-zero failure code 1646 *==========================================================================*/ 1647int QCamera3HardwareInterface::initParameters() 1648{ 1649 int rc = 0; 1650 1651 //Allocate Set Param Buffer 1652 mParamHeap = new QCamera3HeapMemory(); 1653 rc = mParamHeap->allocate(1, sizeof(parm_buffer_t), false); 1654 if(rc != OK) { 1655 rc = NO_MEMORY; 1656 ALOGE("Failed to allocate SETPARM Heap memory"); 1657 delete mParamHeap; 1658 mParamHeap = NULL; 1659 return rc; 1660 } 1661 1662 //Map memory for parameters buffer 1663 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle, 1664 CAM_MAPPING_BUF_TYPE_PARM_BUF, 1665 mParamHeap->getFd(0), 1666 sizeof(parm_buffer_t)); 1667 if(rc < 0) { 1668 ALOGE("%s:failed to map SETPARM buffer",__func__); 1669 rc = FAILED_TRANSACTION; 1670 mParamHeap->deallocate(); 1671 delete mParamHeap; 1672 mParamHeap = NULL; 1673 return rc; 1674 } 1675 1676 mParameters = (parm_buffer_t*) DATA_PTR(mParamHeap,0); 1677 return rc; 1678} 1679 1680/*=========================================================================== 1681 * FUNCTION : deinitParameters 1682 * 1683 * DESCRIPTION: de-initialize camera parameters 1684 * 1685 * PARAMETERS : 1686 * 1687 * RETURN : NONE 1688 *==========================================================================*/ 1689void QCamera3HardwareInterface::deinitParameters() 1690{ 1691 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle, 1692 CAM_MAPPING_BUF_TYPE_PARM_BUF); 1693 1694 mParamHeap->deallocate(); 1695 delete mParamHeap; 1696 mParamHeap = NULL; 
1697 1698 mParameters = NULL; 1699} 1700 1701/*=========================================================================== 1702 * FUNCTION : calcMaxJpegSize 1703 * 1704 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId 1705 * 1706 * PARAMETERS : 1707 * 1708 * RETURN : max_jpeg_size 1709 *==========================================================================*/ 1710int QCamera3HardwareInterface::calcMaxJpegSize() 1711{ 1712 int32_t max_jpeg_size = 0; 1713 int temp_width, temp_height; 1714 for (int i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) { 1715 temp_width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width; 1716 temp_height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height; 1717 if (temp_width * temp_height > max_jpeg_size ) { 1718 max_jpeg_size = temp_width * temp_height; 1719 } 1720 } 1721 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1722 return max_jpeg_size; 1723} 1724 1725/*=========================================================================== 1726 * FUNCTION : initStaticMetadata 1727 * 1728 * DESCRIPTION: initialize the static metadata 1729 * 1730 * PARAMETERS : 1731 * @cameraId : camera Id 1732 * 1733 * RETURN : int32_t type of status 1734 * 0 -- success 1735 * non-zero failure code 1736 *==========================================================================*/ 1737int QCamera3HardwareInterface::initStaticMetadata(int cameraId) 1738{ 1739 int rc = 0; 1740 CameraMetadata staticInfo; 1741 1742 /* android.info: hardware level */ 1743 uint8_t supportedHardwareLevel = ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL; 1744 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL, 1745 &supportedHardwareLevel, 1); 1746 1747 int facingBack = gCamCapability[cameraId]->position == CAM_POSITION_BACK; 1748 /*HAL 3 only*/ 1749 /*staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1750 &gCamCapability[cameraId]->min_focus_distance, 1); */ 1751 1752 /*hard coded for now but this should 
come from sensor*/ 1753 float min_focus_distance; 1754 if(facingBack){ 1755 min_focus_distance = 10; 1756 } else { 1757 min_focus_distance = 0; 1758 } 1759 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE, 1760 &min_focus_distance, 1); 1761 1762 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, 1763 &gCamCapability[cameraId]->hyper_focal_distance, 1); 1764 1765 /*should be using focal lengths but sensor doesn't provide that info now*/ 1766 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS, 1767 &gCamCapability[cameraId]->focal_length, 1768 1); 1769 1770 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES, 1771 gCamCapability[cameraId]->apertures, 1772 gCamCapability[cameraId]->apertures_count); 1773 1774 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES, 1775 gCamCapability[cameraId]->filter_densities, 1776 gCamCapability[cameraId]->filter_densities_count); 1777 1778 1779 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION, 1780 (uint8_t*)gCamCapability[cameraId]->optical_stab_modes, 1781 gCamCapability[cameraId]->optical_stab_modes_count); 1782 1783 staticInfo.update(ANDROID_LENS_POSITION, 1784 gCamCapability[cameraId]->lens_position, 1785 sizeof(gCamCapability[cameraId]->lens_position)/ sizeof(float)); 1786 1787 int32_t lens_shading_map_size[] = {gCamCapability[cameraId]->lens_shading_map_size.width, 1788 gCamCapability[cameraId]->lens_shading_map_size.height}; 1789 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE, 1790 lens_shading_map_size, 1791 sizeof(lens_shading_map_size)/sizeof(int32_t)); 1792 1793 int32_t geo_correction_map_size[] = {gCamCapability[cameraId]->geo_correction_map_size.width, 1794 gCamCapability[cameraId]->geo_correction_map_size.height}; 1795 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP_SIZE, 1796 geo_correction_map_size, 1797 sizeof(geo_correction_map_size)/sizeof(int32_t)); 1798 1799 staticInfo.update(ANDROID_LENS_INFO_GEOMETRIC_CORRECTION_MAP, 1800 
gCamCapability[cameraId]->geo_correction_map, 1801 sizeof(gCamCapability[cameraId]->geo_correction_map)/sizeof(float)); 1802 1803 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE, 1804 gCamCapability[cameraId]->sensor_physical_size, 2); 1805 1806 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, 1807 gCamCapability[cameraId]->exposure_time_range, 2); 1808 1809 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION, 1810 &gCamCapability[cameraId]->max_frame_duration, 1); 1811 1812 1813 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT, 1814 (uint8_t*)&gCamCapability[cameraId]->color_arrangement, 1); 1815 1816 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width, 1817 gCamCapability[cameraId]->pixel_array_size.height}; 1818 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE, 1819 pixel_array_size, 2); 1820 1821 int32_t active_array_size[] = {0, 0, 1822 gCamCapability[cameraId]->active_array_size.width, 1823 gCamCapability[cameraId]->active_array_size.height}; 1824 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE, 1825 active_array_size, 4); 1826 1827 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL, 1828 &gCamCapability[cameraId]->white_level, 1); 1829 1830 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN, 1831 gCamCapability[cameraId]->black_level_pattern, 4); 1832 1833 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION, 1834 &gCamCapability[cameraId]->flash_charge_duration, 1); 1835 1836 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS, 1837 &gCamCapability[cameraId]->max_tone_map_curve_points, 1); 1838 1839 /*staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1840 (int*)&gCamCapability[cameraId]->max_face_detection_count, 1);*/ 1841 /*hardcode 0 for now*/ 1842 int32_t max_face_count = 0; 1843 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, 1844 &max_face_count, 1); 1845 1846 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT, 1847 
&gCamCapability[cameraId]->histogram_size, 1); 1848 1849 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT, 1850 &gCamCapability[cameraId]->max_histogram_count, 1); 1851 1852 int32_t sharpness_map_size[] = {gCamCapability[cameraId]->sharpness_map_size.width, 1853 gCamCapability[cameraId]->sharpness_map_size.height}; 1854 1855 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, 1856 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t)); 1857 1858 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE, 1859 &gCamCapability[cameraId]->max_sharpness_map_value, 1); 1860 1861 1862 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_MIN_DURATIONS, 1863 &gCamCapability[cameraId]->raw_min_duration, 1864 1); 1865 1866 int32_t scalar_formats[] = {HAL_PIXEL_FORMAT_YCbCr_420_888, 1867 HAL_PIXEL_FORMAT_BLOB}; 1868 int scalar_formats_count = sizeof(scalar_formats)/sizeof(int32_t); 1869 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, 1870 scalar_formats, 1871 scalar_formats_count); 1872 1873 int32_t available_processed_sizes[CAM_FORMAT_MAX * 2]; 1874 makeTable(gCamCapability[cameraId]->supported_sizes_tbl, 1875 gCamCapability[cameraId]->supported_sizes_tbl_cnt, 1876 available_processed_sizes); 1877 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES, 1878 available_processed_sizes, 1879 (gCamCapability[cameraId]->supported_sizes_tbl_cnt) * 2); 1880 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS, 1881 &gCamCapability[cameraId]->min_duration[0], 1882 gCamCapability[cameraId]->supported_sizes_tbl_cnt); 1883 1884 int32_t available_fps_ranges[MAX_SIZES_CNT * 2]; 1885 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl, 1886 gCamCapability[cameraId]->fps_ranges_tbl_cnt, 1887 available_fps_ranges); 1888 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES, 1889 available_fps_ranges, (gCamCapability[cameraId]->fps_ranges_tbl_cnt*2) ); 1890 1891 camera_metadata_rational exposureCompensationStep = { 1892 
gCamCapability[cameraId]->exp_compensation_step.numerator, 1893 gCamCapability[cameraId]->exp_compensation_step.denominator}; 1894 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP, 1895 &exposureCompensationStep, 1); 1896 1897 /*TO DO*/ 1898 uint8_t availableVstabModes[] = {ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF}; 1899 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES, 1900 availableVstabModes, sizeof(availableVstabModes)); 1901 1902 /*HAL 1 and HAL 3 common*/ 1903 float maxZoom = 4; 1904 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM, 1905 &maxZoom, 1); 1906 1907 int32_t max3aRegions = 1; 1908 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS, 1909 &max3aRegions, 1); 1910 1911 uint8_t availableFaceDetectModes[] = { 1912 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF }; 1913 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES, 1914 availableFaceDetectModes, 1915 sizeof(availableFaceDetectModes)); 1916 1917 int32_t raw_size[] = {gCamCapability[cameraId]->raw_dim.width, 1918 gCamCapability[cameraId]->raw_dim.height}; 1919 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES, 1920 raw_size, 1921 sizeof(raw_size)/sizeof(uint32_t)); 1922 1923 int32_t exposureCompensationRange[] = {gCamCapability[cameraId]->exposure_compensation_min, 1924 gCamCapability[cameraId]->exposure_compensation_max}; 1925 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE, 1926 exposureCompensationRange, 1927 sizeof(exposureCompensationRange)/sizeof(int32_t)); 1928 1929 uint8_t lensFacing = (facingBack) ? 
1930 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT; 1931 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1); 1932 1933 int32_t available_jpeg_sizes[MAX_SIZES_CNT * 2]; 1934 makeTable(gCamCapability[cameraId]->picture_sizes_tbl, 1935 gCamCapability[cameraId]->picture_sizes_tbl_cnt, 1936 available_jpeg_sizes); 1937 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_SIZES, 1938 available_jpeg_sizes, 1939 (gCamCapability[cameraId]->picture_sizes_tbl_cnt * 2)); 1940 1941 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES, 1942 available_thumbnail_sizes, 1943 sizeof(available_thumbnail_sizes)/sizeof(int32_t)); 1944 1945 int32_t max_jpeg_size = 0; 1946 int temp_width, temp_height; 1947 for (int i = 0; i < gCamCapability[cameraId]->picture_sizes_tbl_cnt; i++) { 1948 temp_width = gCamCapability[cameraId]->picture_sizes_tbl[i].width; 1949 temp_height = gCamCapability[cameraId]->picture_sizes_tbl[i].height; 1950 if (temp_width * temp_height > max_jpeg_size ) { 1951 max_jpeg_size = temp_width * temp_height; 1952 } 1953 } 1954 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t); 1955 staticInfo.update(ANDROID_JPEG_MAX_SIZE, 1956 &max_jpeg_size, 1); 1957 1958 uint8_t avail_effects[CAM_EFFECT_MODE_MAX]; 1959 int32_t size = 0; 1960 for (int i = 0; i < gCamCapability[cameraId]->supported_effects_cnt; i++) { 1961 int val = lookupFwkName(EFFECT_MODES_MAP, 1962 sizeof(EFFECT_MODES_MAP)/sizeof(EFFECT_MODES_MAP[0]), 1963 gCamCapability[cameraId]->supported_effects[i]); 1964 if (val != NAME_NOT_FOUND) { 1965 avail_effects[size] = (uint8_t)val; 1966 size++; 1967 } 1968 } 1969 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS, 1970 avail_effects, 1971 size); 1972 1973 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX]; 1974 uint8_t supported_indexes[CAM_SCENE_MODE_MAX]; 1975 int32_t supported_scene_modes_cnt = 0; 1976 for (int i = 0; i < gCamCapability[cameraId]->supported_scene_modes_cnt; i++) { 1977 int val = lookupFwkName(SCENE_MODES_MAP, 1978 
sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 1979 gCamCapability[cameraId]->supported_scene_modes[i]); 1980 if (val != NAME_NOT_FOUND) { 1981 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val; 1982 supported_indexes[supported_scene_modes_cnt] = i; 1983 supported_scene_modes_cnt++; 1984 } 1985 } 1986 1987 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES, 1988 avail_scene_modes, 1989 supported_scene_modes_cnt); 1990 1991 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3]; 1992 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides, 1993 supported_scene_modes_cnt, 1994 scene_mode_overrides, 1995 supported_indexes, 1996 cameraId); 1997 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES, 1998 scene_mode_overrides, 1999 supported_scene_modes_cnt*3); 2000 2001 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX]; 2002 size = 0; 2003 for (int i = 0; i < gCamCapability[cameraId]->supported_antibandings_cnt; i++) { 2004 int val = lookupFwkName(ANTIBANDING_MODES_MAP, 2005 sizeof(ANTIBANDING_MODES_MAP)/sizeof(ANTIBANDING_MODES_MAP[0]), 2006 gCamCapability[cameraId]->supported_antibandings[i]); 2007 if (val != NAME_NOT_FOUND) { 2008 avail_antibanding_modes[size] = (uint8_t)val; 2009 size++; 2010 } 2011 2012 } 2013 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES, 2014 avail_antibanding_modes, 2015 size); 2016 2017 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX]; 2018 size = 0; 2019 for (int i = 0; i < gCamCapability[cameraId]->supported_focus_modes_cnt; i++) { 2020 int val = lookupFwkName(FOCUS_MODES_MAP, 2021 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 2022 gCamCapability[cameraId]->supported_focus_modes[i]); 2023 if (val != NAME_NOT_FOUND) { 2024 avail_af_modes[size] = (uint8_t)val; 2025 size++; 2026 } 2027 } 2028 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES, 2029 avail_af_modes, 2030 size); 2031 2032 uint8_t avail_awb_modes[CAM_WB_MODE_MAX]; 2033 size = 0; 2034 for (int i = 0; i < 
gCamCapability[cameraId]->supported_white_balances_cnt; i++) { 2035 int8_t val = lookupFwkName(WHITE_BALANCE_MODES_MAP, 2036 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 2037 gCamCapability[cameraId]->supported_white_balances[i]); 2038 if (val != NAME_NOT_FOUND) { 2039 avail_awb_modes[size] = (uint8_t)val; 2040 size++; 2041 } 2042 } 2043 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES, 2044 avail_awb_modes, 2045 size); 2046 2047 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX]; 2048 for (int i = 0; i < gCamCapability[cameraId]->supported_flash_firing_level_cnt; i++) 2049 available_flash_levels[i] = gCamCapability[cameraId]->supported_firing_levels[i]; 2050 2051 staticInfo.update(ANDROID_FLASH_FIRING_POWER, 2052 available_flash_levels, 2053 gCamCapability[cameraId]->supported_flash_firing_level_cnt); 2054 2055 uint8_t avail_flash_modes[CAM_FLASH_MODE_MAX]; 2056 size = 0; 2057 for (int i = 0; i < gCamCapability[cameraId]->supported_flash_modes_cnt; i++) { 2058 int val = lookupFwkName(FLASH_MODES_MAP, 2059 sizeof(FLASH_MODES_MAP)/sizeof(FLASH_MODES_MAP[0]), 2060 gCamCapability[cameraId]->supported_flash_modes[i]); 2061 if (val != NAME_NOT_FOUND) { 2062 avail_flash_modes[size] = (uint8_t)val; 2063 size++; 2064 } 2065 } 2066 static uint8_t flashAvailable = 0; 2067 if (size > 1) { 2068 //flash is supported 2069 flashAvailable = 1; 2070 } 2071 staticInfo.update(ANDROID_FLASH_MODE, 2072 avail_flash_modes, 2073 size); 2074 2075 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE, 2076 &flashAvailable, 1); 2077 2078 uint8_t avail_ae_modes[5]; 2079 size = 0; 2080 for (int i = 0; i < gCamCapability[cameraId]->supported_ae_modes_cnt; i++) { 2081 avail_ae_modes[i] = gCamCapability[cameraId]->supported_ae_modes[i]; 2082 size++; 2083 } 2084 if (flashAvailable) { 2085 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH; 2086 avail_ae_modes[size++] = ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH; 2087 avail_ae_modes[size++] = 
ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE; 2088 } 2089 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES, 2090 avail_ae_modes, 2091 size); 2092 2093 int32_t sensitivity_range[2]; 2094 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity; 2095 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity; 2096 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, 2097 sensitivity_range, 2098 sizeof(sensitivity_range) / sizeof(int32_t)); 2099 2100 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY, 2101 &gCamCapability[cameraId]->max_analog_sensitivity, 2102 sizeof(int32_t) ); 2103 staticInfo.update(ANDROID_SCALER_AVAILABLE_JPEG_MIN_DURATIONS, 2104 &gCamCapability[cameraId]->jpeg_min_duration[0], 2105 gCamCapability[cameraId]->picture_sizes_tbl_cnt); 2106 2107 gStaticMetadata[cameraId] = staticInfo.release(); 2108 return rc; 2109} 2110 2111/*=========================================================================== 2112 * FUNCTION : makeTable 2113 * 2114 * DESCRIPTION: make a table of sizes 2115 * 2116 * PARAMETERS : 2117 * 2118 * 2119 *==========================================================================*/ 2120void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, uint8_t size, 2121 int32_t* sizeTable) 2122{ 2123 int j = 0; 2124 for (int i = 0; i < size; i++) { 2125 sizeTable[j] = dimTable[i].width; 2126 sizeTable[j+1] = dimTable[i].height; 2127 j+=2; 2128 } 2129} 2130 2131/*=========================================================================== 2132 * FUNCTION : makeFPSTable 2133 * 2134 * DESCRIPTION: make a table of fps ranges 2135 * 2136 * PARAMETERS : 2137 * 2138 *==========================================================================*/ 2139void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, uint8_t size, 2140 int32_t* fpsRangesTable) 2141{ 2142 int j = 0; 2143 for (int i = 0; i < size; i++) { 2144 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps; 2145 
fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps; 2146 j+=2; 2147 } 2148} 2149 2150/*=========================================================================== 2151 * FUNCTION : makeOverridesList 2152 * 2153 * DESCRIPTION: make a list of scene mode overrides 2154 * 2155 * PARAMETERS : 2156 * 2157 * 2158 *==========================================================================*/ 2159void QCamera3HardwareInterface::makeOverridesList(cam_scene_mode_overrides_t* overridesTable, 2160 uint8_t size, uint8_t* overridesList, 2161 uint8_t* supported_indexes, 2162 int camera_id) 2163{ 2164 /*daemon will give a list of overrides for all scene modes. 2165 However we should send the fwk only the overrides for the scene modes 2166 supported by the framework*/ 2167 int j = 0, index = 0, supt = 0; 2168 uint8_t focus_override; 2169 for (int i = 0; i < size; i++) { 2170 supt = 0; 2171 index = supported_indexes[i]; 2172 overridesList[j] = (uint8_t)overridesTable[index].ae_mode; 2173 overridesList[j+1] = (uint8_t)lookupFwkName(WHITE_BALANCE_MODES_MAP, 2174 sizeof(WHITE_BALANCE_MODES_MAP)/sizeof(WHITE_BALANCE_MODES_MAP[0]), 2175 overridesTable[index].awb_mode); 2176 focus_override = (uint8_t)overridesTable[index].af_mode; 2177 for (int k = 0; k < gCamCapability[camera_id]->supported_focus_modes_cnt; k++) { 2178 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) { 2179 supt = 1; 2180 break; 2181 } 2182 } 2183 if (supt) { 2184 overridesList[j+2] = (uint8_t)lookupFwkName(FOCUS_MODES_MAP, 2185 sizeof(FOCUS_MODES_MAP)/sizeof(FOCUS_MODES_MAP[0]), 2186 focus_override); 2187 } else { 2188 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF; 2189 } 2190 j+=3; 2191 } 2192} 2193 2194/*=========================================================================== 2195 * FUNCTION : getPreviewHalPixelFormat 2196 * 2197 * DESCRIPTION: convert the format to type recognized by framework 2198 * 2199 * PARAMETERS : format : the format from backend 2200 * 2201 ** RETURN : format 
recognized by framework 2202 * 2203 *==========================================================================*/ 2204int32_t QCamera3HardwareInterface::getScalarFormat(int32_t format) 2205{ 2206 int32_t halPixelFormat; 2207 2208 switch (format) { 2209 case CAM_FORMAT_YUV_420_NV12: 2210 halPixelFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP; 2211 break; 2212 case CAM_FORMAT_YUV_420_NV21: 2213 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2214 break; 2215 case CAM_FORMAT_YUV_420_NV21_ADRENO: 2216 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO; 2217 break; 2218 case CAM_FORMAT_YUV_420_YV12: 2219 halPixelFormat = HAL_PIXEL_FORMAT_YV12; 2220 break; 2221 case CAM_FORMAT_YUV_422_NV16: 2222 case CAM_FORMAT_YUV_422_NV61: 2223 default: 2224 halPixelFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP; 2225 break; 2226 } 2227 return halPixelFormat; 2228} 2229 2230/*=========================================================================== 2231 * FUNCTION : getSensorSensitivity 2232 * 2233 * DESCRIPTION: convert iso_mode to an integer value 2234 * 2235 * PARAMETERS : iso_mode : the iso_mode supported by sensor 2236 * 2237 ** RETURN : sensitivity supported by sensor 2238 * 2239 *==========================================================================*/ 2240int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode) 2241{ 2242 int32_t sensitivity; 2243 2244 switch (iso_mode) { 2245 case CAM_ISO_MODE_100: 2246 sensitivity = 100; 2247 break; 2248 case CAM_ISO_MODE_200: 2249 sensitivity = 200; 2250 break; 2251 case CAM_ISO_MODE_400: 2252 sensitivity = 400; 2253 break; 2254 case CAM_ISO_MODE_800: 2255 sensitivity = 800; 2256 break; 2257 case CAM_ISO_MODE_1600: 2258 sensitivity = 1600; 2259 break; 2260 default: 2261 sensitivity = -1; 2262 break; 2263 } 2264 return sensitivity; 2265} 2266 2267 2268/*=========================================================================== 2269 * FUNCTION : AddSetParmEntryToBatch 2270 * 2271 * DESCRIPTION: add set parameter entry into batch 2272 
* 2273 * PARAMETERS : 2274 * @p_table : ptr to parameter buffer 2275 * @paramType : parameter type 2276 * @paramLength : length of parameter value 2277 * @paramValue : ptr to parameter value 2278 * 2279 * RETURN : int32_t type of status 2280 * NO_ERROR -- success 2281 * none-zero failure code 2282 *==========================================================================*/ 2283int32_t QCamera3HardwareInterface::AddSetParmEntryToBatch(parm_buffer_t *p_table, 2284 cam_intf_parm_type_t paramType, 2285 uint32_t paramLength, 2286 void *paramValue) 2287{ 2288 int position = paramType; 2289 int current, next; 2290 2291 /************************************************************************* 2292 * Code to take care of linking next flags * 2293 *************************************************************************/ 2294 current = GET_FIRST_PARAM_ID(p_table); 2295 if (position == current){ 2296 //DO NOTHING 2297 } else if (position < current){ 2298 SET_NEXT_PARAM_ID(position, p_table, current); 2299 SET_FIRST_PARAM_ID(p_table, position); 2300 } else { 2301 /* Search for the position in the linked list where we need to slot in*/ 2302 while (position > GET_NEXT_PARAM_ID(current, p_table)) 2303 current = GET_NEXT_PARAM_ID(current, p_table); 2304 2305 /*If node already exists no need to alter linking*/ 2306 if (position != GET_NEXT_PARAM_ID(current, p_table)) { 2307 next = GET_NEXT_PARAM_ID(current, p_table); 2308 SET_NEXT_PARAM_ID(current, p_table, position); 2309 SET_NEXT_PARAM_ID(position, p_table, next); 2310 } 2311 } 2312 2313 /************************************************************************* 2314 * Copy contents into entry * 2315 *************************************************************************/ 2316 2317 if (paramLength > sizeof(parm_type_t)) { 2318 ALOGE("%s:Size of input larger than max entry size",__func__); 2319 return BAD_VALUE; 2320 } 2321 memcpy(POINTER_OF(paramType,p_table), paramValue, paramLength); 2322 return NO_ERROR; 2323} 2324 
2325/*=========================================================================== 2326 * FUNCTION : lookupFwkName 2327 * 2328 * DESCRIPTION: In case the enum is not same in fwk and backend 2329 * make sure the parameter is correctly propogated 2330 * 2331 * PARAMETERS : 2332 * @arr : map between the two enums 2333 * @len : len of the map 2334 * @hal_name : name of the hal_parm to map 2335 * 2336 * RETURN : int type of status 2337 * fwk_name -- success 2338 * none-zero failure code 2339 *==========================================================================*/ 2340int8_t QCamera3HardwareInterface::lookupFwkName(const QCameraMap arr[], 2341 int len, int hal_name) 2342{ 2343 2344 for (int i = 0; i < len; i++) { 2345 if (arr[i].hal_name == hal_name) 2346 return arr[i].fwk_name; 2347 } 2348 2349 /* Not able to find matching framework type is not necessarily 2350 * an error case. This happens when mm-camera supports more attributes 2351 * than the frameworks do */ 2352 ALOGD("%s: Cannot find matching framework type", __func__); 2353 return NAME_NOT_FOUND; 2354} 2355 2356/*=========================================================================== 2357 * FUNCTION : lookupHalName 2358 * 2359 * DESCRIPTION: In case the enum is not same in fwk and backend 2360 * make sure the parameter is correctly propogated 2361 * 2362 * PARAMETERS : 2363 * @arr : map between the two enums 2364 * @len : len of the map 2365 * @fwk_name : name of the hal_parm to map 2366 * 2367 * RETURN : int32_t type of status 2368 * hal_name -- success 2369 * none-zero failure code 2370 *==========================================================================*/ 2371int8_t QCamera3HardwareInterface::lookupHalName(const QCameraMap arr[], 2372 int len, int fwk_name) 2373{ 2374 for (int i = 0; i < len; i++) { 2375 if (arr[i].fwk_name == fwk_name) 2376 return arr[i].hal_name; 2377 } 2378 ALOGE("%s: Cannot find matching hal type", __func__); 2379 return NAME_NOT_FOUND; 2380} 2381 
2382/*=========================================================================== 2383 * FUNCTION : getCapabilities 2384 * 2385 * DESCRIPTION: query camera capabilities 2386 * 2387 * PARAMETERS : 2388 * @cameraId : camera Id 2389 * @info : camera info struct to be filled in with camera capabilities 2390 * 2391 * RETURN : int32_t type of status 2392 * NO_ERROR -- success 2393 * none-zero failure code 2394 *==========================================================================*/ 2395int QCamera3HardwareInterface::getCamInfo(int cameraId, 2396 struct camera_info *info) 2397{ 2398 int rc = 0; 2399 2400 if (NULL == gCamCapability[cameraId]) { 2401 rc = initCapabilities(cameraId); 2402 if (rc < 0) { 2403 //pthread_mutex_unlock(&g_camlock); 2404 return rc; 2405 } 2406 } 2407 2408 if (NULL == gStaticMetadata[cameraId]) { 2409 rc = initStaticMetadata(cameraId); 2410 if (rc < 0) { 2411 return rc; 2412 } 2413 } 2414 2415 switch(gCamCapability[cameraId]->position) { 2416 case CAM_POSITION_BACK: 2417 info->facing = CAMERA_FACING_BACK; 2418 break; 2419 2420 case CAM_POSITION_FRONT: 2421 info->facing = CAMERA_FACING_FRONT; 2422 break; 2423 2424 default: 2425 ALOGE("%s:Unknown position type for camera id:%d", __func__, cameraId); 2426 rc = -1; 2427 break; 2428 } 2429 2430 2431 info->orientation = gCamCapability[cameraId]->sensor_mount_angle; 2432 info->device_version = HARDWARE_DEVICE_API_VERSION(3, 0); 2433 info->static_camera_characteristics = gStaticMetadata[cameraId]; 2434 2435 return rc; 2436} 2437 2438/*=========================================================================== 2439 * FUNCTION : translateMetadata 2440 * 2441 * DESCRIPTION: translate the metadata into camera_metadata_t 2442 * 2443 * PARAMETERS : type of the request 2444 * 2445 * 2446 * RETURN : success: camera_metadata_t* 2447 * failure: NULL 2448 * 2449 *==========================================================================*/ 2450camera_metadata_t* 
QCamera3HardwareInterface::translateCapabilityToMetadata(int type) 2451{ 2452 pthread_mutex_lock(&mMutex); 2453 2454 if (mDefaultMetadata[type] != NULL) { 2455 pthread_mutex_unlock(&mMutex); 2456 return mDefaultMetadata[type]; 2457 } 2458 //first time we are handling this request 2459 //fill up the metadata structure using the wrapper class 2460 CameraMetadata settings; 2461 //translate from cam_capability_t to camera_metadata_tag_t 2462 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE; 2463 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1); 2464 2465 /*control*/ 2466 2467 uint8_t controlIntent = 0; 2468 switch (type) { 2469 case CAMERA3_TEMPLATE_PREVIEW: 2470 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2471 break; 2472 case CAMERA3_TEMPLATE_STILL_CAPTURE: 2473 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 2474 break; 2475 case CAMERA3_TEMPLATE_VIDEO_RECORD: 2476 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 2477 break; 2478 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT: 2479 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 2480 break; 2481 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG: 2482 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG; 2483 break; 2484 default: 2485 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM; 2486 break; 2487 } 2488 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1); 2489 2490 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, 2491 &gCamCapability[mCameraId]->exposure_compensation_default, 1); 2492 2493 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF; 2494 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1); 2495 2496 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 2497 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 2498 2499 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 2500 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 2501 2502 static const uint8_t controlMode = 
ANDROID_CONTROL_MODE_AUTO; 2503 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1); 2504 2505 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 2506 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 2507 2508 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY; //similar to AUTO? 2509 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 2510 2511 static uint8_t focusMode; 2512 if (gCamCapability[mCameraId]->supported_focus_modes_cnt > 1) { 2513 ALOGE("%s: Setting focus mode to auto", __func__); 2514 focusMode = ANDROID_CONTROL_AF_MODE_AUTO; 2515 } else { 2516 ALOGE("%s: Setting focus mode to off", __func__); 2517 focusMode = ANDROID_CONTROL_AF_MODE_OFF; 2518 } 2519 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1); 2520 2521 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; 2522 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1); 2523 2524 /*flash*/ 2525 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 2526 settings.update(ANDROID_FLASH_MODE, &flashMode, 1); 2527 2528 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4; 2529 settings.update(ANDROID_FLASH_FIRING_POWER, 2530 &flashFiringLevel, 1); 2531 2532 /* lens */ 2533 float default_aperture = gCamCapability[mCameraId]->apertures[0]; 2534 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1); 2535 2536 if (gCamCapability[mCameraId]->filter_densities_count) { 2537 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0]; 2538 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 2539 gCamCapability[mCameraId]->filter_densities_count); 2540 } 2541 2542 float default_focal_length = gCamCapability[mCameraId]->focal_length; 2543 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1); 2544 2545 mDefaultMetadata[type] = settings.release(); 2546 2547 pthread_mutex_unlock(&mMutex); 2548 return mDefaultMetadata[type]; 2549} 2550 
2551/*=========================================================================== 2552 * FUNCTION : setFrameParameters 2553 * 2554 * DESCRIPTION: set parameters per frame as requested in the metadata from 2555 * framework 2556 * 2557 * PARAMETERS : 2558 * @settings : frame settings information from framework 2559 * 2560 * 2561 * RETURN : success: NO_ERROR 2562 * failure: 2563 *==========================================================================*/ 2564int QCamera3HardwareInterface::setFrameParameters(int frame_id, 2565 const camera_metadata_t *settings) 2566{ 2567 /*translate from camera_metadata_t type to parm_type_t*/ 2568 int rc = 0; 2569 if (settings == NULL && mFirstRequest) { 2570 /*settings cannot be null for the first request*/ 2571 return BAD_VALUE; 2572 } 2573 2574 int32_t hal_version = CAM_HAL_V3; 2575 2576 memset(mParameters, 0, sizeof(parm_buffer_t)); 2577 mParameters->first_flagged_entry = CAM_INTF_PARM_MAX; 2578 AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_HAL_VERSION, 2579 sizeof(hal_version), &hal_version); 2580 2581 /*we need to update the frame number in the parameters*/ 2582 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FRAME_NUMBER, 2583 sizeof(frame_id), &frame_id); 2584 if (rc < 0) { 2585 ALOGE("%s: Failed to set the frame number in the parameters", __func__); 2586 return BAD_VALUE; 2587 } 2588 2589 if(settings != NULL){ 2590 rc = translateMetadataToParameters(settings); 2591 } 2592 /*set the parameters to backend*/ 2593 mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters); 2594 return rc; 2595} 2596 2597/*=========================================================================== 2598 * FUNCTION : translateMetadataToParameters 2599 * 2600 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t 2601 * 2602 * 2603 * PARAMETERS : 2604 * @settings : frame settings information from framework 2605 * 2606 * 2607 * RETURN : success: NO_ERROR 2608 * failure: 2609 
*==========================================================================*/ 2610int QCamera3HardwareInterface::translateMetadataToParameters 2611 (const camera_metadata_t *settings) 2612{ 2613 int rc = 0; 2614 CameraMetadata frame_settings; 2615 frame_settings = settings; 2616 2617 2618 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) { 2619 int32_t antibandingMode = 2620 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.i32[0]; 2621 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_ANTIBANDING, 2622 sizeof(antibandingMode), &antibandingMode); 2623 } 2624 2625 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) { 2626 int32_t expCompensation = frame_settings.find( 2627 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0]; 2628 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min) 2629 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min; 2630 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max) 2631 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max; 2632 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EXPOSURE_COMPENSATION, 2633 sizeof(expCompensation), &expCompensation); 2634 } 2635 2636 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) { 2637 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0]; 2638 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AEC_LOCK, 2639 sizeof(aeLock), &aeLock); 2640 } 2641 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) { 2642 cam_fps_range_t fps_range; 2643 fps_range.min_fps = 2644 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0]; 2645 fps_range.max_fps = 2646 frame_settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1]; 2647 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FPS_RANGE, 2648 sizeof(fps_range), &fps_range); 2649 } 2650 2651 float focalDistance = -1.0; 2652 if 
(frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) { 2653 focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0]; 2654 rc = AddSetParmEntryToBatch(mParameters, 2655 CAM_INTF_META_LENS_FOCUS_DISTANCE, 2656 sizeof(focalDistance), &focalDistance); 2657 } 2658 2659 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) { 2660 uint8_t fwk_focusMode = 2661 frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0]; 2662 uint8_t focusMode; 2663 if (focalDistance == 0.0 && fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) { 2664 focusMode = CAM_FOCUS_MODE_INFINITY; 2665 } else{ 2666 focusMode = lookupHalName(FOCUS_MODES_MAP, 2667 sizeof(FOCUS_MODES_MAP), 2668 fwk_focusMode); 2669 } 2670 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_FOCUS_MODE, 2671 sizeof(focusMode), &focusMode); 2672 } 2673 2674 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) { 2675 uint8_t awbLock = 2676 frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0]; 2677 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_AWB_LOCK, 2678 sizeof(awbLock), &awbLock); 2679 } 2680 2681 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) { 2682 uint8_t fwk_whiteLevel = 2683 frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0]; 2684 uint8_t whiteLevel = lookupHalName(WHITE_BALANCE_MODES_MAP, 2685 sizeof(WHITE_BALANCE_MODES_MAP), 2686 fwk_whiteLevel); 2687 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_WHITE_BALANCE, 2688 sizeof(whiteLevel), &whiteLevel); 2689 } 2690 2691 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) { 2692 uint8_t fwk_effectMode = 2693 frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0]; 2694 uint8_t effectMode = lookupHalName(EFFECT_MODES_MAP, 2695 sizeof(EFFECT_MODES_MAP), 2696 fwk_effectMode); 2697 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_EFFECT, 2698 sizeof(effectMode), &effectMode); 2699 } 2700 2701 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 2702 uint8_t fwk_aeMode = 2703 
frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 2704 uint8_t aeMode; 2705 int32_t redeye; 2706 2707 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) { 2708 aeMode = CAM_AE_MODE_OFF; 2709 } else { 2710 aeMode = CAM_AE_MODE_ON; 2711 } 2712 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) { 2713 redeye = 1; 2714 } else { 2715 redeye = 0; 2716 } 2717 2718 int32_t flashMode = (int32_t)lookupHalName(AE_FLASH_MODE_MAP, 2719 sizeof(AE_FLASH_MODE_MAP), 2720 fwk_aeMode); 2721 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_MODE, 2722 sizeof(aeMode), &aeMode); 2723 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE, 2724 sizeof(flashMode), &flashMode); 2725 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_REDEYE_REDUCTION, 2726 sizeof(redeye), &redeye); 2727 } 2728 2729 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) { 2730 uint8_t colorCorrectMode = 2731 frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0]; 2732 rc = 2733 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_MODE, 2734 sizeof(colorCorrectMode), &colorCorrectMode); 2735 } 2736 2737 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) { 2738 cam_color_correct_gains_t colorCorrectGains; 2739 for (int i = 0; i < 4; i++) { 2740 colorCorrectGains.gains[i] = 2741 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i]; 2742 } 2743 rc = 2744 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_GAINS, 2745 sizeof(colorCorrectGains), &colorCorrectGains); 2746 } 2747 2748 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) { 2749 cam_color_correct_matrix_t colorCorrectTransform; 2750 cam_rational_type_t transform_elem; 2751 int num = 0; 2752 for (int i = 0; i < 3; i++) { 2753 for (int j = 0; j < 3; j++) { 2754 transform_elem.numerator = 2755 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator; 2756 transform_elem.denominator = 2757 
frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator; 2758 colorCorrectTransform.transform_matrix[i][j] = transform_elem; 2759 num++; 2760 } 2761 } 2762 rc = 2763 AddSetParmEntryToBatch(mParameters, CAM_INTF_META_COLOR_CORRECT_TRANSFORM, 2764 sizeof(colorCorrectTransform), &colorCorrectTransform); 2765 } 2766 2767 cam_trigger_t aecTrigger; 2768 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE; 2769 aecTrigger.trigger_id = -1; 2770 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&& 2771 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) { 2772 aecTrigger.trigger = 2773 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0]; 2774 aecTrigger.trigger_id = 2775 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0]; 2776 } 2777 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, 2778 sizeof(aecTrigger), &aecTrigger); 2779 2780 /*af_trigger must come with a trigger id*/ 2781 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) && 2782 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) { 2783 cam_trigger_t af_trigger; 2784 af_trigger.trigger = 2785 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0]; 2786 af_trigger.trigger_id = 2787 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0]; 2788 rc = AddSetParmEntryToBatch(mParameters, 2789 CAM_INTF_META_AF_TRIGGER, sizeof(af_trigger), &af_trigger); 2790 } 2791 2792 if (frame_settings.exists(ANDROID_CONTROL_MODE)) { 2793 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0]; 2794 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_MODE, 2795 sizeof(metaMode), &metaMode); 2796 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) { 2797 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0]; 2798 uint8_t sceneMode = lookupHalName(SCENE_MODES_MAP, 2799 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]), 2800 fwk_sceneMode); 2801 rc = 
AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE, 2802 sizeof(sceneMode), &sceneMode); 2803 } else if (metaMode == ANDROID_CONTROL_MODE_OFF) { 2804 uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF; 2805 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE, 2806 sizeof(sceneMode), &sceneMode); 2807 } else if (metaMode == ANDROID_CONTROL_MODE_AUTO) { 2808 uint8_t sceneMode = 0;//CAMERA_BESTSHOT_OFF; 2809 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_BESTSHOT_MODE, 2810 sizeof(sceneMode), &sceneMode); 2811 } 2812 } 2813 2814 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) { 2815 int32_t demosaic = 2816 frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0]; 2817 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_DEMOSAIC, 2818 sizeof(demosaic), &demosaic); 2819 } 2820 2821 if (frame_settings.exists(ANDROID_EDGE_MODE)) { 2822 uint8_t edgeMode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0]; 2823 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_EDGE_MODE, 2824 sizeof(edgeMode), &edgeMode); 2825 } 2826 2827 if (frame_settings.exists(ANDROID_EDGE_STRENGTH)) { 2828 int32_t edgeStrength = 2829 frame_settings.find(ANDROID_EDGE_STRENGTH).data.i32[0]; 2830 rc = AddSetParmEntryToBatch(mParameters, 2831 CAM_INTF_META_SHARPNESS_STRENGTH, sizeof(edgeStrength), &edgeStrength); 2832 } 2833 2834 if (frame_settings.exists(ANDROID_FLASH_MODE)) { 2835 int32_t respectFlashMode = 1; 2836 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) { 2837 uint8_t fwk_aeMode = 2838 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0]; 2839 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) { 2840 respectFlashMode = 0; 2841 ALOGI("%s: AE Mode controls flash, ignore android.flash.mode", 2842 __func__); 2843 } 2844 } 2845 if (respectFlashMode) { 2846 uint8_t flashMode = 2847 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]; 2848 flashMode = (int32_t)lookupHalName(FLASH_MODES_MAP, 2849 sizeof(FLASH_MODES_MAP), 2850 flashMode); 2851 ALOGI("%s: flash mode 
after mapping %d", __func__, flashMode); 2852 // To check: CAM_INTF_META_FLASH_MODE usage 2853 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_PARM_LED_MODE, 2854 sizeof(flashMode), &flashMode); 2855 } 2856 } 2857 2858 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) { 2859 uint8_t flashPower = 2860 frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0]; 2861 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_FLASH_POWER, 2862 sizeof(flashPower), &flashPower); 2863 } 2864 2865 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) { 2866 int64_t flashFiringTime = 2867 frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0]; 2868 rc = AddSetParmEntryToBatch(mParameters, 2869 CAM_INTF_META_FLASH_FIRING_TIME, sizeof(flashFiringTime), &flashFiringTime); 2870 } 2871 2872 if (frame_settings.exists(ANDROID_GEOMETRIC_MODE)) { 2873 uint8_t geometricMode = 2874 frame_settings.find(ANDROID_GEOMETRIC_MODE).data.u8[0]; 2875 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_GEOMETRIC_MODE, 2876 sizeof(geometricMode), &geometricMode); 2877 } 2878 2879 if (frame_settings.exists(ANDROID_GEOMETRIC_STRENGTH)) { 2880 uint8_t geometricStrength = 2881 frame_settings.find(ANDROID_GEOMETRIC_STRENGTH).data.u8[0]; 2882 rc = AddSetParmEntryToBatch(mParameters, 2883 CAM_INTF_META_GEOMETRIC_STRENGTH, 2884 sizeof(geometricStrength), &geometricStrength); 2885 } 2886 2887 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) { 2888 uint8_t hotPixelMode = 2889 frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0]; 2890 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_HOTPIXEL_MODE, 2891 sizeof(hotPixelMode), &hotPixelMode); 2892 } 2893 2894 if (frame_settings.exists(ANDROID_LENS_APERTURE)) { 2895 float lensAperture = 2896 frame_settings.find( ANDROID_LENS_APERTURE).data.f[0]; 2897 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_APERTURE, 2898 sizeof(lensAperture), &lensAperture); 2899 } 2900 2901 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) { 2902 
float filterDensity = 2903 frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0]; 2904 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_FILTERDENSITY, 2905 sizeof(filterDensity), &filterDensity); 2906 } 2907 2908 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 2909 float focalLength = 2910 frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 2911 rc = AddSetParmEntryToBatch(mParameters, 2912 CAM_INTF_META_LENS_FOCAL_LENGTH, 2913 sizeof(focalLength), &focalLength); 2914 } 2915 2916 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) { 2917 uint8_t optStabMode = 2918 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0]; 2919 rc = AddSetParmEntryToBatch(mParameters, 2920 CAM_INTF_META_LENS_OPT_STAB_MODE, 2921 sizeof(optStabMode), &optStabMode); 2922 } 2923 2924 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) { 2925 uint8_t noiseRedMode = 2926 frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]; 2927 rc = AddSetParmEntryToBatch(mParameters, 2928 CAM_INTF_META_NOISE_REDUCTION_MODE, 2929 sizeof(noiseRedMode), &noiseRedMode); 2930 } 2931 2932 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_STRENGTH)) { 2933 uint8_t noiseRedStrength = 2934 frame_settings.find(ANDROID_NOISE_REDUCTION_STRENGTH).data.u8[0]; 2935 rc = AddSetParmEntryToBatch(mParameters, 2936 CAM_INTF_META_NOISE_REDUCTION_STRENGTH, 2937 sizeof(noiseRedStrength), &noiseRedStrength); 2938 } 2939 2940 cam_crop_region_t scalerCropRegion; 2941 bool scalerCropSet = false; 2942 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) { 2943 scalerCropRegion.left = 2944 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0]; 2945 scalerCropRegion.top = 2946 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1]; 2947 scalerCropRegion.width = 2948 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2]; 2949 scalerCropRegion.height = 2950 frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3]; 2951 rc = 
AddSetParmEntryToBatch(mParameters, 2952 CAM_INTF_META_SCALER_CROP_REGION, 2953 sizeof(scalerCropRegion), &scalerCropRegion); 2954 scalerCropSet = true; 2955 } 2956 2957 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) { 2958 int64_t sensorExpTime = 2959 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0]; 2960 rc = AddSetParmEntryToBatch(mParameters, 2961 CAM_INTF_META_SENSOR_EXPOSURE_TIME, 2962 sizeof(sensorExpTime), &sensorExpTime); 2963 } 2964 2965 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) { 2966 int64_t sensorFrameDuration = 2967 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0]; 2968 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration) 2969 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration; 2970 rc = AddSetParmEntryToBatch(mParameters, 2971 CAM_INTF_META_SENSOR_FRAME_DURATION, 2972 sizeof(sensorFrameDuration), &sensorFrameDuration); 2973 } 2974 2975 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 2976 int32_t sensorSensitivity = 2977 frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 2978 if (sensorSensitivity < 2979 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity) 2980 sensorSensitivity = 2981 gCamCapability[mCameraId]->sensitivity_range.min_sensitivity; 2982 if (sensorSensitivity > 2983 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity) 2984 sensorSensitivity = 2985 gCamCapability[mCameraId]->sensitivity_range.max_sensitivity; 2986 rc = AddSetParmEntryToBatch(mParameters, 2987 CAM_INTF_META_SENSOR_SENSITIVITY, 2988 sizeof(sensorSensitivity), &sensorSensitivity); 2989 } 2990 2991 if (frame_settings.exists(ANDROID_SHADING_MODE)) { 2992 int32_t shadingMode = 2993 frame_settings.find(ANDROID_SHADING_MODE).data.u8[0]; 2994 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_MODE, 2995 sizeof(shadingMode), &shadingMode); 2996 } 2997 2998 if (frame_settings.exists(ANDROID_SHADING_STRENGTH)) { 2999 uint8_t shadingStrength = 3000 
frame_settings.find(ANDROID_SHADING_STRENGTH).data.u8[0]; 3001 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_SHADING_STRENGTH, 3002 sizeof(shadingStrength), &shadingStrength); 3003 } 3004 3005 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) { 3006 uint8_t facedetectMode = 3007 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0]; 3008 rc = AddSetParmEntryToBatch(mParameters, 3009 CAM_INTF_META_STATS_FACEDETECT_MODE, 3010 sizeof(facedetectMode), &facedetectMode); 3011 } 3012 3013 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) { 3014 uint8_t histogramMode = 3015 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0]; 3016 rc = AddSetParmEntryToBatch(mParameters, 3017 CAM_INTF_META_STATS_HISTOGRAM_MODE, 3018 sizeof(histogramMode), &histogramMode); 3019 } 3020 3021 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) { 3022 uint8_t sharpnessMapMode = 3023 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0]; 3024 rc = AddSetParmEntryToBatch(mParameters, 3025 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, 3026 sizeof(sharpnessMapMode), &sharpnessMapMode); 3027 } 3028 3029 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) { 3030 uint8_t tonemapMode = 3031 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0]; 3032 rc = AddSetParmEntryToBatch(mParameters, 3033 CAM_INTF_META_TONEMAP_MODE, 3034 sizeof(tonemapMode), &tonemapMode); 3035 } 3036 int point = 0; 3037 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE)) { 3038 cam_tonemap_curve_t tonemapCurveBlue; 3039 tonemapCurveBlue.tonemap_points_cnt = 3040 gCamCapability[mCameraId]->max_tone_map_curve_points; 3041 for (int i = 0; i < tonemapCurveBlue.tonemap_points_cnt; i++) { 3042 for (int j = 0; j < 2; j++) { 3043 tonemapCurveBlue.tonemap_points[i][j] = 3044 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point]; 3045 point++; 3046 } 3047 } 3048 rc = AddSetParmEntryToBatch(mParameters, 3049 CAM_INTF_META_TONEMAP_CURVE_BLUE, 3050 
sizeof(tonemapCurveBlue), &tonemapCurveBlue); 3051 } 3052 point = 0; 3053 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN)) { 3054 cam_tonemap_curve_t tonemapCurveGreen; 3055 tonemapCurveGreen.tonemap_points_cnt = 3056 gCamCapability[mCameraId]->max_tone_map_curve_points; 3057 for (int i = 0; i < tonemapCurveGreen.tonemap_points_cnt; i++) { 3058 for (int j = 0; j < 2; j++) { 3059 tonemapCurveGreen.tonemap_points[i][j] = 3060 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point]; 3061 point++; 3062 } 3063 } 3064 rc = AddSetParmEntryToBatch(mParameters, 3065 CAM_INTF_META_TONEMAP_CURVE_GREEN, 3066 sizeof(tonemapCurveGreen), &tonemapCurveGreen); 3067 } 3068 point = 0; 3069 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) { 3070 cam_tonemap_curve_t tonemapCurveRed; 3071 tonemapCurveRed.tonemap_points_cnt = 3072 gCamCapability[mCameraId]->max_tone_map_curve_points; 3073 for (int i = 0; i < tonemapCurveRed.tonemap_points_cnt; i++) { 3074 for (int j = 0; j < 2; j++) { 3075 tonemapCurveRed.tonemap_points[i][j] = 3076 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point]; 3077 point++; 3078 } 3079 } 3080 rc = AddSetParmEntryToBatch(mParameters, 3081 CAM_INTF_META_TONEMAP_CURVE_RED, 3082 sizeof(tonemapCurveRed), &tonemapCurveRed); 3083 } 3084 3085 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) { 3086 uint8_t captureIntent = 3087 frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0]; 3088 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_CAPTURE_INTENT, 3089 sizeof(captureIntent), &captureIntent); 3090 } 3091 3092 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) { 3093 uint8_t blackLevelLock = 3094 frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0]; 3095 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_BLACK_LEVEL_LOCK, 3096 sizeof(blackLevelLock), &blackLevelLock); 3097 } 3098 3099 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) { 3100 uint8_t lensShadingMapMode = 3101 
frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0]; 3102 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE, 3103 sizeof(lensShadingMapMode), &lensShadingMapMode); 3104 } 3105 3106 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) { 3107 cam_area_t roi; 3108 bool reset = true; 3109 convertFromRegions(&roi, settings, ANDROID_CONTROL_AE_REGIONS); 3110 if (scalerCropSet) { 3111 reset = resetIfNeededROI(&roi, &scalerCropRegion); 3112 } 3113 if (reset) { 3114 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AEC_ROI, 3115 sizeof(roi), &roi); 3116 } 3117 } 3118 3119 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) { 3120 cam_area_t roi; 3121 bool reset = true; 3122 convertFromRegions(&roi, settings, ANDROID_CONTROL_AF_REGIONS); 3123 if (scalerCropSet) { 3124 reset = resetIfNeededROI(&roi, &scalerCropRegion); 3125 } 3126 if (reset) { 3127 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AF_ROI, 3128 sizeof(roi), &roi); 3129 } 3130 } 3131 3132 if (frame_settings.exists(ANDROID_CONTROL_AWB_REGIONS)) { 3133 cam_area_t roi; 3134 bool reset = true; 3135 convertFromRegions(&roi, settings, ANDROID_CONTROL_AWB_REGIONS); 3136 if (scalerCropSet) { 3137 reset = resetIfNeededROI(&roi, &scalerCropRegion); 3138 } 3139 if (reset) { 3140 rc = AddSetParmEntryToBatch(mParameters, CAM_INTF_META_AWB_REGIONS, 3141 sizeof(roi), &roi); 3142 } 3143 } 3144 return rc; 3145} 3146 3147/*=========================================================================== 3148 * FUNCTION : getJpegSettings 3149 * 3150 * DESCRIPTION: save the jpeg settings in the HAL 3151 * 3152 * 3153 * PARAMETERS : 3154 * @settings : frame settings information from framework 3155 * 3156 * 3157 * RETURN : success: NO_ERROR 3158 * failure: 3159 *==========================================================================*/ 3160int QCamera3HardwareInterface::getJpegSettings 3161 (const camera_metadata_t *settings) 3162{ 3163 if (mJpegSettings) { 3164 if 
(mJpegSettings->gps_timestamp) { 3165 free(mJpegSettings->gps_timestamp); 3166 mJpegSettings->gps_timestamp = NULL; 3167 } 3168 if (mJpegSettings->gps_coordinates) { 3169 for (int i = 0; i < 3; i++) { 3170 free(mJpegSettings->gps_coordinates[i]); 3171 mJpegSettings->gps_coordinates[i] = NULL; 3172 } 3173 } 3174 free(mJpegSettings); 3175 mJpegSettings = NULL; 3176 } 3177 mJpegSettings = (jpeg_settings_t*) malloc(sizeof(jpeg_settings_t)); 3178 CameraMetadata jpeg_settings; 3179 jpeg_settings = settings; 3180 3181 if (jpeg_settings.exists(ANDROID_JPEG_ORIENTATION)) { 3182 mJpegSettings->jpeg_orientation = 3183 jpeg_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0]; 3184 } else { 3185 mJpegSettings->jpeg_orientation = 0; 3186 } 3187 if (jpeg_settings.exists(ANDROID_JPEG_QUALITY)) { 3188 mJpegSettings->jpeg_quality = 3189 jpeg_settings.find(ANDROID_JPEG_QUALITY).data.u8[0]; 3190 } else { 3191 mJpegSettings->jpeg_quality = 85; 3192 } 3193 if (jpeg_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { 3194 mJpegSettings->thumbnail_size.width = 3195 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0]; 3196 mJpegSettings->thumbnail_size.height = 3197 jpeg_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1]; 3198 } else { 3199 mJpegSettings->thumbnail_size.width = 0; 3200 mJpegSettings->thumbnail_size.height = 0; 3201 } 3202 if (jpeg_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) { 3203 for (int i = 0; i < 3; i++) { 3204 mJpegSettings->gps_coordinates[i] = (double*)malloc(sizeof(double*)); 3205 *(mJpegSettings->gps_coordinates[i]) = 3206 jpeg_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d[i]; 3207 } 3208 } else{ 3209 for (int i = 0; i < 3; i++) { 3210 mJpegSettings->gps_coordinates[i] = NULL; 3211 } 3212 } 3213 3214 if (jpeg_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) { 3215 mJpegSettings->gps_timestamp = (int64_t*)malloc(sizeof(int64_t*)); 3216 *(mJpegSettings->gps_timestamp) = 3217 jpeg_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0]; 3218 } else { 3219 
mJpegSettings->gps_timestamp = NULL; 3220 } 3221 3222 if (jpeg_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) { 3223 int len = jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count; 3224 for (int i = 0; i < len; i++) { 3225 mJpegSettings->gps_processing_method[i] = 3226 jpeg_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8[i]; 3227 } 3228 if (mJpegSettings->gps_processing_method[len-1] != '\0') { 3229 mJpegSettings->gps_processing_method[len] = '\0'; 3230 } 3231 } else { 3232 mJpegSettings->gps_processing_method[0] = '\0'; 3233 } 3234 3235 if (jpeg_settings.exists(ANDROID_SENSOR_SENSITIVITY)) { 3236 mJpegSettings->sensor_sensitivity = 3237 jpeg_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0]; 3238 } else { 3239 mJpegSettings->sensor_sensitivity = mMetadataResponse.iso_speed; 3240 } 3241 3242 if (jpeg_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) { 3243 mJpegSettings->lens_focal_length = 3244 jpeg_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0]; 3245 } 3246 if (jpeg_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) { 3247 mJpegSettings->exposure_compensation = 3248 jpeg_settings.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0]; 3249 } 3250 mJpegSettings->exposure_comp_step = gCamCapability[mCameraId]->exp_compensation_step; 3251 mJpegSettings->max_jpeg_size = calcMaxJpegSize(); 3252 mJpegSettings->is_jpeg_format = true; 3253 mJpegSettings->min_required_pp_mask = gCamCapability[mCameraId]->min_required_pp_mask; 3254 return 0; 3255} 3256 3257/*=========================================================================== 3258 * FUNCTION : captureResultCb 3259 * 3260 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata) 3261 * 3262 * PARAMETERS : 3263 * @frame : frame information from mm-camera-interface 3264 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata. 
3265 * @userdata: userdata 3266 * 3267 * RETURN : NONE 3268 *==========================================================================*/ 3269void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata, 3270 camera3_stream_buffer_t *buffer, 3271 uint32_t frame_number, void *userdata) 3272{ 3273 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata; 3274 if (hw == NULL) { 3275 ALOGE("%s: Invalid hw %p", __func__, hw); 3276 return; 3277 } 3278 3279 hw->captureResultCb(metadata, buffer, frame_number); 3280 return; 3281} 3282 3283 3284/*=========================================================================== 3285 * FUNCTION : initialize 3286 * 3287 * DESCRIPTION: Pass framework callback pointers to HAL 3288 * 3289 * PARAMETERS : 3290 * 3291 * 3292 * RETURN : Success : 0 3293 * Failure: -ENODEV 3294 *==========================================================================*/ 3295 3296int QCamera3HardwareInterface::initialize(const struct camera3_device *device, 3297 const camera3_callback_ops_t *callback_ops) 3298{ 3299 ALOGV("%s: E", __func__); 3300 QCamera3HardwareInterface *hw = 3301 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3302 if (!hw) { 3303 ALOGE("%s: NULL camera device", __func__); 3304 return -ENODEV; 3305 } 3306 3307 int rc = hw->initialize(callback_ops); 3308 ALOGV("%s: X", __func__); 3309 return rc; 3310} 3311 3312/*=========================================================================== 3313 * FUNCTION : configure_streams 3314 * 3315 * DESCRIPTION: 3316 * 3317 * PARAMETERS : 3318 * 3319 * 3320 * RETURN : Success: 0 3321 * Failure: -EINVAL (if stream configuration is invalid) 3322 * -ENODEV (fatal error) 3323 *==========================================================================*/ 3324 3325int QCamera3HardwareInterface::configure_streams( 3326 const struct camera3_device *device, 3327 camera3_stream_configuration_t *stream_list) 3328{ 3329 ALOGV("%s: E", __func__); 3330 
QCamera3HardwareInterface *hw = 3331 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3332 if (!hw) { 3333 ALOGE("%s: NULL camera device", __func__); 3334 return -ENODEV; 3335 } 3336 int rc = hw->configureStreams(stream_list); 3337 ALOGV("%s: X", __func__); 3338 return rc; 3339} 3340 3341/*=========================================================================== 3342 * FUNCTION : register_stream_buffers 3343 * 3344 * DESCRIPTION: Register stream buffers with the device 3345 * 3346 * PARAMETERS : 3347 * 3348 * RETURN : 3349 *==========================================================================*/ 3350int QCamera3HardwareInterface::register_stream_buffers( 3351 const struct camera3_device *device, 3352 const camera3_stream_buffer_set_t *buffer_set) 3353{ 3354 ALOGV("%s: E", __func__); 3355 QCamera3HardwareInterface *hw = 3356 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3357 if (!hw) { 3358 ALOGE("%s: NULL camera device", __func__); 3359 return -ENODEV; 3360 } 3361 int rc = hw->registerStreamBuffers(buffer_set); 3362 ALOGV("%s: X", __func__); 3363 return rc; 3364} 3365 3366/*=========================================================================== 3367 * FUNCTION : construct_default_request_settings 3368 * 3369 * DESCRIPTION: Configure a settings buffer to meet the required use case 3370 * 3371 * PARAMETERS : 3372 * 3373 * 3374 * RETURN : Success: Return valid metadata 3375 * Failure: Return NULL 3376 *==========================================================================*/ 3377const camera_metadata_t* QCamera3HardwareInterface:: 3378 construct_default_request_settings(const struct camera3_device *device, 3379 int type) 3380{ 3381 3382 ALOGV("%s: E", __func__); 3383 camera_metadata_t* fwk_metadata = NULL; 3384 QCamera3HardwareInterface *hw = 3385 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3386 if (!hw) { 3387 ALOGE("%s: NULL camera device", __func__); 3388 return NULL; 3389 } 3390 3391 fwk_metadata = 
hw->translateCapabilityToMetadata(type); 3392 3393 ALOGV("%s: X", __func__); 3394 return fwk_metadata; 3395} 3396 3397/*=========================================================================== 3398 * FUNCTION : process_capture_request 3399 * 3400 * DESCRIPTION: 3401 * 3402 * PARAMETERS : 3403 * 3404 * 3405 * RETURN : 3406 *==========================================================================*/ 3407int QCamera3HardwareInterface::process_capture_request( 3408 const struct camera3_device *device, 3409 camera3_capture_request_t *request) 3410{ 3411 ALOGV("%s: E", __func__); 3412 QCamera3HardwareInterface *hw = 3413 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3414 if (!hw) { 3415 ALOGE("%s: NULL camera device", __func__); 3416 return -EINVAL; 3417 } 3418 3419 int rc = hw->processCaptureRequest(request); 3420 ALOGV("%s: X", __func__); 3421 return rc; 3422} 3423 3424/*=========================================================================== 3425 * FUNCTION : get_metadata_vendor_tag_ops 3426 * 3427 * DESCRIPTION: 3428 * 3429 * PARAMETERS : 3430 * 3431 * 3432 * RETURN : 3433 *==========================================================================*/ 3434 3435void QCamera3HardwareInterface::get_metadata_vendor_tag_ops( 3436 const struct camera3_device *device, 3437 vendor_tag_query_ops_t* ops) 3438{ 3439 ALOGV("%s: E", __func__); 3440 QCamera3HardwareInterface *hw = 3441 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3442 if (!hw) { 3443 ALOGE("%s: NULL camera device", __func__); 3444 return; 3445 } 3446 3447 hw->getMetadataVendorTagOps(ops); 3448 ALOGV("%s: X", __func__); 3449 return; 3450} 3451 3452/*=========================================================================== 3453 * FUNCTION : dump 3454 * 3455 * DESCRIPTION: 3456 * 3457 * PARAMETERS : 3458 * 3459 * 3460 * RETURN : 3461 *==========================================================================*/ 3462 3463void QCamera3HardwareInterface::dump( 3464 const struct 
camera3_device *device, int fd) 3465{ 3466 ALOGV("%s: E", __func__); 3467 QCamera3HardwareInterface *hw = 3468 reinterpret_cast<QCamera3HardwareInterface *>(device->priv); 3469 if (!hw) { 3470 ALOGE("%s: NULL camera device", __func__); 3471 return; 3472 } 3473 3474 hw->dump(fd); 3475 ALOGV("%s: X", __func__); 3476 return; 3477} 3478 3479/*=========================================================================== 3480 * FUNCTION : close_camera_device 3481 * 3482 * DESCRIPTION: 3483 * 3484 * PARAMETERS : 3485 * 3486 * 3487 * RETURN : 3488 *==========================================================================*/ 3489int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device) 3490{ 3491 ALOGV("%s: E", __func__); 3492 int ret = NO_ERROR; 3493 QCamera3HardwareInterface *hw = 3494 reinterpret_cast<QCamera3HardwareInterface *>( 3495 reinterpret_cast<camera3_device_t *>(device)->priv); 3496 if (!hw) { 3497 ALOGE("NULL camera device"); 3498 return BAD_VALUE; 3499 } 3500 delete hw; 3501 3502 pthread_mutex_lock(&mCameraSessionLock); 3503 mCameraSessionActive = 0; 3504 pthread_mutex_unlock(&mCameraSessionLock); 3505 ALOGV("%s: X", __func__); 3506 return ret; 3507} 3508 3509/*=========================================================================== 3510 * FUNCTION : getWaveletDenoiseProcessPlate 3511 * 3512 * DESCRIPTION: query wavelet denoise process plate 3513 * 3514 * PARAMETERS : None 3515 * 3516 * RETURN : WNR prcocess plate vlaue 3517 *==========================================================================*/ 3518cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate() 3519{ 3520 char prop[PROPERTY_VALUE_MAX]; 3521 memset(prop, 0, sizeof(prop)); 3522 property_get("persist.denoise.process.plates", prop, "0"); 3523 int processPlate = atoi(prop); 3524 switch(processPlate) { 3525 case 0: 3526 return CAM_WAVELET_DENOISE_YCBCR_PLANE; 3527 case 1: 3528 return CAM_WAVELET_DENOISE_CBCR_ONLY; 3529 case 2: 3530 return 
CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; 3531 case 3: 3532 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR; 3533 default: 3534 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR; 3535 } 3536} 3537 3538/*=========================================================================== 3539 * FUNCTION : needRotationReprocess 3540 * 3541 * DESCRIPTION: if rotation needs to be done by reprocess in pp 3542 * 3543 * PARAMETERS : none 3544 * 3545 * RETURN : true: needed 3546 * false: no need 3547 *==========================================================================*/ 3548bool QCamera3HardwareInterface::needRotationReprocess() 3549{ 3550 3551 if (!mJpegSettings->is_jpeg_format) { 3552 // RAW image, no need to reprocess 3553 return false; 3554 } 3555 3556 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0 && 3557 mJpegSettings->jpeg_orientation > 0) { 3558 // current rotation is not zero, and pp has the capability to process rotation 3559 ALOGD("%s: need do reprocess for rotation", __func__); 3560 return true; 3561 } 3562 3563 return false; 3564} 3565 3566/*=========================================================================== 3567 * FUNCTION : needReprocess 3568 * 3569 * DESCRIPTION: if reprocess in needed 3570 * 3571 * PARAMETERS : none 3572 * 3573 * RETURN : true: needed 3574 * false: no need 3575 *==========================================================================*/ 3576bool QCamera3HardwareInterface::needReprocess() 3577{ 3578 if (!mJpegSettings->is_jpeg_format) { 3579 // RAW image, no need to reprocess 3580 return false; 3581 } 3582 3583 if ((mJpegSettings->min_required_pp_mask > 0) || 3584 isWNREnabled()) { 3585 // TODO: add for ZSL HDR later 3586 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode 3587 ALOGD("%s: need do reprocess for ZSL WNR or min PP reprocess", __func__); 3588 return true; 3589 } 3590 return needRotationReprocess(); 3591} 3592 
3593/*=========================================================================== 3594 * FUNCTION : addOnlineReprocChannel 3595 * 3596 * DESCRIPTION: add a online reprocess channel that will do reprocess on frames 3597 * coming from input channel 3598 * 3599 * PARAMETERS : 3600 * @pInputChannel : ptr to input channel whose frames will be post-processed 3601 * 3602 * RETURN : Ptr to the newly created channel obj. NULL if failed. 3603 *==========================================================================*/ 3604QCamera3ReprocessChannel *QCamera3HardwareInterface::addOnlineReprocChannel( 3605 QCamera3Channel *pInputChannel, QCamera3PicChannel *picChHandle) 3606{ 3607 int32_t rc = NO_ERROR; 3608 QCamera3ReprocessChannel *pChannel = NULL; 3609 if (pInputChannel == NULL) { 3610 ALOGE("%s: input channel obj is NULL", __func__); 3611 return NULL; 3612 } 3613 3614 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle, 3615 mCameraHandle->ops, NULL, pInputChannel->mPaddingInfo, this, picChHandle); 3616 if (NULL == pChannel) { 3617 ALOGE("%s: no mem for reprocess channel", __func__); 3618 return NULL; 3619 } 3620 3621 // Capture channel, only need snapshot and postview streams start together 3622 mm_camera_channel_attr_t attr; 3623 memset(&attr, 0, sizeof(mm_camera_channel_attr_t)); 3624 attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS; 3625 attr.max_unmatched_frames = getMaxUnmatchedFramesInQueue(); 3626 rc = pChannel->initialize(); 3627 if (rc != NO_ERROR) { 3628 ALOGE("%s: init reprocess channel failed, ret = %d", __func__, rc); 3629 delete pChannel; 3630 return NULL; 3631 } 3632 3633 // pp feature config 3634 cam_pp_feature_config_t pp_config; 3635 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t)); 3636 if (gCamCapability[mCameraId]->min_required_pp_mask & CAM_QCOM_FEATURE_SHARPNESS) { 3637 pp_config.feature_mask |= CAM_QCOM_FEATURE_SHARPNESS; 3638 pp_config.sharpness = 10; 3639 } 3640 3641 if (isWNREnabled()) { 3642 pp_config.feature_mask 
|= CAM_QCOM_FEATURE_DENOISE2D; 3643 pp_config.denoise2d.denoise_enable = 1; 3644 pp_config.denoise2d.process_plates = getWaveletDenoiseProcessPlate(); 3645 } 3646 if (needRotationReprocess()) { 3647 pp_config.feature_mask |= CAM_QCOM_FEATURE_ROTATION; 3648 int rotation = mJpegSettings->jpeg_orientation; 3649 if (rotation == 0) { 3650 pp_config.rotation = ROTATE_0; 3651 } else if (rotation == 90) { 3652 pp_config.rotation = ROTATE_90; 3653 } else if (rotation == 180) { 3654 pp_config.rotation = ROTATE_180; 3655 } else if (rotation == 270) { 3656 pp_config.rotation = ROTATE_270; 3657 } 3658 } 3659 3660 rc = pChannel->addReprocStreamsFromSource(pp_config, 3661 pInputChannel, 3662 mMetadataChannel); 3663 3664 if (rc != NO_ERROR) { 3665 delete pChannel; 3666 return NULL; 3667 } 3668 return pChannel; 3669} 3670 3671int QCamera3HardwareInterface::getMaxUnmatchedFramesInQueue() 3672{ 3673 return gCamCapability[mCameraId]->min_num_pp_bufs; 3674} 3675 3676bool QCamera3HardwareInterface::isWNREnabled() { 3677 return gCamCapability[mCameraId]->isWnrSupported; 3678} 3679 3680}; //end namespace qcamera 3681