/*
**
** Copyright 2008, The Android Open Source Project
** Copyright 2010, Samsung Electronics Co. LTD
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

/*!
 * \file   ExynosCameraHWInterface.h
 * \brief  source file for Android Camera HAL
 * \author thun.hwang(thun.hwang@samsung.com)
 * \date   2010/06/03
 *
 * <b>Revision History: </b>
 * - 2011/12/31 : thun.hwang(thun.hwang@samsung.com) \n
 *   Initial version
 *
 * - 2012/02/01 : Sangwoo, Park(sw5771.park@samsung.com) \n
 *   Adjust Android Standard features
 *
 * - 2012/03/14 : sangwoo.park(sw5771.park@samsung.com) \n
 *   Change file, class name to ExynosXXX.
 *
 */

#include <sys/types.h>
#include <sys/stat.h>

//#define LOG_NDEBUG 0
#define LOG_TAG "ExynosCameraHWInterface"
#include <utils/Log.h>

#include "ExynosCameraHWInterface.h"
#include "exynos_format.h"

#define VIDEO_COMMENT_MARKER_H          (0xFFBE)
#define VIDEO_COMMENT_MARKER_L          (0xFFBF)
#define VIDEO_COMMENT_MARKER_LENGTH     (4)
#define JPEG_EOI_MARKER                 (0xFFD9)
#define HIBYTE(x) (((x) >> 8) & 0xFF)
#define LOBYTE(x) ((x) & 0xFF)

/* TODO: These values will be changed */
#define BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR        "0.10,1.20,Infinity"
#define FRONT_CAMERA_FOCUS_DISTANCES_STR            "0.20,0.25,Infinity"

#define BACK_CAMERA_MACRO_FOCUS_DISTANCES_STR       "0.10,0.20,Infinity"
#define BACK_CAMERA_INFINITY_FOCUS_DISTANCES_STR    "0.10,1.20,Infinity"

#define BACK_CAMERA_FOCUS_DISTANCE_INFINITY         "Infinity"
#define FRONT_CAMERA_FOCUS_DISTANCE_INFINITY        "Infinity"

// This hack does two things:
// -- it sets preview to NV21 (YUV420SP)
// -- it sets gralloc to YV12
//
// The reason being: the Samsung encoder understands only YUV420SP, and gralloc
// does YV12 and RGB565. So we break up the interleaved UV into separate V and
// U planes, which makes preview look good and enables the encoder as well.
//
// FIXME: Samsung needs to enable support for proper YV12 coming out of the
//        camera, and to fix their video encoder to work with YV12.
// FIXME: It also seems like either Samsung's YUV420SP (NV21) or img's YV12 has
//        the color planes switched. We need to figure out which side is doing it
//        wrong and have the respective party fix it.
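//
// Illustrative sketch only (not part of this HAL; the helper name is hypothetical):
// how NV21's interleaved VU plane can be split into the separate V and U planes
// that a YV12 gralloc buffer expects. The copy/CSC paths in this file do the
// equivalent work.
//
//   static void deinterleaveNV21Chroma(const uint8_t *vu,  // NV21 chroma (VU) plane
//                                      uint8_t *v, uint8_t *u,
//                                      int width, int height)
//   {
//       const int chromaSamples = (width / 2) * (height / 2);
//       for (int i = 0; i < chromaSamples; i++) {
//           v[i] = vu[2 * i];      // NV21 stores V first ...
//           u[i] = vu[2 * i + 1];  // ... then U, interleaved per 2x2 block
//       }
//   }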

namespace android {

static const int INITIAL_SKIP_FRAME = 8;
static const int EFFECT_SKIP_FRAME = 1;

gralloc_module_t const* ExynosCameraHWInterface::m_grallocHal;

ExynosCameraHWInterface::ExynosCameraHWInterface(int cameraId, camera_device_t *dev)
        :
          m_captureInProgress(false),
          m_skipFrame(0),
          m_notifyCb(0),
          m_dataCb(0),
          m_dataCbTimestamp(0),
          m_callbackCookie(0),
          m_msgEnabled(0),
          m_faceDetected(false),
          m_halDevice(dev),
          m_numOfAvailableVideoBuf(0)
{
    ALOGV("DEBUG(%s):", __func__);
    int ret = 0;

    m_previewWindow = NULL;
    m_secCamera = ExynosCamera::createInstance();

    for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) {
        m_previewHeap[i] = NULL;
        m_previewBufHandle[i] = NULL;
        m_previewStride[i] = 0;
        m_avaliblePreviewBufHandle[i] = false;
        m_flagGrallocLocked[i] = false;
        m_matchedGrallocIndex[i] = -1;
        m_grallocVirtAddr[i] = NULL;
    }

    m_minUndequeuedBufs = 0;
#ifndef USE_3DNR_DMAOUT
    m_cntVideoBuf = 0;
#endif

    m_oldPictureBufQueueHead = NULL;
    m_getMemoryCb = NULL;
    m_exynosPreviewCSC = NULL;
    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;
    m_frameMetadata.number_of_faces = 0;
    m_frameMetadata.faces = m_faces;

    for (int i = 0; i < NUM_OF_VIDEO_BUF; i++) {
        m_videoHeap[i] = NULL;
        m_resizedVideoHeap[i] = NULL;
    }

    m_ion_client = ion_client_create();
    for (int i = 0; i < NUM_OF_PICTURE_BUF; i++)
        m_pictureHeap[i] = NULL;

    m_rawHeap = NULL;

    m_exitAutoFocusThread = false;
    m_exitPreviewThread = false;
    m_exitVideoThread = false;
    /* whether the PreviewThread is active in preview or stopped. we
     * create the thread but it is initially in stopped state.
     */
    m_previewRunning = false;
    m_videoRunning = false;
    m_pictureRunning = false;
#ifndef USE_3DNR_DMAOUT
    m_videoStart = false;
#endif

    m_previewStartDeferred = false;

    m_recordingHint = false;

    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __func__);
    }

    if (m_secCamera->create(cameraId) == false) {
        ALOGE("ERR(%s):Fail on m_secCamera->create(%d)", __func__, cameraId);
        return;
    }

    m_initDefaultParameters(cameraId);

    CSC_METHOD cscMethod = CSC_METHOD_HW;

    m_exynosPreviewCSC = csc_init(cscMethod);
    if (m_exynosPreviewCSC == NULL)
        ALOGE("ERR(%s):csc_init() fail", __func__);

    m_exynosPictureCSC = csc_init(cscMethod);
    if (m_exynosPictureCSC == NULL)
        ALOGE("ERR(%s):csc_init() fail", __func__);

    m_exynosVideoCSC = csc_init(cscMethod);
    if (m_exynosVideoCSC == NULL)
        ALOGE("ERR(%s):csc_init() fail", __func__);

    m_previewThread = new PreviewThread(this);
    m_videoThread = new VideoThread(this);
    m_autoFocusThread = new AutoFocusThread(this);
    m_pictureThread = new PictureThread(this);
}

ExynosCameraHWInterface::~ExynosCameraHWInterface()
{
    close(m_ion_client);
    this->release();
}

status_t ExynosCameraHWInterface::setPreviewWindow(preview_stream_ops *w)
{
    m_previewWindow = w;
    ALOGV("DEBUG(%s):m_previewWindow %p", __func__, m_previewWindow);

    if (m_previewWindow == NULL) {
        ALOGV("DEBUG(%s):preview window is NULL!", __func__);
        return OK;
    }

    m_previewLock.lock();

    if (m_previewRunning == true && m_previewStartDeferred == false) {
        ALOGV("DEBUG(%s):stop preview (window change)", __func__);
        m_stopPreviewInternal();
    }

    if (m_previewWindow->get_min_undequeued_buffer_count(m_previewWindow, &m_minUndequeuedBufs) != 0) {
        ALOGE("ERR(%s):could not retrieve min undequeued buffer count", __func__);
        m_previewLock.unlock();
        return INVALID_OPERATION;
    }

    if (NUM_OF_PREVIEW_BUF <= m_minUndequeuedBufs) {
        ALOGE("ERR(%s):min undequeued buffer count %d is too high (expecting at most %d)", __func__,
             m_minUndequeuedBufs, NUM_OF_PREVIEW_BUF - 1);
    }

    if (m_previewWindow->set_buffer_count(m_previewWindow, NUM_OF_PREVIEW_BUF) != 0) {
        ALOGE("ERR(%s):could not set buffer count", __func__);
        m_previewLock.unlock();
        return INVALID_OPERATION;
    }

    int previewW, previewH;
    int hal_pixel_format = HAL_PIXEL_FORMAT_YV12;

    m_params.getPreviewSize(&previewW, &previewH);
    const char *str_preview_format = m_params.getPreviewFormat();
    ALOGV("DEBUG(%s):str preview format %s width : %d height : %d ", __func__, str_preview_format, previewW, previewH);

    if (!strcmp(str_preview_format,
                CameraParameters::PIXEL_FORMAT_RGB565)) {
        hal_pixel_format = HAL_PIXEL_FORMAT_RGB_565;
    } else if (!strcmp(str_preview_format,
                       CameraParameters::PIXEL_FORMAT_RGBA8888)) {
        hal_pixel_format = HAL_PIXEL_FORMAT_RGBA_8888;
    } else if (!strcmp(str_preview_format,
                       CameraParameters::PIXEL_FORMAT_YUV420SP)) {
        hal_pixel_format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
    } else if (!strcmp(str_preview_format,
                       CameraParameters::PIXEL_FORMAT_YUV420P))
        hal_pixel_format = HAL_PIXEL_FORMAT_YV12;

    if (m_previewWindow->set_usage(m_previewWindow,
                                   GRALLOC_USAGE_SW_WRITE_OFTEN |
#ifdef USE_EGL
#else
                                   GRALLOC_USAGE_HWC_HWOVERLAY |
#endif
                                   GRALLOC_USAGE_HW_ION) != 0) {
        ALOGE("ERR(%s):could not set usage on gralloc buffer", __func__);
        m_previewLock.unlock();
        return INVALID_OPERATION;
    }

    if (m_previewWindow->set_buffers_geometry(m_previewWindow,
                                              previewW, previewH,
                                              hal_pixel_format) != 0) {
        ALOGE("ERR(%s):could not set buffers geometry to %s",
             __func__, str_preview_format);
        m_previewLock.unlock();
        return INVALID_OPERATION;
    }

    if (m_previewRunning == true && m_previewStartDeferred == true) {
        ALOGV("DEBUG(%s):start/resume preview", __func__);
        if (m_startPreviewInternal() == true) {
            m_previewStartDeferred = false;
            m_previewCondition.signal();
        }
    }
    m_previewLock.unlock();

    return OK;
}

void ExynosCameraHWInterface::setCallbacks(camera_notify_callback notify_cb,
                                           camera_data_callback data_cb,
                                           camera_data_timestamp_callback data_cb_timestamp,
                                           camera_request_memory get_memory,
                                           void *user)
{
    m_notifyCb = notify_cb;
    m_dataCb = data_cb;
    m_dataCbTimestamp = data_cb_timestamp;
    m_getMemoryCb = get_memory;
    m_callbackCookie = user;
}

void ExynosCameraHWInterface::enableMsgType(int32_t msgType)
{
    ALOGV("DEBUG(%s):msgType = 0x%x, m_msgEnabled before = 0x%x",
         __func__, msgType, m_msgEnabled);
    m_msgEnabled |= msgType;

    m_previewLock.lock();
    if (   msgType & CAMERA_MSG_PREVIEW_FRAME
        && m_previewRunning == true
        && m_previewStartDeferred == true) {

        ALOGV("DEBUG(%s):starting deferred preview", __func__);

        if (m_startPreviewInternal() == true) {
            m_previewStartDeferred = false;
            m_previewCondition.signal();
        }
    }
    m_previewLock.unlock();

    ALOGV("DEBUG(%s):m_msgEnabled = 0x%x", __func__, m_msgEnabled);
}

void
ExynosCameraHWInterface::disableMsgType(int32_t msgType)
{
    ALOGV("DEBUG(%s):msgType = 0x%x, m_msgEnabled before = 0x%x",
         __func__, msgType, m_msgEnabled);
    m_msgEnabled &= ~msgType;
    ALOGV("DEBUG(%s):m_msgEnabled = 0x%x", __func__, m_msgEnabled);
}

bool ExynosCameraHWInterface::msgTypeEnabled(int32_t msgType)
{
    return (m_msgEnabled & msgType);
}

status_t ExynosCameraHWInterface::startPreview()
{
    int ret = OK;

    ALOGV("DEBUG(%s):", __func__);

    Mutex::Autolock lock(m_stateLock);
    if (m_captureInProgress == true) {
        ALOGE("%s : capture in progress, not allowed", __func__);
        return INVALID_OPERATION;
    }

    m_previewLock.lock();
    if (m_previewRunning == true) {
        ALOGE("%s : preview thread already running", __func__);
        m_previewLock.unlock();
        return INVALID_OPERATION;
    }

    m_previewRunning = true;
    m_previewStartDeferred = false;

    if (m_previewWindow == NULL) {
        if (!(m_msgEnabled & CAMERA_MSG_PREVIEW_FRAME)) {
            ALOGV("DEBUG(%s):deferring", __func__);
            m_previewStartDeferred = true;
            m_previewLock.unlock();
            return NO_ERROR;
        }
        ALOGE("%s(%d): m_previewWindow is NULL", __func__, __LINE__);
        m_previewLock.unlock();
        return UNKNOWN_ERROR;
    }

    if (m_startPreviewInternal() == true) {
        m_previewCondition.signal();
        ret = OK;
    } else {
        ret = UNKNOWN_ERROR;
    }

    m_previewLock.unlock();
    return ret;
}

void ExynosCameraHWInterface::stopPreview()
{
    ALOGV("DEBUG(%s):", __func__);

    /* request that the preview thread stop. */
    m_previewLock.lock();
    m_stopPreviewInternal();
    m_previewLock.unlock();
}

bool ExynosCameraHWInterface::previewEnabled()
{
    Mutex::Autolock lock(m_previewLock);
    ALOGV("DEBUG(%s):%d", __func__, m_previewRunning);
    return m_previewRunning;
}

status_t ExynosCameraHWInterface::storeMetaDataInBuffers(bool enable)
{
    if (!enable) {
        ALOGE("Non-metadata buffer mode is not supported!");
        return INVALID_OPERATION;
    }
    return OK;
}

status_t ExynosCameraHWInterface::startRecording()
{
    ALOGV("DEBUG(%s):", __func__);

    Mutex::Autolock lock(m_videoLock);

    int videoW, videoH, videoFormat, videoFramesize;

    m_secCamera->getVideoSize(&videoW, &videoH);
    videoFormat = m_secCamera->getVideoFormat();
    videoFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(videoFormat), videoW, videoH);

    int orgVideoFrameSize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(videoFormat), m_orgVideoRect.w, m_orgVideoRect.h);

    for (int i = 0; i < NUM_OF_VIDEO_BUF; i++) {

#ifdef USE_3DNR_DMAOUT
        ExynosBuffer videoBuf;

        if (m_videoHeap[i] != NULL) {
            m_videoHeap[i]->release(m_videoHeap[i]);
            m_videoHeap[i] = 0;
        }

        m_videoHeap[i] = m_getMemoryCb(-1, videoFramesize, 1, NULL);
        if (!m_videoHeap[i]) {
            ALOGE("ERR(%s):m_getMemoryCb(m_videoHeap[%d], size(%d) fail", __func__, i, videoFramesize);
            return UNKNOWN_ERROR;
        }

        m_getAlignedYUVSize(videoFormat, videoW, videoH, &videoBuf);

        videoBuf.virt.extP[0] = (char *)m_videoHeap[i]->data;
        for (int j = 1; j < 3; j++) {
            if (videoBuf.size.extS[j] != 0)
                videoBuf.virt.extP[j] = videoBuf.virt.extP[j-1] + videoBuf.size.extS[j-1];
            else
                videoBuf.virt.extP[j] = NULL;
        }

        videoBuf.reserved.p = i;

        m_secCamera->setVideoBuf(&videoBuf);
#endif

        // original VideoSized heap

        if (m_resizedVideoHeap[i] != NULL) {
            m_resizedVideoHeap[i]->release(m_resizedVideoHeap[i]);
            m_resizedVideoHeap[i] = 0;
        }

        m_resizedVideoHeap[i] = m_getMemoryCb(-1, orgVideoFrameSize, 1, NULL);
        if (!m_resizedVideoHeap[i]) {
            ALOGE("ERR(%s):m_getMemoryCb(m_resizedVideoHeap[%d], size(%d) fail", __func__, i, orgVideoFrameSize);
            return UNKNOWN_ERROR;
        }
    }

    if (m_videoRunning == false) {
        if (m_secCamera->startVideo() == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->startVideo()", __func__);
            return UNKNOWN_ERROR;
        }

        m_numOfAvailableVideoBuf = NUM_OF_VIDEO_BUF;

#ifdef USE_3DNR_DMAOUT
        m_videoRunning = true;

        m_videoCondition.signal();
#else
        m_videoStart = true;
#endif
    }

    return NO_ERROR;
}

void ExynosCameraHWInterface::stopRecording()
{
    ALOGV("DEBUG(%s):", __func__);

#ifndef USE_3DNR_DMAOUT
    m_videoStart = false;
#endif

    if (m_videoRunning == true) {
        m_videoRunning = false;

        Mutex::Autolock lock(m_videoLock);

        m_videoCondition.signal();
        /* wait until video thread is stopped */
        m_videoStoppedCondition.wait(m_videoLock);
    } else
        ALOGV("DEBUG(%s):video not running, doing nothing", __func__);
}

bool ExynosCameraHWInterface::recordingEnabled()
{
    return m_videoStart;
}

void ExynosCameraHWInterface::releaseRecordingFrame(const void *opaque)
{
    // This lock makes video lock up
    // Mutex::Autolock lock(m_videoLock);

    int i;
    bool find = false;

    // HACK: this causes recording to be slow
    /*
    for (i = 0; i < NUM_OF_VIDEO_BUF; i++) {
        if ((char *)m_videoHeap[i]->data == (char *)opaque) {
            find = true;
            break;
        }
    }

    if (find == true) {
        ExynosBuffer videoBuf;
        videoBuf.reserved.p = i;

        m_secCamera->putVideoBuf(&videoBuf);

        m_numOfAvailableVideoBuf++;
        if (NUM_OF_VIDEO_BUF <= m_numOfAvailableVideoBuf)
            m_numOfAvailableVideoBuf = NUM_OF_VIDEO_BUF;
    } else {
        ALOGV("DEBUG(%s):no matched index(%p)", __func__, (char *)opaque);
    }
    */
}

status_t ExynosCameraHWInterface::autoFocus()
{
    ALOGV("DEBUG(%s):", __func__);
    /* signal m_autoFocusThread to run once */
    m_focusCondition.signal();
    return NO_ERROR;
}

status_t ExynosCameraHWInterface::cancelAutoFocus()
{
    if (m_secCamera->cancelAutoFocus() == false) {
        ALOGE("ERR(%s):Fail on m_secCamera->cancelAutoFocus()", __func__);
        return UNKNOWN_ERROR;
    }

    return NO_ERROR;
}

status_t ExynosCameraHWInterface::takePicture()
{
    Mutex::Autolock lock(m_stateLock);
    if (m_captureInProgress == true) {
        ALOGE("%s : capture already in progress", __func__);
        return INVALID_OPERATION;
    }

    if (m_pictureRunning == false) {
        ALOGI("%s(%d): m_pictureRunning is false", __func__, __LINE__);
        if (m_startPictureInternal() == false) {
            ALOGE("%s(%d): m_startPictureInternal() fail!!!", __func__, __LINE__);
            return INVALID_OPERATION;
        }
    }

    m_pictureLock.lock();
    m_captureInProgress = true;
    m_pictureLock.unlock();

    if (m_pictureThread->run("CameraPictureThread", PRIORITY_DEFAULT) != NO_ERROR) {
        ALOGE("%s : couldn't run picture thread", __func__);
        return INVALID_OPERATION;
    }

    return NO_ERROR;
}

status_t ExynosCameraHWInterface::cancelPicture()
{
    ALOGV("DEBUG(%s):", __func__);

    if (m_pictureThread.get()) {
        ALOGV("DEBUG(%s):waiting for picture thread to exit", __func__);
        m_pictureThread->requestExitAndWait();
        ALOGV("DEBUG(%s):picture thread has exited", __func__);
    }

    return NO_ERROR;
}

status_t ExynosCameraHWInterface::setParameters(const CameraParameters& params)
{
    ALOGV("DEBUG(%s):", __func__);

    status_t ret = NO_ERROR;

    /* if someone calls us while picture thread is running, it could screw
     * up the sensor quite a bit so return error. we can't wait because
     * that would cause deadlock with the callbacks
     */
    m_stateLock.lock();
    if (m_captureInProgress == true) {
        m_stateLock.unlock();
        m_pictureLock.lock();
        m_pictureCondition.waitRelative(m_pictureLock, (2000 * 1000000));
        m_pictureLock.unlock();
    } else {
        m_stateLock.unlock();
    }

    ///////////////////////////////////////////////////
    // Google Official API : Camera.Parameters
    // http://developer.android.com/reference/android/hardware/Camera.Parameters.html
    ///////////////////////////////////////////////////

    // recording hint
    const char *newRecordingHint = params.get(CameraParameters::KEY_RECORDING_HINT);
    if (newRecordingHint != NULL) {
        if (strcmp(newRecordingHint, "true") == 0)
            m_recordingHint = true;
        else
            m_recordingHint = false;

        m_secCamera->setRecordingHint(m_recordingHint);
    }

    // preview size
    int newPreviewW = 0;
    int newPreviewH = 0;
    int newCalPreviewW = 0;
    int newCalPreviewH = 0;
    int previewMaxW = 0;
    int previewMaxH = 0;
    params.getPreviewSize(&newPreviewW, &newPreviewH);

    // In general, it will show preview max size
    m_secCamera->getSupportedPreviewSizes(&previewMaxW, &previewMaxH);
    newCalPreviewW = previewMaxW;
    newCalPreviewH = previewMaxH;

    // When recording, it will show video max size
    if (m_recordingHint == true) {
        m_secCamera->getSupportedVideoSizes(&newCalPreviewW, &newCalPreviewH);
        if (   previewMaxW < newCalPreviewW
            || previewMaxH < newCalPreviewH) {
            newCalPreviewW = previewMaxW;
            newCalPreviewH = previewMaxH;
        }
    }

    m_orgPreviewRect.w = newPreviewW;
    m_orgPreviewRect.h = newPreviewH;

    // TODO : calibrate original preview ratio
    //m_getRatioSize(newCalPreviewW, newCalPreviewH, newPreviewW, newPreviewH, &newPreviewW, &newPreviewH);
    newPreviewW = newCalPreviewW;
    newPreviewH = newCalPreviewH;

    const char *strNewPreviewFormat = params.getPreviewFormat();
    ALOGV("DEBUG(%s):newPreviewW x newPreviewH = %dx%d, format = %s",
         __func__, newPreviewW, newPreviewH, strNewPreviewFormat);

    if (0 < newPreviewW &&
        0 < newPreviewH &&
        strNewPreviewFormat != NULL &&
        m_isSupportedPreviewSize(newPreviewW, newPreviewH) == true) {
        int newPreviewFormat = 0;

        if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_RGB565))
            newPreviewFormat = V4L2_PIX_FMT_RGB565;
        else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_RGBA8888))
            newPreviewFormat = V4L2_PIX_FMT_RGB32;
        else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_YUV420SP))
            newPreviewFormat = V4L2_PIX_FMT_NV21;
        else if (!strcmp(strNewPreviewFormat, CameraParameters::PIXEL_FORMAT_YUV420P))
            newPreviewFormat = V4L2_PIX_FMT_YVU420M;
        else if (!strcmp(strNewPreviewFormat, "yuv420sp_custom"))
            newPreviewFormat = V4L2_PIX_FMT_NV12T;
        else if (!strcmp(strNewPreviewFormat, "yuv422i"))
            newPreviewFormat = V4L2_PIX_FMT_YUYV;
        else if (!strcmp(strNewPreviewFormat, "yuv422p"))
            newPreviewFormat = V4L2_PIX_FMT_YUV422P;
        else
            newPreviewFormat = V4L2_PIX_FMT_NV21; //for 3rd party

        m_orgPreviewRect.colorFormat = newPreviewFormat;

        int curPreviewW, curPreviewH;
        m_secCamera->getPreviewSize(&curPreviewW, &curPreviewH);
        int curPreviewFormat = m_secCamera->getPreviewFormat();

        if (curPreviewW != newPreviewW ||
            curPreviewH != newPreviewH ||
            curPreviewFormat != newPreviewFormat) {
            if (   m_secCamera->setPreviewSize(newPreviewW, newPreviewH) == false
                || m_secCamera->setPreviewFormat(newPreviewFormat) == false) {
                ALOGE("ERR(%s):Fail on m_secCamera->setPreviewSize(width(%d), height(%d), format(%d))",
                     __func__, newPreviewW, newPreviewH, newPreviewFormat);
                ret = UNKNOWN_ERROR;
            } else {
                if (m_previewWindow) {
                    if (m_previewRunning == true && m_previewStartDeferred == false) {
                        ALOGE("ERR(%s):preview is running, cannot change size and format!", __func__);
                        ret = INVALID_OPERATION;
                    }

                    ALOGV("DEBUG(%s):m_previewWindow (%p) set_buffers_geometry", __func__, m_previewWindow);
                    ALOGV("DEBUG(%s):m_previewWindow->set_buffers_geometry (%p)", __func__,
                         m_previewWindow->set_buffers_geometry);
                    m_previewWindow->set_buffers_geometry(m_previewWindow,
                                                          newPreviewW, newPreviewH,
                                                          newPreviewFormat);
                    ALOGV("DEBUG(%s):DONE m_previewWindow (%p) set_buffers_geometry", __func__, m_previewWindow);
                }
                m_params.setPreviewSize(newPreviewW, newPreviewH);
                m_params.setPreviewFormat(strNewPreviewFormat);
            }
        }
        else {
            ALOGV("DEBUG(%s):preview size and format has not changed", __func__);
        }
    } else {
        ALOGE("ERR(%s):Invalid preview size(%dx%d)", __func__, newPreviewW, newPreviewH);
        ret = INVALID_OPERATION;
    }

    int newPictureW = 0;
    int newPictureH = 0;
    params.getPictureSize(&newPictureW, &newPictureH);
    ALOGV("DEBUG(%s):newPictureW x newPictureH = %dx%d", __func__, newPictureW, newPictureH);

    if (0 < newPictureW && 0 < newPictureH) {

        int orgPictureW, orgPictureH = 0;
        m_secCamera->getPictureSize(&orgPictureW, &orgPictureH);

        if (m_secCamera->setPictureSize(newPictureW, newPictureH) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setPictureSize(width(%d), height(%d))",
                 __func__, newPictureW, newPictureH);
            ret = UNKNOWN_ERROR;
        } else {
            int tempW, tempH = 0;
            m_secCamera->getPictureSize(&tempW, &tempH);

            if (tempW != orgPictureW || tempH != orgPictureH) {

                if (m_pictureRunning == true) {
                    if (m_stopPictureInternal() == false)
                        ALOGE("ERR(%s):m_stopPictureInternal() fail", __func__);

                    if (m_startPictureInternal() == false)
                        ALOGE("ERR(%s):m_startPictureInternal() fail", __func__);
                }
            }
            m_orgPictureRect.w = newPictureW;
            m_orgPictureRect.h = newPictureH;
            m_params.setPictureSize(newPictureW, newPictureH);
        }
    }

    // picture format
    const char *newPictureFormat = params.getPictureFormat();
    ALOGV("DEBUG(%s):newPictureFormat %s", __func__, newPictureFormat);

    if (newPictureFormat != NULL) {
        int value = 0;

        if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_RGB565))
            value = V4L2_PIX_FMT_RGB565;
        else if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_RGBA8888))
            value = V4L2_PIX_FMT_RGB32;
        else if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_YUV420SP))
            value = V4L2_PIX_FMT_NV21;
        else if (!strcmp(newPictureFormat, "yuv420sp_custom"))
            value = V4L2_PIX_FMT_NV12T;
        else if (!strcmp(newPictureFormat, "yuv420p"))
            value = V4L2_PIX_FMT_YUV420;
        else if (!strcmp(newPictureFormat, "yuv422i"))
            value = V4L2_PIX_FMT_YUYV;
        else if (!strcmp(newPictureFormat, "uyv422i_custom")) //Zero copy UYVY format
            value = V4L2_PIX_FMT_UYVY;
        else if (!strcmp(newPictureFormat, "uyv422i")) //Non-zero copy UYVY format
            value = V4L2_PIX_FMT_UYVY;
        else if (!strcmp(newPictureFormat, CameraParameters::PIXEL_FORMAT_JPEG))
            value = V4L2_PIX_FMT_YUYV;
        else if (!strcmp(newPictureFormat, "yuv422p"))
            value = V4L2_PIX_FMT_YUV422P;
        else
            value = V4L2_PIX_FMT_NV21; //for 3rd party

        if (value != m_secCamera->getPictureFormat()) {
            if (m_secCamera->setPictureFormat(value) == false) {
                ALOGE("ERR(%s):Fail on m_secCamera->setPictureFormat(format(%d))", __func__, value);
                ret = UNKNOWN_ERROR;
            } else {
                m_orgPictureRect.colorFormat = value;
                m_params.setPictureFormat(newPictureFormat);
            }
        }
    }

    // JPEG image quality
    int newJpegQuality = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
    ALOGV("DEBUG(%s):newJpegQuality %d", __func__, newJpegQuality);
    // we ignore bad values
    if (newJpegQuality >= 1 && newJpegQuality <= 100) {
        if (m_secCamera->setJpegQuality(newJpegQuality) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setJpegQuality(quality(%d))", __func__, newJpegQuality);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set(CameraParameters::KEY_JPEG_QUALITY, newJpegQuality);
        }
    }

    // JPEG thumbnail size
    int newJpegThumbnailW = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
    int newJpegThumbnailH = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
    if (0 <= newJpegThumbnailW && 0 <= newJpegThumbnailH) {
        if (m_secCamera->setJpegThumbnailSize(newJpegThumbnailW, newJpegThumbnailH) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setJpegThumbnailSize(width(%d), height(%d))", __func__, newJpegThumbnailW, newJpegThumbnailH);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, newJpegThumbnailW);
            m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, newJpegThumbnailH);
        }
    }

    // JPEG thumbnail quality
    int newJpegThumbnailQuality = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
    ALOGV("DEBUG(%s):newJpegThumbnailQuality %d", __func__, newJpegThumbnailQuality);
    // we ignore bad values
    if (newJpegThumbnailQuality >= 1 && newJpegThumbnailQuality <= 100) {
        if (m_secCamera->setJpegThumbnailQuality(newJpegThumbnailQuality) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setJpegThumbnailQuality(quality(%d))",
                 __func__, newJpegThumbnailQuality);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, newJpegThumbnailQuality);
        }
    }

    // Video size
    int newVideoW = 0;
    int newVideoH = 0;
    params.getVideoSize(&newVideoW, &newVideoH);
    ALOGV("DEBUG(%s):newVideoW (%d) newVideoH (%d)", __func__, newVideoW, newVideoH);
    if (0 < newVideoW && 0 < newVideoH && m_videoStart == false) {

        m_orgVideoRect.w = newVideoW;
        m_orgVideoRect.h = newVideoH;

        if (m_secCamera->setVideoSize(newVideoW, newVideoH) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setVideoSize(width(%d), height(%d))",
                 __func__, newVideoW, newVideoH);
            ret = UNKNOWN_ERROR;
        }
        m_params.setVideoSize(newVideoW, newVideoH);
    }

    // video stabilization
    const char *newVideoStabilization = params.get(CameraParameters::KEY_VIDEO_STABILIZATION);
    bool currVideoStabilization = m_secCamera->getVideoStabilization();
    ALOGV("DEBUG(%s):newVideoStabilization %s", __func__, newVideoStabilization);
    if (newVideoStabilization != NULL) {
        bool toggle = false;

        if (!strcmp(newVideoStabilization, "true"))
            toggle = true;

        if (currVideoStabilization != toggle) {
            if (m_secCamera->setVideoStabilization(toggle) == false) {
                ALOGE("ERR(%s):setVideoStabilization() fail", __func__);
                ret = UNKNOWN_ERROR;
            } else {
                m_params.set(CameraParameters::KEY_VIDEO_STABILIZATION, newVideoStabilization);
            }
        }
    }

    // 3dnr
    const char *new3dnr = params.get("3dnr");
    ALOGV("DEBUG(%s):new3dnr %s", __func__, new3dnr);
    if (new3dnr != NULL) {
        bool toggle = false;

        if (!strcmp(new3dnr, "true"))
            toggle = true;

        if (m_secCamera->set3DNR(toggle) == false) {
            ALOGE("ERR(%s):set3DNR() fail", __func__);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set("3dnr", new3dnr);
        }
    }

    // odc
    const char *newOdc = params.get("odc");
    ALOGV("DEBUG(%s):newOdc %s", __func__, newOdc);
    if (newOdc != NULL) {
        bool toggle = false;

        if (!strcmp(newOdc, "true"))
            toggle = true;

        if (m_secCamera->setODC(toggle) == false) {
            ALOGE("ERR(%s):setODC() fail", __func__);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set("odc", newOdc);
        }
    }

    // frame rate
    int newFrameRate = params.getPreviewFrameRate();
    ALOGV("DEBUG(%s):newFrameRate %d", __func__, newFrameRate);
    // we ignore any fps request and determine the fps automatically based
    // on scene mode. don't return an error because it causes CTS failure.
    if (newFrameRate != m_params.getPreviewFrameRate()) {
        if (m_secCamera->setPreviewFrameRate(newFrameRate) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setPreviewFrameRate(%d)", __func__, newFrameRate);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.setPreviewFrameRate(newFrameRate);
        }
    }

    // zoom
    int newZoom = params.getInt(CameraParameters::KEY_ZOOM);
    ALOGV("DEBUG(%s):newZoom %d", __func__, newZoom);
    if (0 <= newZoom) {
        if (m_secCamera->setZoom(newZoom) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setZoom(newZoom(%d))", __func__, newZoom);
            ret = UNKNOWN_ERROR;
        }
        else {
            m_params.set(CameraParameters::KEY_ZOOM, newZoom);
        }
    }

    // rotation
    int newRotation = params.getInt(CameraParameters::KEY_ROTATION);
    ALOGV("DEBUG(%s):newRotation %d", __func__, newRotation);
    if (0 <= newRotation) {
        ALOGV("DEBUG(%s):set orientation:%d", __func__, newRotation);
        if (m_secCamera->setRotation(newRotation) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setRotation(%d)", __func__, newRotation);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set(CameraParameters::KEY_ROTATION, newRotation);
        }
    }

    // auto exposure lock
    const char *newAutoExposureLock = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
    if (newAutoExposureLock != NULL) {
        bool toggle = false;

        if (!strcmp(newAutoExposureLock, "true"))
            toggle = true;

        if (m_secCamera->setAutoExposureLock(toggle) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setAutoExposureLock()", __func__);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, newAutoExposureLock);
        }
    }

    // exposure
    int minExposureCompensation = params.getInt(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION);
    int maxExposureCompensation = params.getInt(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION);
    int newExposureCompensation = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
    ALOGV("DEBUG(%s):newExposureCompensation %d", __func__, newExposureCompensation);
    if ((minExposureCompensation <= newExposureCompensation) &&
        (newExposureCompensation <= maxExposureCompensation)) {
        if (m_secCamera->setExposureCompensation(newExposureCompensation) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setExposureCompensation(exposure(%d))", __func__, newExposureCompensation);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, newExposureCompensation);
        }
    }

    // auto white balance lock
    const char *newAutoWhitebalanceLock = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
    if (newAutoWhitebalanceLock != NULL) {
        bool toggle = false;

        if (!strcmp(newAutoWhitebalanceLock, "true"))
            toggle = true;

        if (m_secCamera->setAutoWhiteBalanceLock(toggle) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setAutoWhiteBalanceLock()", __func__);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, newAutoWhitebalanceLock);
        }
    }

    // white balance
    const char *newWhiteBalance = params.get(CameraParameters::KEY_WHITE_BALANCE);
    ALOGV("DEBUG(%s):newWhiteBalance %s", __func__, newWhiteBalance);
    if (newWhiteBalance != NULL) {
        int value = -1;

        if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_AUTO))
            value = ExynosCamera::WHITE_BALANCE_AUTO;
        else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_INCANDESCENT))
            value = ExynosCamera::WHITE_BALANCE_INCANDESCENT;
        else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_FLUORESCENT))
            value = ExynosCamera::WHITE_BALANCE_FLUORESCENT;
        else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT))
            value = ExynosCamera::WHITE_BALANCE_WARM_FLUORESCENT;
        else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_DAYLIGHT))
            value = ExynosCamera::WHITE_BALANCE_DAYLIGHT;
        else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT))
            value = ExynosCamera::WHITE_BALANCE_CLOUDY_DAYLIGHT;
        else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_TWILIGHT))
            value = ExynosCamera::WHITE_BALANCE_TWILIGHT;
        else if (!strcmp(newWhiteBalance, CameraParameters::WHITE_BALANCE_SHADE))
            value = ExynosCamera::WHITE_BALANCE_SHADE;
        else {
            ALOGE("ERR(%s):Invalid white balance(%s)", __func__, newWhiteBalance); //twilight, shade, warm_flourescent
            ret = UNKNOWN_ERROR;
        }

        if (0 <= value) {
            if (m_secCamera->setWhiteBalance(value) == false) {
                ALOGE("ERR(%s):Fail on m_secCamera->setWhiteBalance(white(%d))", __func__, value);
                ret = UNKNOWN_ERROR;
            } else {
                m_params.set(CameraParameters::KEY_WHITE_BALANCE, newWhiteBalance);
            }
        }
    }

    // Metering
    // This is an additional API (not a Google API).
    // It must be set before KEY_METERING_AREAS below.
    const char *strNewMetering = params.get("metering");
    ALOGV("DEBUG(%s):strNewMetering %s", __func__, strNewMetering);
    if (strNewMetering != NULL) {
        int newMetering = -1;

        if (!strcmp(strNewMetering, "average"))
            newMetering = ExynosCamera::METERING_MODE_AVERAGE;
        else if (!strcmp(strNewMetering, "center"))
            newMetering = ExynosCamera::METERING_MODE_CENTER;
        else if (!strcmp(strNewMetering, "matrix"))
            newMetering = ExynosCamera::METERING_MODE_MATRIX;
        else if (!strcmp(strNewMetering, "spot"))
            newMetering = ExynosCamera::METERING_MODE_SPOT;
        else {
            ALOGE("ERR(%s):Invalid metering newMetering(%s)", __func__, strNewMetering);
            ret = UNKNOWN_ERROR;
        }

        if (0 <= newMetering) {
            if (m_secCamera->setMeteringMode(newMetering) == false) {
                ALOGE("ERR(%s):Fail on m_secCamera->setMeteringMode(%d)", __func__, newMetering);
                ret = UNKNOWN_ERROR;
            } else {
                m_params.set("metering", strNewMetering);
            }
        }
    }

    // metering areas
    const char *newMeteringAreas = params.get(CameraParameters::KEY_METERING_AREAS);
    int maxNumMeteringAreas = m_secCamera->getMaxNumMeteringAreas();

    if (newMeteringAreas != NULL && maxNumMeteringAreas != 0) {
        // ex : (-10,-10,0,0,300),(0,0,10,10,700)
        ExynosRect2 *rect2s = new ExynosRect2[maxNumMeteringAreas];
        int *weights = new int[maxNumMeteringAreas];

        int validMeteringAreas = m_bracketsStr2Ints((char *)newMeteringAreas, maxNumMeteringAreas, rect2s, weights);
        if (0 < validMeteringAreas) {
            // map each coordinate from the API's [-1000, 1000] range into preview pixel coordinates
            for (int i = 0; i < validMeteringAreas; i++) {
                rect2s[i].x1 = m_calibratePosition(2000, newPreviewW, rect2s[i].x1 + 1000);
                rect2s[i].y1 = m_calibratePosition(2000, newPreviewH, rect2s[i].y1 + 1000);
                rect2s[i].x2 = m_calibratePosition(2000, newPreviewW, rect2s[i].x2 + 1000);
                rect2s[i].y2 = m_calibratePosition(2000, newPreviewH, rect2s[i].y2 + 1000);
            }

            if (m_secCamera->setMeteringAreas(validMeteringAreas, rect2s, weights) == false) {
                ALOGE("ERR(%s):setMeteringAreas(%s) fail", __func__, newMeteringAreas);
                ret = UNKNOWN_ERROR;
            }
            else {
                m_params.set(CameraParameters::KEY_METERING_AREAS, newMeteringAreas);
            }
        }

        delete [] rect2s;
        delete [] weights;
    }

    // anti banding
    const char *newAntibanding = params.get(CameraParameters::KEY_ANTIBANDING);
    ALOGV("DEBUG(%s):newAntibanding %s", __func__, newAntibanding);
    if (newAntibanding != NULL) {
        int value = -1;

        if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_AUTO))
            value = ExynosCamera::ANTIBANDING_AUTO;
        else if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_50HZ))
            value = ExynosCamera::ANTIBANDING_50HZ;
        else if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_60HZ))
            value = ExynosCamera::ANTIBANDING_60HZ;
        else if (!strcmp(newAntibanding, CameraParameters::ANTIBANDING_OFF))
            value = ExynosCamera::ANTIBANDING_OFF;
        else {
            ALOGE("ERR(%s):Invalid antibanding value(%s)", __func__, newAntibanding);
            ret = UNKNOWN_ERROR;
        }

        if (0 <= value) {
            if (m_secCamera->setAntibanding(value) == false) {
                ALOGE("ERR(%s):Fail on m_secCamera->setAntibanding(%d)", __func__, value);
                ret = UNKNOWN_ERROR;
            } else {
                m_params.set(CameraParameters::KEY_ANTIBANDING, newAntibanding);
            }
        }
    }

    // scene mode
    const char *strNewSceneMode = params.get(CameraParameters::KEY_SCENE_MODE);
    const char *strCurSceneMode = m_params.get(CameraParameters::KEY_SCENE_MODE);

    // fps range
    int newMinFps = 0;
    int newMaxFps = 0;
    int curMinFps = 0;
    int curMaxFps = 0;
    params.getPreviewFpsRange(&newMinFps, &newMaxFps);
    m_params.getPreviewFpsRange(&curMinFps, &curMaxFps);
    /* our fps range is determined by the sensor, so reject any request
     * that isn't exactly what we're already at.
     * this check only applies when the request changes the fps range
     * without also changing the scene mode.
     */
    if (strNewSceneMode && strCurSceneMode) {
        if (!strcmp(strNewSceneMode, strCurSceneMode)) {
            if ((newMinFps != curMinFps) || (newMaxFps != curMaxFps)) {
                ALOGW("%s : requested newMinFps = %d, newMaxFps = %d not allowed",
                     __func__, newMinFps, newMaxFps);
                ALOGE("%s : curMinFps = %d, curMaxFps = %d",
                     __func__, curMinFps, curMaxFps);
                ret = UNKNOWN_ERROR;
            }
        }
    } else {
        /* Check basic validation if scene mode is different */
        if ((newMaxFps < newMinFps) ||
            (newMinFps < 0) || (newMaxFps < 0))
            ret = UNKNOWN_ERROR;
    }

    if (strNewSceneMode != NULL) {
        int newSceneMode = -1;

        const char *strNewFlashMode = params.get(CameraParameters::KEY_FLASH_MODE);
        const char *strNewFocusMode = params.get(CameraParameters::KEY_FOCUS_MODE);

        // fps range is (15000,30000) by default.
        m_params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, "(15000,30000)");
        m_params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "15000,30000");

        if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_AUTO)) {
            newSceneMode = ExynosCamera::SCENE_MODE_AUTO;
        } else {
            // defaults for non-auto scene modes
            if (m_secCamera->getSupportedFocusModes() != 0)
                strNewFocusMode = CameraParameters::FOCUS_MODE_AUTO;

            strNewFlashMode = CameraParameters::FLASH_MODE_OFF;

            if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_ACTION)) {
                newSceneMode = ExynosCamera::SCENE_MODE_ACTION;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_PORTRAIT)) {
                newSceneMode = ExynosCamera::SCENE_MODE_PORTRAIT;
                strNewFlashMode = CameraParameters::FLASH_MODE_AUTO;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_LANDSCAPE)) {
                newSceneMode = ExynosCamera::SCENE_MODE_LANDSCAPE;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_NIGHT)) {
                newSceneMode = ExynosCamera::SCENE_MODE_NIGHT;
                m_params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, "(4000,30000)");
                m_params.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "4000,30000");
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_NIGHT_PORTRAIT)) {
                newSceneMode = ExynosCamera::SCENE_MODE_NIGHT_PORTRAIT;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_THEATRE)) {
                newSceneMode = ExynosCamera::SCENE_MODE_THEATRE;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_BEACH)) {
                newSceneMode = ExynosCamera::SCENE_MODE_BEACH;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SNOW)) {
                newSceneMode = ExynosCamera::SCENE_MODE_SNOW;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SUNSET)) {
                newSceneMode = ExynosCamera::SCENE_MODE_SUNSET;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_STEADYPHOTO)) {
                newSceneMode = ExynosCamera::SCENE_MODE_STEADYPHOTO;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_FIREWORKS)) {
                newSceneMode = ExynosCamera::SCENE_MODE_FIREWORKS;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_SPORTS)) {
                newSceneMode = ExynosCamera::SCENE_MODE_SPORTS;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_PARTY)) {
                newSceneMode = ExynosCamera::SCENE_MODE_PARTY;
                strNewFlashMode = CameraParameters::FLASH_MODE_AUTO;
            } else if (!strcmp(strNewSceneMode, CameraParameters::SCENE_MODE_CANDLELIGHT)) {
                newSceneMode = ExynosCamera::SCENE_MODE_CANDLELIGHT;
            } else {
                ALOGE("ERR(%s):unmatched scene_mode(%s)",
                     __func__, strNewSceneMode); //action, night-portrait, theatre, steadyphoto
                ret = UNKNOWN_ERROR;
            }
        }

        // focus mode
        if (strNewFocusMode != NULL) {
            int newFocusMode = -1;

            if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_AUTO)) {
                newFocusMode = ExynosCamera::FOCUS_MODE_AUTO;
                m_params.set(CameraParameters::KEY_FOCUS_DISTANCES,
                             BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR);
            } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_INFINITY)) {
                newFocusMode = ExynosCamera::FOCUS_MODE_INFINITY;
                m_params.set(CameraParameters::KEY_FOCUS_DISTANCES,
                             BACK_CAMERA_INFINITY_FOCUS_DISTANCES_STR);
            } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_MACRO)) {
                newFocusMode = ExynosCamera::FOCUS_MODE_MACRO;
                m_params.set(CameraParameters::KEY_FOCUS_DISTANCES,
                             BACK_CAMERA_MACRO_FOCUS_DISTANCES_STR);
            } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_FIXED)) {
                newFocusMode = ExynosCamera::FOCUS_MODE_FIXED;
            } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_EDOF)) {
                newFocusMode = ExynosCamera::FOCUS_MODE_EDOF;
            } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO)) {
                newFocusMode = ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO;
            } else if (!strcmp(strNewFocusMode, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)) {
                newFocusMode = ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE;
            } else {
                ALOGE("ERR(%s):unmatched focus_mode(%s)", __func__, strNewFocusMode);
                ret = UNKNOWN_ERROR;
            }

            if (0 <= newFocusMode) {
                if (m_secCamera->setFocusMode(newFocusMode) == false) {
                    ALOGE("ERR(%s):m_secCamera->setFocusMode(%d) fail", __func__, newFocusMode);
                    ret = UNKNOWN_ERROR;
                } else {
                    m_params.set(CameraParameters::KEY_FOCUS_MODE, strNewFocusMode);
                }
            }
        }

        // flash mode
        if (strNewFlashMode != NULL) {
            int newFlashMode = -1;

            if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_OFF))
                newFlashMode = ExynosCamera::FLASH_MODE_OFF;
            else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_AUTO))
                newFlashMode = ExynosCamera::FLASH_MODE_AUTO;
            else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_ON))
                newFlashMode = ExynosCamera::FLASH_MODE_ON;
            else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_RED_EYE))
                newFlashMode = ExynosCamera::FLASH_MODE_RED_EYE;
            else if (!strcmp(strNewFlashMode, CameraParameters::FLASH_MODE_TORCH))
                newFlashMode = ExynosCamera::FLASH_MODE_TORCH;
            else {
                ALOGE("ERR(%s):unmatched flash_mode(%s)", __func__, strNewFlashMode); //red-eye
                ret = UNKNOWN_ERROR;
            }
            if (0 <= newFlashMode) {
                if (m_secCamera->setFlashMode(newFlashMode) == false) {
                    ALOGE("ERR(%s):m_secCamera->setFlashMode(%d) fail", __func__, newFlashMode);
                    ret = UNKNOWN_ERROR;
                } else {
                    m_params.set(CameraParameters::KEY_FLASH_MODE, strNewFlashMode);
                }
            }
        }

        // scene mode
        if (0 <= newSceneMode) {
            if (m_secCamera->setSceneMode(newSceneMode) == false) {
                ALOGE("ERR(%s):m_secCamera->setSceneMode(%d) fail", __func__, newSceneMode);
                ret = UNKNOWN_ERROR;
            } else {
                m_params.set(CameraParameters::KEY_SCENE_MODE, strNewSceneMode);
            }
        }
    }

    // focus areas
    const char *newFocusAreas = params.get(CameraParameters::KEY_FOCUS_AREAS);
    int maxNumFocusAreas = m_secCamera->getMaxNumFocusAreas();

    if (newFocusAreas != NULL && maxNumFocusAreas != 0) {
        int curFocusMode = m_secCamera->getFocusMode();

        // In CameraParameters.h
        // Focus area only has effect if the cur focus mode is FOCUS_MODE_AUTO,
        // FOCUS_MODE_MACRO, FOCUS_MODE_CONTINUOUS_VIDEO, or
        // FOCUS_MODE_CONTINUOUS_PICTURE.
        if (   curFocusMode & ExynosCamera::FOCUS_MODE_AUTO
            || curFocusMode & ExynosCamera::FOCUS_MODE_MACRO
            || curFocusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO
            || curFocusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE) {

            // ex : (-10,-10,0,0,300),(0,0,10,10,700)
            ExynosRect2 *rect2s = new ExynosRect2[maxNumFocusAreas];
            int *weights = new int[maxNumFocusAreas];

            int validFocusedAreas = m_bracketsStr2Ints((char *)newFocusAreas, maxNumFocusAreas, rect2s, weights);
            if (0 < validFocusedAreas) {
                // CameraParameters.h
                // A special case of single focus area (0,0,0,0,0) means driver to decide
                // the focus area. For example, the driver may use more signals to decide
                // focus areas and change them dynamically. Apps can set (0,0,0,0,0) if they
                // want the driver to decide focus areas.
                if (   validFocusedAreas == 1
                    && rect2s[0].x1 == 0 && rect2s[0].y1 == 0 && rect2s[0].x2 == 0 && rect2s[0].y2 == 0) {
                    rect2s[0].x1 = 0;
                    rect2s[0].y1 = 0;
                    rect2s[0].x2 = newPreviewW;
                    rect2s[0].y2 = newPreviewH;
                } else {
                    // map each coordinate from the API's [-1000, 1000] range into the driver's [0, 1023] range
                    for (int i = 0; i < validFocusedAreas; i++) {
                        rect2s[i].x1 = (rect2s[i].x1 + 1000) * 1023 / 2000;
                        rect2s[i].y1 = (rect2s[i].y1 + 1000) * 1023 / 2000;
                        rect2s[i].x2 = (rect2s[i].x2 + 1000) * 1023 / 2000;
                        rect2s[i].y2 = (rect2s[i].y2 + 1000) * 1023 / 2000;
                    }

                    if (m_secCamera->setFocusAreas(validFocusedAreas, rect2s, weights) == false) {
                        ALOGE("ERR(%s):setFocusAreas(%s) fail", __func__, newFocusAreas);
                        ret = UNKNOWN_ERROR;
                    } else {
                        m_params.set(CameraParameters::KEY_FOCUS_AREAS, newFocusAreas);
                    }
                }
            }

            delete [] rect2s;
            delete [] weights;
        }
    }

    // image effect
    const char *strNewEffect = params.get(CameraParameters::KEY_EFFECT);
    if (strNewEffect != NULL) {

        int newEffect = -1;

        if (!strcmp(strNewEffect, CameraParameters::EFFECT_NONE)) {
            newEffect = ExynosCamera::EFFECT_NONE;
        } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_MONO)) {
            newEffect = ExynosCamera::EFFECT_MONO;
        } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_NEGATIVE)) {
            newEffect = ExynosCamera::EFFECT_NEGATIVE;
        } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_SOLARIZE)) {
            newEffect = ExynosCamera::EFFECT_SOLARIZE;
        } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_SEPIA)) {
            newEffect = ExynosCamera::EFFECT_SEPIA;
        } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_POSTERIZE)) {
            newEffect = ExynosCamera::EFFECT_POSTERIZE;
        } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_WHITEBOARD)) {
            newEffect = ExynosCamera::EFFECT_WHITEBOARD;
        } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_BLACKBOARD)) {
            newEffect = ExynosCamera::EFFECT_BLACKBOARD;
        } else if (!strcmp(strNewEffect, CameraParameters::EFFECT_AQUA)) {
            newEffect = ExynosCamera::EFFECT_AQUA;
        } else {
            ALOGE("ERR(%s):Invalid effect(%s)", __func__, strNewEffect);
            ret = UNKNOWN_ERROR;
        }

        if (0 <= newEffect) {
            if (m_secCamera->setColorEffect(newEffect) == false) {
                ALOGE("ERR(%s):Fail on m_secCamera->setColorEffect(effect(%d))", __func__, newEffect);
                ret = UNKNOWN_ERROR;
            } else {
                const char *oldStrEffect = m_params.get(CameraParameters::KEY_EFFECT);

                if (oldStrEffect) {
                    if (strcmp(oldStrEffect, strNewEffect)) {
                        m_setSkipFrame(EFFECT_SKIP_FRAME);
                    }
                }
                m_params.set(CameraParameters::KEY_EFFECT, strNewEffect);
            }
        }
    }

    // gps altitude
    const char *strNewGpsAltitude = params.get(CameraParameters::KEY_GPS_ALTITUDE);

    if (m_secCamera->setGpsAltitude(strNewGpsAltitude) == false) {
        ALOGE("ERR(%s):m_secCamera->setGpsAltitude(%s) fail", __func__, strNewGpsAltitude);
        ret = UNKNOWN_ERROR;
    } else {
        if (strNewGpsAltitude)
            m_params.set(CameraParameters::KEY_GPS_ALTITUDE, strNewGpsAltitude);
        else
            m_params.remove(CameraParameters::KEY_GPS_ALTITUDE);
    }

    // gps latitude
    const char *strNewGpsLatitude = params.get(CameraParameters::KEY_GPS_LATITUDE);
    if (m_secCamera->setGpsLatitude(strNewGpsLatitude) == false) {
        ALOGE("ERR(%s):m_secCamera->setGpsLatitude(%s) fail", __func__, strNewGpsLatitude);
        ret = UNKNOWN_ERROR;
    } else {
        if (strNewGpsLatitude)
            m_params.set(CameraParameters::KEY_GPS_LATITUDE, strNewGpsLatitude);
        else
            m_params.remove(CameraParameters::KEY_GPS_LATITUDE);
    }

    // gps longitude
    const char *strNewGpsLongtitude = params.get(CameraParameters::KEY_GPS_LONGITUDE);
    if (m_secCamera->setGpsLongitude(strNewGpsLongtitude) == false) {
        ALOGE("ERR(%s):m_secCamera->setGpsLongitude(%s) fail", __func__, strNewGpsLongtitude);
        ret = UNKNOWN_ERROR;
    } else {
        if (strNewGpsLongtitude)
            m_params.set(CameraParameters::KEY_GPS_LONGITUDE, strNewGpsLongtitude);
        else
            m_params.remove(CameraParameters::KEY_GPS_LONGITUDE);
    }

    // gps processing method
    const char *strNewGpsProcessingMethod = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD);

    if (m_secCamera->setGpsProcessingMethod(strNewGpsProcessingMethod) == false) {
        ALOGE("ERR(%s):m_secCamera->setGpsProcessingMethod(%s) fail", __func__, strNewGpsProcessingMethod);
        ret = UNKNOWN_ERROR;
    } else {
        if (strNewGpsProcessingMethod)
            m_params.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, strNewGpsProcessingMethod);
        else
            m_params.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD);
    }

    // gps timestamp
    const char *strNewGpsTimestamp = params.get(CameraParameters::KEY_GPS_TIMESTAMP);
    if (m_secCamera->setGpsTimeStamp(strNewGpsTimestamp) == false) {
        ALOGE("ERR(%s):m_secCamera->setGpsTimeStamp(%s) fail", __func__, strNewGpsTimestamp);
        ret = UNKNOWN_ERROR;
    } else {
        if (strNewGpsTimestamp)
            m_params.set(CameraParameters::KEY_GPS_TIMESTAMP, strNewGpsTimestamp);
        else
            m_params.remove(CameraParameters::KEY_GPS_TIMESTAMP);
    }

    ///////////////////////////////////////////////////
    // Additional API.
    ///////////////////////////////////////////////////
    // brightness
    int newBrightness = params.getInt("brightness");
    int maxBrightness = params.getInt("brightness-max");
    int minBrightness = params.getInt("brightness-min");
    ALOGV("DEBUG(%s):newBrightness %d", __func__, newBrightness);
    if ((minBrightness <= newBrightness) && (newBrightness <= maxBrightness)) {
        if (m_secCamera->setBrightness(newBrightness) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setBrightness(%d)", __func__, newBrightness);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set("brightness", newBrightness);
        }
    }

    // saturation
    int newSaturation = params.getInt("saturation");
    int maxSaturation = params.getInt("saturation-max");
    int minSaturation = params.getInt("saturation-min");
    ALOGV("DEBUG(%s):newSaturation %d", __func__, newSaturation);
    if ((minSaturation <= newSaturation) && (newSaturation <= maxSaturation)) {
        if (m_secCamera->setSaturation(newSaturation) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setSaturation(%d)", __func__, newSaturation);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set("saturation", newSaturation);
        }
    }

    // sharpness
    int newSharpness = params.getInt("sharpness");
    int maxSharpness = params.getInt("sharpness-max");
    int minSharpness = params.getInt("sharpness-min");
    ALOGV("DEBUG(%s):newSharpness %d", __func__, newSharpness);
    if ((minSharpness <= newSharpness) && (newSharpness <= maxSharpness)) {
        if (m_secCamera->setSharpness(newSharpness) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setSharpness(%d)", __func__, newSharpness);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set("sharpness", newSharpness);
        }
    }

    // hue
    int newHue = params.getInt("hue");
    int maxHue = params.getInt("hue-max");
    int minHue = params.getInt("hue-min");
    ALOGV("DEBUG(%s):newHue %d", __func__, newHue);
    if ((minHue <= newHue) && (maxHue >= newHue)) {
        if (m_secCamera->setHue(newHue) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setHue(hue(%d))", __func__, newHue);
            ret = UNKNOWN_ERROR;
        } else {
            m_params.set("hue", newHue);
        }
    }

    // ISO
    const char *strNewISO = params.get("iso");
    ALOGV("DEBUG(%s):strNewISO %s", __func__, strNewISO);
    if (strNewISO != NULL) {
        int newISO = -1;

        if (!strcmp(strNewISO, "auto"))
            newISO = 0;
        else {
            newISO = (int)atoi(strNewISO);
            if (newISO == 0) {
                ALOGE("ERR(%s):Invalid iso value(%s)", __func__, strNewISO);
                ret = UNKNOWN_ERROR;
            }
        }

        if (0 <= newISO) {
            if (m_secCamera->setISO(newISO) == false) {
                ALOGE("ERR(%s):Fail on m_secCamera->setISO(iso(%d))", __func__, newISO);
                ret = UNKNOWN_ERROR;
            } else {
                m_params.set("iso", strNewISO);
            }
        }
    }

    //contrast
    const char *strNewContrast = params.get("contrast");
    ALOGV("DEBUG(%s):strNewContrast %s", __func__, strNewContrast);
    if (strNewContrast != NULL) {
        int newContrast = -1;

        if (!strcmp(strNewContrast, "auto"))
            newContrast = ExynosCamera::CONTRAST_AUTO;
        else if (!strcmp(strNewContrast, "-2"))
            newContrast = ExynosCamera::CONTRAST_MINUS_2;
        else if (!strcmp(strNewContrast, "-1"))
            newContrast = ExynosCamera::CONTRAST_MINUS_1;
        else if (!strcmp(strNewContrast, "0"))
            newContrast = ExynosCamera::CONTRAST_DEFAULT;
        else if (!strcmp(strNewContrast, "1"))
            newContrast = ExynosCamera::CONTRAST_PLUS_1;
        else if (!strcmp(strNewContrast, "2"))
            newContrast = ExynosCamera::CONTRAST_PLUS_2;
        else {
            ALOGE("ERR(%s):Invalid contrast value(%s)", __func__, strNewContrast);
            ret = UNKNOWN_ERROR;
        }

        if (0 <= newContrast) {
            if (m_secCamera->setContrast(newContrast) == false) {
                ALOGE("ERR(%s):Fail on m_secCamera->setContrast(contrast(%d))", __func__, newContrast);
                ret = UNKNOWN_ERROR;
            } else {
                m_params.set("contrast", strNewContrast);
            }
        }
    }

    //WDR
    int newWdr = params.getInt("wdr");
    ALOGV("DEBUG(%s):newWdr %d", __func__, newWdr);
    if (0 <= newWdr) {
        if (m_secCamera->setWDR(newWdr) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setWDR(%d)", __func__, newWdr);
            ret = UNKNOWN_ERROR;
        }
    }

    //anti shake
    int newAntiShake = m_internalParams.getInt("anti-shake");
    ALOGV("DEBUG(%s):newAntiShake %d", __func__, newAntiShake);
    if (0 <= newAntiShake) {
        bool toggle = false;
        if (newAntiShake == 1)
            toggle = true;

        if (m_secCamera->setAntiShake(toggle) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setAntiShake(%d)", __func__, newAntiShake);
            ret = UNKNOWN_ERROR;
        }
    }

    //gamma
    const char *strNewGamma = m_internalParams.get("video_recording_gamma");
    ALOGV("DEBUG(%s):strNewGamma %s", __func__, strNewGamma);
    if (strNewGamma != NULL) {
        int newGamma = -1;
        if (!strcmp(strNewGamma, "off"))
            newGamma = 0;
        else if (!strcmp(strNewGamma, "on"))
            newGamma = 1;
        else {
            ALOGE("ERR(%s):unmatched gamma(%s)", __func__, strNewGamma);
            ret = UNKNOWN_ERROR;
        }

        if (0 <= newGamma) {
            bool toggle = false;
            if (newGamma == 1)
                toggle = true;

            if (m_secCamera->setGamma(toggle) == false) {
                ALOGE("ERR(%s):m_secCamera->setGamma(%s) fail", __func__, strNewGamma);
                ret = UNKNOWN_ERROR;
            }
        }
    }

    //slow ae
    const char *strNewSlowAe = m_internalParams.get("slow_ae");
    ALOGV("DEBUG(%s):strNewSlowAe %s", __func__, strNewSlowAe);
    if (strNewSlowAe != NULL) {
        int newSlowAe = -1;

        if (!strcmp(strNewSlowAe, "off"))
            newSlowAe = 0;
        else if (!strcmp(strNewSlowAe, "on"))
            newSlowAe = 1;
        else {
            ALOGE("ERR(%s):unmatched slow_ae(%s)", __func__, strNewSlowAe);
            ret = UNKNOWN_ERROR;
        }

        if (0 <= newSlowAe) {
            bool toggle = false;
            if (newSlowAe == 1)
                toggle = true;
            if (m_secCamera->setSlowAE(newSlowAe) == false) {
                ALOGE("ERR(%s):m_secCamera->setSlowAE(%d) fail", __func__, newSlowAe);
                ret = UNKNOWN_ERROR;
            }
        }
    }

    // Shot mode
    int newShotMode = m_internalParams.getInt("shot_mode");
    ALOGV("DEBUG(%s):newShotMode %d", __func__, newShotMode);
    if (0 <= newShotMode) {
        if (m_secCamera->setShotMode(newShotMode) == false) {
            ALOGE("ERR(%s):Fail on m_secCamera->setShotMode(%d)", __func__, newShotMode);
            ret = UNKNOWN_ERROR;
        }
    } else {
        newShotMode = 0;
    }

    ALOGV("DEBUG(%s):return ret = %d", __func__, ret);

    return ret;
}

CameraParameters ExynosCameraHWInterface::getParameters() const
{
    ALOGV("DEBUG(%s):", __func__);
    return m_params;
}

status_t ExynosCameraHWInterface::sendCommand(int32_t command, int32_t arg1, int32_t arg2)
{
    switch (command) {
CAMERA_CMD_START_FACE_DETECTION:
    case CAMERA_CMD_STOP_FACE_DETECTION:
        if (m_secCamera->getMaxNumDetectedFaces() == 0) {
            ALOGE("ERR(%s):getMaxNumDetectedFaces == 0", __func__);
            return BAD_VALUE;
        }

        if (arg1 == CAMERA_FACE_DETECTION_SW) {
            ALOGE("ERR(%s):only HW face detection is supported", __func__);
            return BAD_VALUE;
        }

        if (command == CAMERA_CMD_START_FACE_DETECTION) {
            if (   m_secCamera->flagStartFaceDetection() == false
                && m_secCamera->startFaceDetection() == false) {
                ALOGE("ERR(%s):startFaceDetection() fail", __func__);
                return BAD_VALUE;
            }
        } else { // if (command == CAMERA_CMD_STOP_FACE_DETECTION)
            if (   m_secCamera->flagStartFaceDetection() == true
                && m_secCamera->stopFaceDetection() == false) {
                ALOGE("ERR(%s):stopFaceDetection() fail", __func__);
                return BAD_VALUE;
            }
        }
        break;
    default:
        ALOGE("ERR(%s):unexpected command(%d) fail", __func__, command);
        return BAD_VALUE;
        break;
    }
    return NO_ERROR;
}

void ExynosCameraHWInterface::release()
{
    ALOGV("DEBUG(%s):", __func__);

    /* shut down any threads we have that might be running. do it here
     * instead of the destructor. we're guaranteed to be on another thread
     * than the ones below. if we used the destructor, since the threads
     * have a reference to this object, we could wind up trying to wait
     * for ourselves to exit, which is a deadlock.
     */
    if (m_videoThread != NULL) {
        m_videoThread->requestExit();
        m_exitVideoThread = true;
        m_videoRunning = true; // let it run so it can exit
        m_videoCondition.signal();
        m_videoThread->requestExitAndWait();
        m_videoThread.clear();
    }

    if (m_previewThread != NULL) {
        /* this thread is normally already in its threadLoop but blocked
         * on the condition variable or running. signal it so it wakes
         * up and can exit.
         */
        m_previewThread->requestExit();
        m_exitPreviewThread = true;
        m_previewRunning = true; // let it run so it can exit
        m_previewCondition.signal();
        m_previewThread->requestExitAndWait();
        m_previewThread.clear();
    }

    if (m_autoFocusThread != NULL) {
        /* this thread is normally already in its threadLoop but blocked
         * on the condition variable. signal it so it wakes up and can exit.
1749 */ 1750 m_focusLock.lock(); 1751 m_autoFocusThread->requestExit(); 1752 m_exitAutoFocusThread = true; 1753 m_focusCondition.signal(); 1754 m_focusLock.unlock(); 1755 m_autoFocusThread->requestExitAndWait(); 1756 m_autoFocusThread.clear(); 1757 } 1758 1759 if (m_pictureThread != NULL) { 1760 m_pictureThread->requestExitAndWait(); 1761 m_pictureThread.clear(); 1762 } 1763 1764 for (int i = 0; i < NUM_OF_VIDEO_BUF; i++) { 1765 if (m_videoHeap[i]) { 1766 m_videoHeap[i]->release(m_videoHeap[i]); 1767 m_videoHeap[i] = 0; 1768 } 1769 1770 if (m_resizedVideoHeap[i]) { 1771 m_resizedVideoHeap[i]->release(m_resizedVideoHeap[i]); 1772 m_resizedVideoHeap[i] = 0; 1773 } 1774 } 1775 1776 for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) { 1777 if (m_previewHeap[i]) { 1778 m_previewHeap[i]->release(m_previewHeap[i]); 1779 m_previewHeap[i] = 0; 1780 } 1781 } 1782 1783 if (m_pictureRunning == true) { 1784 if (m_stopPictureInternal() == false) 1785 ALOGE("ERR(%s):m_stopPictureInternal() fail", __func__); 1786 } 1787 1788 if (m_exynosVideoCSC) 1789 csc_deinit(m_exynosVideoCSC); 1790 m_exynosVideoCSC = NULL; 1791 1792 if (m_exynosPictureCSC) 1793 csc_deinit(m_exynosPictureCSC); 1794 m_exynosPictureCSC = NULL; 1795 1796 if (m_exynosPreviewCSC) 1797 csc_deinit(m_exynosPreviewCSC); 1798 m_exynosPreviewCSC = NULL; 1799 1800 /* close after all the heaps are cleared since those 1801 * could have dup'd our file descriptor. 1802 */ 1803 if (m_secCamera->flagCreate() == true) 1804 m_secCamera->destroy(); 1805} 1806 1807status_t ExynosCameraHWInterface::dump(int fd) const 1808{ 1809 const size_t SIZE = 256; 1810 char buffer[SIZE]; 1811 String8 result; 1812 const Vector<String16> args; 1813 1814 if (m_secCamera != 0) { 1815 m_params.dump(fd, args); 1816 m_internalParams.dump(fd, args); 1817 snprintf(buffer, 255, " preview running(%s)\n", m_previewRunning?"true": "false"); 1818 result.append(buffer); 1819 } else { 1820 result.append("No camera client yet.\n"); 1821 } 1822 1823 write(fd, result.string(), result.size()); 1824 return NO_ERROR; 1825} 1826 1827int ExynosCameraHWInterface::getCameraId() const 1828{ 1829 return m_secCamera->getCameraId(); 1830} 1831 1832void ExynosCameraHWInterface::m_initDefaultParameters(int cameraId) 1833{ 1834 if (m_secCamera == NULL) { 1835 ALOGE("ERR(%s):m_secCamera object is NULL", __func__); 1836 return; 1837 } 1838 1839 CameraParameters p; 1840 CameraParameters ip; 1841 1842 String8 parameterString; 1843 1844 char * cameraName; 1845 cameraName = m_secCamera->getCameraName(); 1846 if (cameraName == NULL) 1847 ALOGE("ERR(%s):getCameraName() fail", __func__); 1848 1849 /* 1850 if (cameraId == ExynosCamera::CAMERA_ID_BACK) { 1851 p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, 1852 "3264x2448,2576x1948,1920x1080,1280x720,800x480,720x480,640x480,320x240,528x432,176x144"); 1853 p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, 1854 "3264x2448,1920x1080,1280x720,800x480,720x480,640x480"); 1855 p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, 1856 "1920x1080,1280x720,640x480,176x144"); 1857 } else { 1858 p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, 1859 "1392x1392,1280x720,640x480,352x288,320x240,176x144"); 1860 p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, 1861 "1392x1392,1280x960,640x480"); 1862 p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, 1863 "1280x720,640x480,176x144"); 1864 } 1865 */ 1866 1867 char strBuf[256]; 1868 String8 listString; 1869 1870 // preview 1871 int previewMaxW = 0; 1872 int previewMaxH = 0; 1873 
m_secCamera->getSupportedPreviewSizes(&previewMaxW, &previewMaxH);

    listString.setTo("");
    if (m_getResolutionList(listString, strBuf, previewMaxW, previewMaxH) == false) {
        ALOGE("ERR(%s):m_getResolutionList() fail", __func__);

        previewMaxW = 640;
        previewMaxH = 480;
        listString = String8::format("%dx%d", previewMaxW, previewMaxH);
    }

    p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, listString.string());
    p.setPreviewSize(previewMaxW, previewMaxH);
    p.getSupportedPreviewSizes(m_supportedPreviewSizes);

    listString.setTo("");
    listString = String8::format("%s,%s", CameraParameters::PIXEL_FORMAT_YUV420SP, CameraParameters::PIXEL_FORMAT_YUV420P);
    p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, listString);
    p.setPreviewFormat(CameraParameters::PIXEL_FORMAT_YUV420P);

    // video
    int videoMaxW = 0;
    int videoMaxH = 0;
    m_secCamera->getSupportedVideoSizes(&videoMaxW, &videoMaxH);

    listString.setTo("");
    if (m_getResolutionList(listString, strBuf, videoMaxW, videoMaxH) == false) {
        ALOGE("ERR(%s):m_getResolutionList() fail", __func__);

        videoMaxW = 640;
        videoMaxH = 480;
        listString = String8::format("%dx%d", videoMaxW, videoMaxH);
    }
    p.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, listString.string());
    p.setVideoSize(videoMaxW, videoMaxH);

    int preferredPreviewW = 0;
    int preferredPreviewH = 0;
    m_secCamera->getPreferredPreivewSizeForVideo(&preferredPreviewW, &preferredPreviewH);
    listString.setTo("");
    listString = String8::format("%dx%d", preferredPreviewW, preferredPreviewH);
    p.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, listString.string());
    p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, CameraParameters::PIXEL_FORMAT_YUV420SP);

    if (m_secCamera->isVideoSnapshotSupported() == true)
        p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, "true");
    else
        p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, "false");

    if (m_secCamera->isVideoStabilizationSupported() == true)
        p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, "true");
    else
        p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, "false");

    // picture
    int pictureMaxW = 0;
    int pictureMaxH = 0;
    m_secCamera->getSupportedPictureSizes(&pictureMaxW, &pictureMaxH);

    listString.setTo("");
    if (m_getResolutionList(listString, strBuf, pictureMaxW, pictureMaxH) == false) {
        ALOGE("ERR(%s):m_getResolutionList() fail", __func__);

        pictureMaxW = 640;
        pictureMaxH = 480;
        listString = String8::format("%dx%d", pictureMaxW, pictureMaxH);
    }
    p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, listString.string());
    p.setPictureSize(pictureMaxW, pictureMaxH);

    p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
          CameraParameters::PIXEL_FORMAT_JPEG);

    p.setPictureFormat(CameraParameters::PIXEL_FORMAT_JPEG);

    p.set(CameraParameters::KEY_JPEG_QUALITY, "100"); // maximum quality

    // thumbnail
    int thumbnailMaxW = 0;
    int thumbnailMaxH = 0;

    m_secCamera->getSupportedJpegThumbnailSizes(&thumbnailMaxW, &thumbnailMaxH);
    listString = String8::format("%dx%d", thumbnailMaxW, thumbnailMaxH);
    listString.append(",0x0");
    p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, listString.string());
    p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH,
thumbnailMaxW); 1959 p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, thumbnailMaxH); 1960 p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, "100"); 1961 1962 // exposure 1963 p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, m_secCamera->getMinExposureCompensation()); 1964 p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, m_secCamera->getMaxExposureCompensation()); 1965 p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, m_secCamera->getExposureCompensation()); 1966 p.setFloat(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, m_secCamera->getExposureCompensationStep()); 1967 1968 if (m_secCamera->isAutoExposureLockSupported() == true) 1969 p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "true"); 1970 else 1971 p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "false"); 1972 1973 // face detection 1974 p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, m_secCamera->getMaxNumDetectedFaces()); 1975 p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, 0); 1976 1977 // focus mode 1978 int focusMode = m_secCamera->getSupportedFocusModes(); 1979 parameterString.setTo(""); 1980 if (focusMode & ExynosCamera::FOCUS_MODE_AUTO) { 1981 parameterString.append(CameraParameters::FOCUS_MODE_AUTO); 1982 parameterString.append(","); 1983 } 1984 if (focusMode & ExynosCamera::FOCUS_MODE_INFINITY) { 1985 parameterString.append(CameraParameters::FOCUS_MODE_INFINITY); 1986 parameterString.append(","); 1987 } 1988 if (focusMode & ExynosCamera::FOCUS_MODE_MACRO) { 1989 parameterString.append(CameraParameters::FOCUS_MODE_MACRO); 1990 parameterString.append(","); 1991 } 1992 if (focusMode & ExynosCamera::FOCUS_MODE_FIXED) { 1993 parameterString.append(CameraParameters::FOCUS_MODE_FIXED); 1994 parameterString.append(","); 1995 } 1996 if (focusMode & ExynosCamera::FOCUS_MODE_EDOF) { 1997 parameterString.append(CameraParameters::FOCUS_MODE_EDOF); 1998 parameterString.append(","); 1999 } 2000 if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO) { 2001 parameterString.append(CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO); 2002 parameterString.append(","); 2003 } 2004 if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE) 2005 parameterString.append(CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE); 2006 2007 p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, 2008 parameterString.string()); 2009 2010 if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_PICTURE) 2011 p.set(CameraParameters::KEY_FOCUS_MODE, 2012 CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE); 2013 else if (focusMode & ExynosCamera::FOCUS_MODE_CONTINUOUS_VIDEO) 2014 p.set(CameraParameters::KEY_FOCUS_MODE, 2015 CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO); 2016 else if (focusMode & ExynosCamera::FOCUS_MODE_AUTO) 2017 p.set(CameraParameters::KEY_FOCUS_MODE, 2018 CameraParameters::FOCUS_MODE_AUTO); 2019 else 2020 p.set(CameraParameters::KEY_FOCUS_MODE, 2021 CameraParameters::FOCUS_MODE_FIXED); 2022 2023 // HACK 2024 if (cameraId == ExynosCamera::CAMERA_ID_BACK) { 2025 p.set(CameraParameters::KEY_FOCUS_DISTANCES, 2026 BACK_CAMERA_AUTO_FOCUS_DISTANCES_STR); 2027 p.set(CameraParameters::FOCUS_DISTANCE_INFINITY, 2028 BACK_CAMERA_FOCUS_DISTANCE_INFINITY); 2029 } else { 2030 p.set(CameraParameters::KEY_FOCUS_DISTANCES, 2031 FRONT_CAMERA_FOCUS_DISTANCES_STR); 2032 p.set(CameraParameters::FOCUS_DISTANCE_INFINITY, 2033 FRONT_CAMERA_FOCUS_DISTANCE_INFINITY); 2034 } 2035 2036 if (focusMode & ExynosCamera::FOCUS_MODE_TOUCH) 2037 p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, m_secCamera->getMaxNumFocusAreas()); 2038 2039 
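    /*
     * The capability sections that follow (flash, scene mode, color effect,
     * white balance, antibanding) repeat the pattern used for focus modes
     * above: query a capability bitmask from ExynosCamera, append the matching
     * CameraParameters string constant for every bit that is set, and publish
     * the comma-separated result. A minimal sketch of that pattern, kept as a
     * comment so it does not change this HAL; appendIfSupported() is a
     * hypothetical helper, not an existing function in this file:
     *
     *   static void appendIfSupported(String8 &list, int mask, int bit, const char *name)
     *   {
     *       if (mask & bit) {
     *           if (!list.isEmpty())
     *               list.append(",");
     *           list.append(name);
     *       }
     *   }
     *
     *   // usage: appendIfSupported(parameterString, flashMode,
     *   //                          ExynosCamera::FLASH_MODE_OFF,
     *   //                          CameraParameters::FLASH_MODE_OFF);
     */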
// flash 2040 int flashMode = m_secCamera->getSupportedFlashModes(); 2041 parameterString.setTo(""); 2042 if (flashMode & ExynosCamera::FLASH_MODE_OFF) { 2043 parameterString.append(CameraParameters::FLASH_MODE_OFF); 2044 parameterString.append(","); 2045 } 2046 if (flashMode & ExynosCamera::FLASH_MODE_AUTO) { 2047 parameterString.append(CameraParameters::FLASH_MODE_AUTO); 2048 parameterString.append(","); 2049 } 2050 if (flashMode & ExynosCamera::FLASH_MODE_ON) { 2051 parameterString.append(CameraParameters::FLASH_MODE_ON); 2052 parameterString.append(","); 2053 } 2054 if (flashMode & ExynosCamera::FLASH_MODE_RED_EYE) { 2055 parameterString.append(CameraParameters::FLASH_MODE_RED_EYE); 2056 parameterString.append(","); 2057 } 2058 if (flashMode & ExynosCamera::FLASH_MODE_TORCH) 2059 parameterString.append(CameraParameters::FLASH_MODE_TORCH); 2060 2061 p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, parameterString.string()); 2062 p.set(CameraParameters::KEY_FLASH_MODE, CameraParameters::FLASH_MODE_OFF); 2063 2064 // scene mode 2065 int sceneMode = m_secCamera->getSupportedSceneModes(); 2066 parameterString.setTo(""); 2067 if (sceneMode & ExynosCamera::SCENE_MODE_AUTO) { 2068 parameterString.append(CameraParameters::SCENE_MODE_AUTO); 2069 parameterString.append(","); 2070 } 2071 if (sceneMode & ExynosCamera::SCENE_MODE_ACTION) { 2072 parameterString.append(CameraParameters::SCENE_MODE_ACTION); 2073 parameterString.append(","); 2074 } 2075 if (sceneMode & ExynosCamera::SCENE_MODE_PORTRAIT) { 2076 parameterString.append(CameraParameters::SCENE_MODE_PORTRAIT); 2077 parameterString.append(","); 2078 } 2079 if (sceneMode & ExynosCamera::SCENE_MODE_LANDSCAPE) { 2080 parameterString.append(CameraParameters::SCENE_MODE_LANDSCAPE); 2081 parameterString.append(","); 2082 } 2083 if (sceneMode & ExynosCamera::SCENE_MODE_NIGHT) { 2084 parameterString.append(CameraParameters::SCENE_MODE_NIGHT); 2085 parameterString.append(","); 2086 } 2087 if (sceneMode & ExynosCamera::SCENE_MODE_NIGHT_PORTRAIT) { 2088 parameterString.append(CameraParameters::SCENE_MODE_NIGHT_PORTRAIT); 2089 parameterString.append(","); 2090 } 2091 if (sceneMode & ExynosCamera::SCENE_MODE_THEATRE) { 2092 parameterString.append(CameraParameters::SCENE_MODE_THEATRE); 2093 parameterString.append(","); 2094 } 2095 if (sceneMode & ExynosCamera::SCENE_MODE_BEACH) { 2096 parameterString.append(CameraParameters::SCENE_MODE_BEACH); 2097 parameterString.append(","); 2098 } 2099 if (sceneMode & ExynosCamera::SCENE_MODE_SNOW) { 2100 parameterString.append(CameraParameters::SCENE_MODE_SNOW); 2101 parameterString.append(","); 2102 } 2103 if (sceneMode & ExynosCamera::SCENE_MODE_SUNSET) { 2104 parameterString.append(CameraParameters::SCENE_MODE_SUNSET); 2105 parameterString.append(","); 2106 } 2107 if (sceneMode & ExynosCamera::SCENE_MODE_STEADYPHOTO) { 2108 parameterString.append(CameraParameters::SCENE_MODE_STEADYPHOTO); 2109 parameterString.append(","); 2110 } 2111 if (sceneMode & ExynosCamera::SCENE_MODE_FIREWORKS) { 2112 parameterString.append(CameraParameters::SCENE_MODE_FIREWORKS); 2113 parameterString.append(","); 2114 } 2115 if (sceneMode & ExynosCamera::SCENE_MODE_SPORTS) { 2116 parameterString.append(CameraParameters::SCENE_MODE_SPORTS); 2117 parameterString.append(","); 2118 } 2119 if (sceneMode & ExynosCamera::SCENE_MODE_PARTY) { 2120 parameterString.append(CameraParameters::SCENE_MODE_PARTY); 2121 parameterString.append(","); 2122 } 2123 if (sceneMode & ExynosCamera::SCENE_MODE_CANDLELIGHT) 2124 
parameterString.append(CameraParameters::SCENE_MODE_CANDLELIGHT); 2125 2126 p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, 2127 parameterString.string()); 2128 p.set(CameraParameters::KEY_SCENE_MODE, 2129 CameraParameters::SCENE_MODE_AUTO); 2130 2131 // effect 2132 int effect = m_secCamera->getSupportedColorEffects(); 2133 parameterString.setTo(""); 2134 if (effect & ExynosCamera::EFFECT_NONE) { 2135 parameterString.append(CameraParameters::EFFECT_NONE); 2136 parameterString.append(","); 2137 } 2138 if (effect & ExynosCamera::EFFECT_MONO) { 2139 parameterString.append(CameraParameters::EFFECT_MONO); 2140 parameterString.append(","); 2141 } 2142 if (effect & ExynosCamera::EFFECT_NEGATIVE) { 2143 parameterString.append(CameraParameters::EFFECT_NEGATIVE); 2144 parameterString.append(","); 2145 } 2146 if (effect & ExynosCamera::EFFECT_SOLARIZE) { 2147 parameterString.append(CameraParameters::EFFECT_SOLARIZE); 2148 parameterString.append(","); 2149 } 2150 if (effect & ExynosCamera::EFFECT_SEPIA) { 2151 parameterString.append(CameraParameters::EFFECT_SEPIA); 2152 parameterString.append(","); 2153 } 2154 if (effect & ExynosCamera::EFFECT_POSTERIZE) { 2155 parameterString.append(CameraParameters::EFFECT_POSTERIZE); 2156 parameterString.append(","); 2157 } 2158 if (effect & ExynosCamera::EFFECT_WHITEBOARD) { 2159 parameterString.append(CameraParameters::EFFECT_WHITEBOARD); 2160 parameterString.append(","); 2161 } 2162 if (effect & ExynosCamera::EFFECT_BLACKBOARD) { 2163 parameterString.append(CameraParameters::EFFECT_BLACKBOARD); 2164 parameterString.append(","); 2165 } 2166 if (effect & ExynosCamera::EFFECT_AQUA) 2167 parameterString.append(CameraParameters::EFFECT_AQUA); 2168 2169 p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, parameterString.string()); 2170 p.set(CameraParameters::KEY_EFFECT, CameraParameters::EFFECT_NONE); 2171 2172 // white balance 2173 int whiteBalance = m_secCamera->getSupportedWhiteBalance(); 2174 parameterString.setTo(""); 2175 if (whiteBalance & ExynosCamera::WHITE_BALANCE_AUTO) { 2176 parameterString.append(CameraParameters::WHITE_BALANCE_AUTO); 2177 parameterString.append(","); 2178 } 2179 if (whiteBalance & ExynosCamera::WHITE_BALANCE_INCANDESCENT) { 2180 parameterString.append(CameraParameters::WHITE_BALANCE_INCANDESCENT); 2181 parameterString.append(","); 2182 } 2183 if (whiteBalance & ExynosCamera::WHITE_BALANCE_FLUORESCENT) { 2184 parameterString.append(CameraParameters::WHITE_BALANCE_FLUORESCENT); 2185 parameterString.append(","); 2186 } 2187 if (whiteBalance & ExynosCamera::WHITE_BALANCE_WARM_FLUORESCENT) { 2188 parameterString.append(CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT); 2189 parameterString.append(","); 2190 } 2191 if (whiteBalance & ExynosCamera::WHITE_BALANCE_DAYLIGHT) { 2192 parameterString.append(CameraParameters::WHITE_BALANCE_DAYLIGHT); 2193 parameterString.append(","); 2194 } 2195 if (whiteBalance & ExynosCamera::WHITE_BALANCE_CLOUDY_DAYLIGHT) { 2196 parameterString.append(CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT); 2197 parameterString.append(","); 2198 } 2199 if (whiteBalance & ExynosCamera::WHITE_BALANCE_TWILIGHT) { 2200 parameterString.append(CameraParameters::WHITE_BALANCE_TWILIGHT); 2201 parameterString.append(","); 2202 } 2203 if (whiteBalance & ExynosCamera::WHITE_BALANCE_SHADE) 2204 parameterString.append(CameraParameters::WHITE_BALANCE_SHADE); 2205 2206 p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, 2207 parameterString.string()); 2208 p.set(CameraParameters::KEY_WHITE_BALANCE, CameraParameters::WHITE_BALANCE_AUTO); 
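    /*
     * Everything published through p here is what applications later read back
     * via android.hardware.Camera.getParameters(). As an illustration only
     * (the exact contents depend on getSupportedWhiteBalance()), the
     * white-balance capabilities set above could be inspected like this:
     *
     *   const char *wbList = p.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
     *   // e.g. "auto,incandescent,fluorescent,daylight"
     *   const char *curWb  = p.get(CameraParameters::KEY_WHITE_BALANCE);  // "auto"
     */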
2209 2210 if (m_secCamera->isAutoWhiteBalanceLockSupported() == true) 2211 p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "true"); 2212 else 2213 p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "false"); 2214 2215 // anti banding 2216 int antiBanding = m_secCamera->getSupportedAntibanding(); 2217 parameterString.setTo(""); 2218 if (antiBanding & ExynosCamera::ANTIBANDING_AUTO) { 2219 parameterString.append(CameraParameters::ANTIBANDING_AUTO); 2220 parameterString.append(","); 2221 } 2222 if (antiBanding & ExynosCamera::ANTIBANDING_50HZ) { 2223 parameterString.append(CameraParameters::ANTIBANDING_50HZ); 2224 parameterString.append(","); 2225 } 2226 if (antiBanding & ExynosCamera::ANTIBANDING_60HZ) { 2227 parameterString.append(CameraParameters::ANTIBANDING_60HZ); 2228 parameterString.append(","); 2229 } 2230 if (antiBanding & ExynosCamera::ANTIBANDING_OFF) 2231 parameterString.append(CameraParameters::ANTIBANDING_OFF); 2232 2233 p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, 2234 parameterString.string()); 2235 2236 p.set(CameraParameters::KEY_ANTIBANDING, CameraParameters::ANTIBANDING_OFF); 2237 2238 // rotation 2239 p.set(CameraParameters::KEY_ROTATION, 0); 2240 2241 // view angle 2242 p.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, m_secCamera->getHorizontalViewAngle()); 2243 p.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, m_secCamera->getVerticalViewAngle()); 2244 2245 // metering 2246 if (0 < m_secCamera->getMaxNumMeteringAreas()) 2247 p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, m_secCamera->getMaxNumMeteringAreas()); 2248 2249 // zoom 2250 if (m_secCamera->isZoomSupported() == true) { 2251 2252 int maxZoom = m_secCamera->getMaxZoom(); 2253 if (0 < maxZoom) { 2254 2255 p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "true"); 2256 2257 if (m_secCamera->isSmoothZoomSupported() == true) 2258 p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "true"); 2259 else 2260 p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false"); 2261 2262 p.set(CameraParameters::KEY_MAX_ZOOM, maxZoom); 2263 p.set(CameraParameters::KEY_ZOOM, m_secCamera->getZoom()); 2264 2265 int max_zoom_ratio = m_secCamera->getMaxZoomRatio(); 2266 2267 listString.setTo(""); 2268 2269 if (m_getZoomRatioList(listString, strBuf, maxZoom, 100, max_zoom_ratio) == true) 2270 p.set(CameraParameters::KEY_ZOOM_RATIOS, listString.string()); 2271 else 2272 p.set(CameraParameters::KEY_ZOOM_RATIOS, "100"); 2273 } else { 2274 p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false"); 2275 p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false"); 2276 } 2277 2278 } else { 2279 p.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false"); 2280 p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "false"); 2281 } 2282 2283 // fps 2284 int minPreviewFps, maxPreviewFps; 2285 m_secCamera->getPreviewFpsRange(&minPreviewFps, &maxPreviewFps); 2286 2287 int baseFps = ((minPreviewFps + 5) / 5) * 5; 2288 2289 listString.setTo(""); 2290 snprintf(strBuf, 256, "%d", minPreviewFps); 2291 listString.append(strBuf); 2292 2293 for (int i = baseFps; i <= maxPreviewFps; i += 5) { 2294 int step = (i / 5) * 5; 2295 snprintf(strBuf, 256, ",%d", step); 2296 listString.append(strBuf); 2297 } 2298 p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, listString.string()); 2299 p.setPreviewFrameRate(maxPreviewFps); 2300 2301 int minFpsRange = minPreviewFps * 1000; // 15 -> 15000 2302 int maxFpsRange = maxPreviewFps * 1000; // 30 -> 30000 2303 2304 snprintf(strBuf, 256, "(%d,%d)", minFpsRange, maxFpsRange); 2305 
p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, strBuf); 2306 2307 snprintf(strBuf, 256, "%d,%d", minFpsRange, maxFpsRange); 2308 p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, strBuf); 2309 //p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, "(15000,30000)"); 2310 //p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, "15000,30000") 2311 2312 // focal length 2313 int num = 0; 2314 int den = 0; 2315 int precision = 0; 2316 m_secCamera->getFocalLength(&num, &den); 2317 2318 switch (den) { 2319 default: 2320 case 1000: 2321 precision = 3; 2322 break; 2323 case 100: 2324 precision = 2; 2325 break; 2326 case 10: 2327 precision = 1; 2328 break; 2329 case 1: 2330 precision = 0; 2331 break; 2332 } 2333 2334 snprintf(strBuf, 256, "%.*f", precision, ((float)num / (float)den)); 2335 p.set(CameraParameters::KEY_FOCAL_LENGTH, strBuf); 2336 //p.set(CameraParameters::KEY_FOCAL_LENGTH, "3.43"); 2337 //p.set(CameraParameters::KEY_FOCAL_LENGTH, "0.9"); 2338 2339 // Additional params. 2340 2341 p.set("contrast", "auto"); 2342 p.set("iso", "auto"); 2343 p.set("wdr", 0); 2344 p.set("metering", "center"); 2345 2346 p.set("brightness", 0); 2347 p.set("brightness-max", 2); 2348 p.set("brightness-min", -2); 2349 2350 p.set("saturation", 0); 2351 p.set("saturation-max", 2); 2352 p.set("saturation-min", -2); 2353 2354 p.set("sharpness", 0); 2355 p.set("sharpness-max", 2); 2356 p.set("sharpness-min", -2); 2357 2358 p.set("hue", 0); 2359 p.set("hue-max", 2); 2360 p.set("hue-min", -2); 2361 2362 m_params = p; 2363 m_internalParams = ip; 2364 2365 /* make sure m_secCamera has all the settings we do. applications 2366 * aren't required to call setParameters themselves (only if they 2367 * want to change something. 2368 */ 2369 setParameters(p); 2370 2371 m_secCamera->setPreviewFrameRate(maxPreviewFps); 2372} 2373 2374bool ExynosCameraHWInterface::m_startPreviewInternal(void) 2375{ 2376 ALOGV("DEBUG(%s):", __func__); 2377 2378 int i; 2379 int previewW, previewH, previewFormat, previewFramesize; 2380 2381 m_secCamera->getPreviewSize(&previewW, &previewH); 2382 previewFormat = m_secCamera->getPreviewFormat(); 2383 2384 // we will use previewFramesize for m_previewHeap[i] 2385 previewFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(m_orgPreviewRect.colorFormat), m_orgPreviewRect.w, m_orgPreviewRect.h); 2386 2387 ExynosBuffer previewBuf; 2388 void *virtAddr[3]; 2389 int fd[3]; 2390 2391 for (i = 0; i < 3; i++) { 2392 virtAddr[i] = NULL; 2393 fd[i] = -1; 2394 } 2395 2396 for (i = 0; i < NUM_OF_PREVIEW_BUF; i++) { 2397 2398 m_avaliblePreviewBufHandle[i] = false; 2399 2400 if (m_previewWindow->dequeue_buffer(m_previewWindow, &m_previewBufHandle[i], &m_previewStride[i]) != 0) { 2401 ALOGE("ERR(%s):Could not dequeue gralloc buffer[%d]!!", __func__, i); 2402 continue; 2403 } else { 2404 if (m_previewWindow->lock_buffer(m_previewWindow, m_previewBufHandle[i]) != 0) 2405 ALOGE("ERR(%s):Could not lock gralloc buffer[%d]!!", __func__, i); 2406 } 2407 2408 if (m_flagGrallocLocked[i] == false) { 2409 if (m_grallocHal->lock(m_grallocHal, 2410 *m_previewBufHandle[i], 2411 GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_YUV_ADDR, 2412 0, 0, previewW, previewH, virtAddr) != 0) { 2413 ALOGE("ERR(%s):could not obtain gralloc buffer", __func__); 2414 2415 if (m_previewWindow->cancel_buffer(m_previewWindow, m_previewBufHandle[i]) != 0) 2416 ALOGE("ERR(%s):Could not cancel_buffer gralloc buffer[%d]!!", __func__, i); 2417 2418 continue; 2419 } 2420 2421 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t 
*>(*m_previewBufHandle[i]); 2422 fd[0] = priv_handle->fd; 2423 fd[1] = priv_handle->u_fd; 2424 fd[2] = priv_handle->v_fd; 2425 m_grallocVirtAddr[i] = virtAddr[0]; 2426 m_matchedGrallocIndex[i] = i; 2427 m_flagGrallocLocked[i] = true; 2428 } 2429 2430 m_getAlignedYUVSize(previewFormat, previewW, previewH, &previewBuf); 2431 2432 previewBuf.reserved.p = i; 2433 previewBuf.virt.extP[0] = (char *)virtAddr[0]; 2434 previewBuf.virt.extP[1] = (char *)virtAddr[1]; 2435 previewBuf.virt.extP[2] = (char *)virtAddr[2]; 2436 previewBuf.fd.extFd[0] = fd[0]; 2437 previewBuf.fd.extFd[1] = fd[1]; 2438 previewBuf.fd.extFd[2] = fd[2]; 2439 2440 m_secCamera->setPreviewBuf(&previewBuf); 2441 2442 if (m_previewHeap[i]) { 2443 m_previewHeap[i]->release(m_previewHeap[i]); 2444 m_previewHeap[i] = 0; 2445 } 2446 2447 m_previewHeap[i] = m_getMemoryCb(-1, previewFramesize, 1, 0); 2448 if (!m_previewHeap[i]) { 2449 ALOGE("ERR(%s):m_getMemoryCb(m_previewHeap[%d], size(%d) fail", __func__, i, previewFramesize); 2450 continue; 2451 } 2452 2453 m_avaliblePreviewBufHandle[i] = true; 2454 } 2455 2456 if (m_secCamera->startPreview() == false) { 2457 ALOGE("ERR(%s):Fail on m_secCamera->startPreview()", __func__); 2458 return false; 2459 } 2460 2461 for (i = NUM_OF_PREVIEW_BUF - m_minUndequeuedBufs; i < NUM_OF_PREVIEW_BUF; i++) { 2462 if (m_secCamera->getPreviewBuf(&previewBuf) == false) { 2463 ALOGE("ERR(%s):getPreviewBuf() fail", __func__); 2464 return false; 2465 } 2466 2467 if (m_grallocHal && m_flagGrallocLocked[previewBuf.reserved.p] == true) { 2468 m_grallocHal->unlock(m_grallocHal, *m_previewBufHandle[previewBuf.reserved.p]); 2469 m_flagGrallocLocked[previewBuf.reserved.p] = false; 2470 } 2471 2472 if (m_previewWindow->cancel_buffer(m_previewWindow, m_previewBufHandle[previewBuf.reserved.p]) != 0) 2473 ALOGE("ERR(%s):Could not cancel_buffer gralloc buffer[%d]!!", __func__, previewBuf.reserved.p); 2474 2475 m_avaliblePreviewBufHandle[previewBuf.reserved.p] = false; 2476 } 2477 2478 m_setSkipFrame(INITIAL_SKIP_FRAME); 2479 2480 if (m_pictureRunning == false 2481 && m_startPictureInternal() == false) 2482 ALOGE("ERR(%s):m_startPictureInternal() fail", __func__); 2483 2484 return true; 2485} 2486 2487void ExynosCameraHWInterface::m_stopPreviewInternal(void) 2488{ 2489 ALOGV("DEBUG(%s):", __func__); 2490 2491 /* request that the preview thread stop. 
*/ 2492 if (m_previewRunning == true) { 2493 m_previewRunning = false; 2494 2495 if (m_previewStartDeferred == false) { 2496 m_previewCondition.signal(); 2497 /* wait until preview thread is stopped */ 2498 m_previewStoppedCondition.wait(m_previewLock); 2499 2500 for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) { 2501 if (m_previewBufHandle[i] != NULL) { 2502 if (m_grallocHal && m_flagGrallocLocked[i] == true) { 2503 m_grallocHal->unlock(m_grallocHal, *m_previewBufHandle[i]); 2504 m_flagGrallocLocked[i] = false; 2505 } 2506 2507 if (m_avaliblePreviewBufHandle[i] == true) { 2508 if (m_previewWindow->cancel_buffer(m_previewWindow, m_previewBufHandle[i]) != 0) { 2509 ALOGE("ERR(%s):Fail to cancel buffer(%d)", __func__, i); 2510 } else { 2511 m_previewBufHandle[i] = NULL; 2512 m_previewStride[i] = NULL; 2513 } 2514 2515 m_avaliblePreviewBufHandle[i] = false; 2516 } 2517 } 2518 } 2519 } else { 2520 ALOGV("DEBUG(%s):preview running but deferred, doing nothing", __func__); 2521 } 2522 } else { 2523 ALOGV("DEBUG(%s):preview not running, doing nothing", __func__); 2524 } 2525} 2526 2527bool ExynosCameraHWInterface::m_previewThreadFuncWrapper(void) 2528{ 2529 ALOGV("DEBUG(%s):starting", __func__); 2530 while (1) { 2531 m_previewLock.lock(); 2532 while (m_previewRunning == false) { 2533 if ( m_secCamera->flagStartPreview() == true 2534 && m_secCamera->stopPreview() == false) 2535 ALOGE("ERR(%s):Fail on m_secCamera->stopPreview()", __func__); 2536 2537 ALOGV("DEBUG(%s):calling m_secCamera->stopPreview() and waiting", __func__); 2538 2539 m_previewStoppedCondition.signal(); 2540 m_previewCondition.wait(m_previewLock); 2541 ALOGV("DEBUG(%s):return from wait", __func__); 2542 } 2543 m_previewLock.unlock(); 2544 2545 if (m_exitPreviewThread == true) { 2546 if ( m_secCamera->flagStartPreview() == true 2547 && m_secCamera->stopPreview() == false) 2548 ALOGE("ERR(%s):Fail on m_secCamera->stopPreview()", __func__); 2549 2550 return true; 2551 } 2552 m_previewThreadFunc(); 2553 } 2554} 2555 2556bool ExynosCameraHWInterface::m_previewThreadFunc(void) 2557{ 2558 ExynosBuffer previewBuf, callbackBuf; 2559 int stride; 2560 int previewW, previewH; 2561 bool doPutPreviewBuf = true; 2562 2563 if (m_secCamera->getPreviewBuf(&previewBuf) == false) { 2564 ALOGE("ERR(%s):getPreviewBuf() fail", __func__); 2565 return false; 2566 } 2567 2568#ifndef USE_3DNR_DMAOUT 2569 if (m_videoStart == true) { 2570 copy_previewBuf = previewBuf; 2571 m_videoRunning = true; 2572 m_videoCondition.signal(); 2573 } 2574#endif 2575 2576 m_skipFrameLock.lock(); 2577 if (0 < m_skipFrame) { 2578 m_skipFrame--; 2579 m_skipFrameLock.unlock(); 2580 ALOGV("DEBUG(%s):skipping %d frame", __func__, previewBuf.reserved.p); 2581 2582 if ( doPutPreviewBuf == true 2583 && m_secCamera->putPreviewBuf(&previewBuf) == false) { 2584 ALOGE("ERR(%s):putPreviewBuf(%d) fail", __func__, previewBuf.reserved.p); 2585 return false; 2586 } 2587 2588 return true; 2589 } 2590 m_skipFrameLock.unlock(); 2591 2592 callbackBuf = previewBuf; 2593 2594 m_secCamera->getPreviewSize(&previewW, &previewH); 2595 2596 if (m_previewWindow && m_grallocHal && m_previewRunning == true) { 2597 2598 bool findGrallocBuf = false; 2599 buffer_handle_t *bufHandle = NULL; 2600 void *virtAddr[3]; 2601 int fd[3]; 2602 2603 /* Unlock grallocHal buffer if locked */ 2604 if (m_flagGrallocLocked[previewBuf.reserved.p] == true) { 2605 m_grallocHal->unlock(m_grallocHal, *m_previewBufHandle[previewBuf.reserved.p]); 2606 m_flagGrallocLocked[previewBuf.reserved.p] = false; 2607 } else { 2608 if 
(m_previewWindow->lock_buffer(m_previewWindow, bufHandle) != 0) 2609 ALOGE("ERR(%s):Could not lock gralloc buffer!!", __func__); 2610 } 2611 2612 /* Enqueue lastest buffer */ 2613 if (m_avaliblePreviewBufHandle[previewBuf.reserved.p] == true) { 2614 if (m_previewWindow->enqueue_buffer(m_previewWindow, 2615 m_previewBufHandle[previewBuf.reserved.p]) != 0) { 2616 ALOGE("ERR(%s):Could not enqueue gralloc buffer[%d]!!", __func__, previewBuf.reserved.p); 2617 goto callbacks; 2618 } 2619 2620 m_avaliblePreviewBufHandle[previewBuf.reserved.p] = false; 2621 } 2622 2623 /* Dequeue buffer from Gralloc */ 2624 if (m_previewWindow->dequeue_buffer(m_previewWindow, 2625 &bufHandle, 2626 &stride) != 0) { 2627 ALOGE("ERR(%s):Could not dequeue gralloc buffer!!", __func__); 2628 goto callbacks; 2629 } 2630 2631 /* Get virtual address from dequeued buf */ 2632 if (m_grallocHal->lock(m_grallocHal, 2633 *bufHandle, 2634 GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_YUV_ADDR, 2635 0, 0, previewW, previewH, virtAddr) != 0) { 2636 ALOGE("ERR(%s):could not obtain gralloc buffer", __func__); 2637 goto callbacks; 2638 } 2639 2640 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*bufHandle); 2641 fd[0] = priv_handle->fd; 2642 fd[1] = priv_handle->u_fd; 2643 fd[2] = priv_handle->v_fd; 2644 2645 for (int i = 0; i < NUM_OF_PREVIEW_BUF; i++) { 2646 if ((unsigned int)m_grallocVirtAddr[i] == (unsigned int)virtAddr[0]) { 2647 findGrallocBuf = true; 2648 2649 m_previewBufHandle[i] = bufHandle; 2650 m_previewStride[i] = stride; 2651 2652 previewBuf.reserved.p = i; 2653 previewBuf.virt.extP[0] = (char *)virtAddr[0]; 2654 previewBuf.virt.extP[1] = (char *)virtAddr[1]; 2655 previewBuf.virt.extP[2] = (char *)virtAddr[2]; 2656 2657 previewBuf.fd.extFd[0] = fd[0]; 2658 previewBuf.fd.extFd[1] = fd[1]; 2659 previewBuf.fd.extFd[2] = fd[2]; 2660 2661 m_secCamera->setPreviewBuf(&previewBuf); 2662 m_matchedGrallocIndex[previewBuf.reserved.p] = i; 2663 m_avaliblePreviewBufHandle[i] = true; 2664 break; 2665 } 2666 } 2667 2668 if (findGrallocBuf == false) { 2669 ALOGE("%s:addr(%x) is not matched any gralloc buffer's addr", __func__, virtAddr[0]); 2670 goto callbacks; 2671 } 2672 2673 if ( doPutPreviewBuf == true 2674 && m_secCamera->putPreviewBuf(&previewBuf) == false) 2675 ALOGE("ERR(%s):putPreviewBuf(%d) fail", __func__, previewBuf.reserved.p); 2676 else 2677 doPutPreviewBuf = false; 2678 } 2679 2680callbacks: 2681 2682 if ( m_previewRunning == true 2683 && m_msgEnabled & CAMERA_MSG_PREVIEW_FRAME) { 2684 2685 // resize from previewBuf(max size) to m_previewHeap(user's set size) 2686 if (m_exynosPreviewCSC) { 2687 int previewFormat = m_secCamera->getPreviewFormat(); 2688 2689 csc_set_src_format(m_exynosPreviewCSC, 2690 previewW, previewH - 8, 2691 0, 0, previewW, previewH - 8, 2692 V4L2_PIX_2_HAL_PIXEL_FORMAT(previewFormat), 2693 0); 2694 2695 csc_set_dst_format(m_exynosPreviewCSC, 2696 m_orgPreviewRect.w, m_orgPreviewRect.h, 2697 0, 0, m_orgPreviewRect.w, m_orgPreviewRect.h, 2698 V4L2_PIX_2_HAL_PIXEL_FORMAT(m_orgPreviewRect.colorFormat), 2699 1); 2700 2701 2702 csc_set_src_buffer(m_exynosPreviewCSC, 2703 (unsigned char *)callbackBuf.virt.extP[0], 2704 (unsigned char *)callbackBuf.virt.extP[1], 2705 (unsigned char *)callbackBuf.virt.extP[2], 2706 0); 2707 2708 ExynosBuffer dstBuf; 2709 m_getAlignedYUVSize(m_orgPreviewRect.colorFormat, m_orgPreviewRect.w, m_orgPreviewRect.h, &dstBuf); 2710 2711 dstBuf.virt.extP[0] = (char *)m_previewHeap[callbackBuf.reserved.p]->data; 2712 for (int i = 1; i < 3; i++) { 2713 if 
(dstBuf.size.extS[i] != 0) 2714 dstBuf.virt.extP[i] = dstBuf.virt.extP[i-1] + dstBuf.size.extS[i-1]; 2715 } 2716 2717 csc_set_dst_buffer(m_exynosPreviewCSC, 2718 (unsigned char *)dstBuf.virt.extP[0], 2719 (unsigned char *)dstBuf.virt.extP[1], 2720 (unsigned char *)dstBuf.virt.extP[2], 2721 0); 2722 2723 if (csc_convert(m_exynosPreviewCSC) != 0) 2724 ALOGE("ERR(%s):csc_convert() fail", __func__); 2725 } else { 2726 ALOGE("ERR(%s):m_exynosPreviewCSC == NULL", __func__); 2727 } 2728 } 2729 2730 /* TODO: We need better error handling scheme than this scheme */ 2731 if ( doPutPreviewBuf == true 2732 && m_secCamera->putPreviewBuf(&previewBuf) == false) 2733 ALOGE("ERR(%s):putPreviewBuf(%d) fail", __func__, previewBuf.reserved.p); 2734 else 2735 doPutPreviewBuf = false; 2736 2737 if ( m_previewRunning == true 2738 && m_msgEnabled & CAMERA_MSG_PREVIEW_FRAME) { 2739 m_dataCb(CAMERA_MSG_PREVIEW_FRAME, m_previewHeap[callbackBuf.reserved.p], 0, NULL, m_callbackCookie); 2740 } 2741 2742 /* Face detection */ 2743 if ( m_previewRunning == true 2744 && m_msgEnabled & CAMERA_MSG_PREVIEW_METADATA 2745 && m_secCamera->flagStartFaceDetection() == true) { 2746 2747 camera_frame_metadata_t *ptrMetadata = NULL; 2748 2749 int id[NUM_OF_DETECTED_FACES]; 2750 int score[NUM_OF_DETECTED_FACES]; 2751 ExynosRect2 detectedFace[NUM_OF_DETECTED_FACES]; 2752 ExynosRect2 detectedLeftEye[NUM_OF_DETECTED_FACES]; 2753 ExynosRect2 detectedRightEye[NUM_OF_DETECTED_FACES]; 2754 ExynosRect2 detectedMouth[NUM_OF_DETECTED_FACES]; 2755 2756 int numOfDetectedFaces = m_secCamera->getDetectedFacesAreas(NUM_OF_DETECTED_FACES, 2757 id, 2758 score, 2759 detectedFace, 2760 detectedLeftEye, 2761 detectedRightEye, 2762 detectedMouth); 2763 2764 if (0 < numOfDetectedFaces) { 2765 // camera.h 2766 // width : -1000~1000 2767 // height : -1000~1000 2768 // if eye, mouth is not detectable : -2000, -2000. 2769 2770 int realNumOfDetectedFaces = 0; 2771 m_faceDetected = true; 2772 2773 for (int i = 0; i < numOfDetectedFaces; i++) { 2774 // over 50s, we will catch 2775 //if (score[i] < 50) 2776 // continue; 2777 2778 m_faces[realNumOfDetectedFaces].rect[0] = m_calibratePosition(previewW, 2000, detectedFace[i].x1) - 1000; 2779 m_faces[realNumOfDetectedFaces].rect[1] = m_calibratePosition(previewH, 2000, detectedFace[i].y1) - 1000; 2780 m_faces[realNumOfDetectedFaces].rect[2] = m_calibratePosition(previewW, 2000, detectedFace[i].x2) - 1000; 2781 m_faces[realNumOfDetectedFaces].rect[3] = m_calibratePosition(previewH, 2000, detectedFace[i].y2) - 1000; 2782 2783 m_faces[realNumOfDetectedFaces].id = id[i]; 2784 m_faces[realNumOfDetectedFaces].score = score[i]; 2785 2786 m_faces[realNumOfDetectedFaces].left_eye[0] = (detectedLeftEye[i].x1 < 0) ? -2000 : m_calibratePosition(previewW, 2000, detectedLeftEye[i].x1) - 1000; 2787 m_faces[realNumOfDetectedFaces].left_eye[1] = (detectedLeftEye[i].y1 < 0) ? -2000 : m_calibratePosition(previewH, 2000, detectedLeftEye[i].y1) - 1000; 2788 2789 m_faces[realNumOfDetectedFaces].right_eye[0] = (detectedRightEye[i].x1 < 0) ? -2000 : m_calibratePosition(previewW, 2000, detectedRightEye[i].x1) - 1000; 2790 m_faces[realNumOfDetectedFaces].right_eye[1] = (detectedRightEye[i].y1 < 0) ? -2000 : m_calibratePosition(previewH, 2000, detectedRightEye[i].y1) - 1000; 2791 2792 m_faces[realNumOfDetectedFaces].mouth[0] = (detectedMouth[i].x1 < 0) ? -2000 : m_calibratePosition(previewW, 2000, detectedMouth[i].x1) - 1000; 2793 m_faces[realNumOfDetectedFaces].mouth[1] = (detectedMouth[i].y1 < 0) ? 
-2000 : m_calibratePosition(previewH, 2000, detectedMouth[i].y1) - 1000; 2794 2795 realNumOfDetectedFaces++; 2796 } 2797 2798 m_frameMetadata.number_of_faces = realNumOfDetectedFaces; 2799 m_frameMetadata.faces = m_faces; 2800 2801 ptrMetadata = &m_frameMetadata; 2802 2803 m_dataCb(CAMERA_MSG_PREVIEW_METADATA, m_previewHeap[callbackBuf.reserved.p], 0, ptrMetadata, m_callbackCookie); 2804 } else if (numOfDetectedFaces == 0 && m_faceDetected == true) { 2805 m_frameMetadata.number_of_faces = 0; 2806 m_frameMetadata.faces = m_faces; 2807 2808 ptrMetadata = &m_frameMetadata; 2809 2810 m_dataCb(CAMERA_MSG_PREVIEW_METADATA, m_previewHeap[callbackBuf.reserved.p], 0, ptrMetadata, m_callbackCookie); 2811 m_faceDetected = false; 2812 } 2813 } 2814 2815 // zero shutter lag 2816 if (m_pictureRunning == false 2817 && m_startPictureInternal() == false) 2818 ALOGE("ERR(%s):m_startPictureInternal() fail", __func__); 2819 2820 m_stateLock.lock(); 2821 if (m_captureInProgress == true) { 2822 m_stateLock.unlock(); 2823 } else { 2824 m_stateLock.unlock(); 2825 2826 if (m_numOfAvaliblePictureBuf < NUM_OF_PICTURE_BUF) { 2827 2828 ExynosBufferQueue *cur = m_oldPictureBufQueueHead; 2829 do { 2830 if(cur->next == NULL) { 2831 cur->buf = m_pictureBuf; 2832 break; 2833 } 2834 cur = cur->next; 2835 } while (cur->next); 2836 2837 if (m_secCamera->getPictureBuf(&m_pictureBuf) == false) 2838 ALOGE("ERR(%s):getPictureBuf() fail", __func__); 2839 else 2840 m_numOfAvaliblePictureBuf++; 2841 } 2842 2843 if (NUM_OF_WAITING_PUT_PICTURE_BUF < m_numOfAvaliblePictureBuf) { 2844 ExynosBuffer nullBuf; 2845 ExynosBuffer oldBuf; 2846 2847 oldBuf = m_oldPictureBufQueueHead->buf; 2848 2849 m_oldPictureBufQueueHead->buf = nullBuf; 2850 2851 if (m_oldPictureBufQueueHead->next) { 2852 ExynosBufferQueue *newQueueHead = m_oldPictureBufQueueHead->next; 2853 m_oldPictureBufQueueHead->next = NULL; 2854 m_oldPictureBufQueueHead = newQueueHead; 2855 } else { 2856 m_oldPictureBufQueueHead = &m_oldPictureBufQueue[0]; 2857 } 2858 2859 if (oldBuf != nullBuf) { 2860 if (m_secCamera->putPictureBuf(&oldBuf) == false) 2861 ALOGE("ERR(%s):putPictureBuf(%d) fail", __func__, oldBuf.reserved.p); 2862 else { 2863 m_numOfAvaliblePictureBuf--; 2864 if (m_numOfAvaliblePictureBuf < 0) 2865 m_numOfAvaliblePictureBuf = 0; 2866 } 2867 2868 } 2869 } 2870 } 2871 2872 return true; 2873} 2874 2875bool ExynosCameraHWInterface::m_videoThreadFuncWrapper(void) 2876{ 2877 while (1) { 2878 while (m_videoRunning == false) { 2879 m_videoLock.lock(); 2880 2881#ifdef USE_3DNR_DMAOUT 2882 if ( m_secCamera->flagStartVideo() == true 2883 && m_secCamera->stopVideo() == false) 2884 ALOGE("ERR(%s):Fail on m_secCamera->stopVideo()", __func__); 2885#endif 2886 2887 ALOGV("DEBUG(%s):calling mExynosCamera->stopVideo() and waiting", __func__); 2888 2889 m_videoStoppedCondition.signal(); 2890 m_videoCondition.wait(m_videoLock); 2891 ALOGV("DEBUG(%s):return from wait", __func__); 2892 2893 m_videoLock.unlock(); 2894 } 2895 2896 if (m_exitVideoThread == true) { 2897 m_videoLock.lock(); 2898 2899#ifdef USE_3DNR_DMAOUT 2900 if ( m_secCamera->flagStartVideo() == true 2901 && m_secCamera->stopVideo() == false) 2902 ALOGE("ERR(%s):Fail on m_secCamera->stopVideo()", __func__); 2903#endif 2904 2905 m_videoLock.unlock(); 2906 return true; 2907 } 2908 2909 m_videoThreadFunc(); 2910#ifndef USE_3DNR_DMAOUT 2911 m_videoRunning = false; 2912#endif 2913 } 2914 return true; 2915} 2916 2917bool ExynosCameraHWInterface::m_videoThreadFunc(void) 2918{ 2919 nsecs_t timestamp; 2920#ifdef USE_3DNR_DMAOUT 2921 
ExynosBuffer videoBuf; 2922#endif 2923 2924 if (m_numOfAvailableVideoBuf == 0) 2925 usleep(1000); // sleep 1msec for other threads. 2926 2927 { 2928 if ( m_msgEnabled & CAMERA_MSG_VIDEO_FRAME 2929 && m_videoRunning == true) { 2930 2931 Mutex::Autolock lock(m_videoLock); 2932 2933 if (m_numOfAvailableVideoBuf == 0) { 2934 ALOGV("DEBUG(%s):waiting releaseRecordingFrame()", __func__); 2935 return true; 2936 } 2937 2938#ifdef USE_3DNR_DMAOUT 2939 if (m_secCamera->getVideoBuf(&videoBuf) == false) { 2940 ALOGE("ERR(%s):Fail on ExynosCamera->getVideoBuf()", __func__); 2941 return false; 2942 } 2943#endif 2944 2945 m_numOfAvailableVideoBuf--; 2946 if (m_numOfAvailableVideoBuf < 0) 2947 m_numOfAvailableVideoBuf = 0; 2948 2949 timestamp = systemTime(SYSTEM_TIME_MONOTONIC); 2950 2951 // Notify the client of a new frame. 2952 if ( m_msgEnabled & CAMERA_MSG_VIDEO_FRAME 2953 && m_videoRunning == true) { 2954 2955 // resize from videoBuf(max size) to m_videoHeap(user's set size) 2956 if (m_exynosVideoCSC) { 2957 int videoW, videoH, videoFormat = 0; 2958 int cropX, cropY, cropW, cropH = 0; 2959 2960#ifndef USE_3DNR_DMAOUT 2961 int previewW, previewH, previewFormat = 0; 2962 previewFormat = m_secCamera->getPreviewFormat(); 2963 m_secCamera->getPreviewSize(&previewW, &previewH); 2964#endif 2965 videoFormat = m_secCamera->getVideoFormat(); 2966 m_secCamera->getVideoSize(&videoW, &videoH); 2967 2968 m_getRatioSize(videoW, videoH, 2969 m_orgVideoRect.w, m_orgVideoRect.h, 2970 &cropX, &cropY, 2971 &cropW, &cropH, 2972 m_secCamera->getZoom()); 2973 2974 ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", 2975 __func__, cropX, cropY, cropW, cropH); 2976 2977#ifdef USE_3DNR_DMAOUT 2978 csc_set_src_format(m_exynosVideoCSC, 2979 videoW, videoH, 2980 cropX, cropY, cropW, cropH, 2981 V4L2_PIX_2_HAL_PIXEL_FORMAT(videoFormat), 2982 0); 2983#else 2984 csc_set_src_format(m_exynosVideoCSC, 2985 previewW, previewH - 8, 2986 0, 0, previewW, previewH - 8, 2987 V4L2_PIX_2_HAL_PIXEL_FORMAT(previewFormat), 2988 0); 2989#endif 2990 2991 csc_set_dst_format(m_exynosVideoCSC, 2992 m_orgVideoRect.w, m_orgVideoRect.h, 2993 0, 0, m_orgVideoRect.w, m_orgVideoRect.h, 2994 V4L2_PIX_2_HAL_PIXEL_FORMAT(videoFormat), 2995 1); 2996 2997#ifdef USE_3DNR_DMAOUT 2998 csc_set_src_buffer(m_exynosVideoCSC, 2999 (unsigned char *)videoBuf.virt.extP[0], 3000 (unsigned char *)videoBuf.virt.extP[1], 3001 (unsigned char *)videoBuf.virt.extP[2], 3002 0); 3003#else 3004 csc_set_src_buffer(m_exynosVideoCSC, 3005 (unsigned char *)copy_previewBuf.virt.extP[0], 3006 (unsigned char *)copy_previewBuf.virt.extP[2], 3007 (unsigned char *)copy_previewBuf.virt.extP[1], 3008 0); 3009#endif 3010 3011 ExynosBuffer dstBuf; 3012 m_getAlignedYUVSize(videoFormat, m_orgVideoRect.w, m_orgVideoRect.h, &dstBuf); 3013 3014#ifdef USE_3DNR_DMAOUT 3015 dstBuf.virt.extP[0] = (char *)m_resizedVideoHeap[videoBuf.reserved.p]->data; 3016#else 3017 dstBuf.virt.extP[0] = (char *)m_resizedVideoHeap[m_cntVideoBuf]->data; 3018#endif 3019 for (int i = 1; i < 3; i++) { 3020 if (dstBuf.size.extS[i] != 0) 3021 dstBuf.virt.extP[i] = dstBuf.virt.extP[i-1] + dstBuf.size.extS[i-1]; 3022 } 3023 3024 csc_set_dst_buffer(m_exynosVideoCSC, 3025 (unsigned char *)dstBuf.virt.extP[0], 3026 (unsigned char *)dstBuf.virt.extP[1], 3027 (unsigned char *)dstBuf.virt.extP[2], 3028 0); 3029 3030 if (csc_convert(m_exynosVideoCSC) != 0) 3031 ALOGE("ERR(%s):csc_convert() fail", __func__); 3032 } else { 3033 ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __func__); 3034 } 3035#ifdef USE_3DNR_DMAOUT 3036 
m_dataCbTimestamp(timestamp, CAMERA_MSG_VIDEO_FRAME, 3037 m_resizedVideoHeap[videoBuf.reserved.p], 0, m_callbackCookie); 3038#else 3039 m_dataCbTimestamp(timestamp, CAMERA_MSG_VIDEO_FRAME, 3040 m_resizedVideoHeap[m_cntVideoBuf], 0, m_callbackCookie); 3041 m_cntVideoBuf++; 3042 if (m_cntVideoBuf == NUM_OF_VIDEO_BUF) 3043 m_cntVideoBuf = 0; 3044#endif 3045 } 3046 3047 // HACK : This must can handle on releaseRecordingFrame() 3048#ifdef USE_3DNR_DMAOUT 3049 m_secCamera->putVideoBuf(&videoBuf); 3050#endif 3051 m_numOfAvailableVideoBuf++; 3052 if (NUM_OF_VIDEO_BUF <= m_numOfAvailableVideoBuf) 3053 m_numOfAvailableVideoBuf = NUM_OF_VIDEO_BUF; 3054 // until here 3055 } else 3056 usleep(1000); // sleep 1msec for stopRecording 3057 } 3058 3059 return true; 3060} 3061 3062bool ExynosCameraHWInterface::m_autoFocusThreadFunc(void) 3063{ 3064 int count =0; 3065 bool afResult = false; 3066 ALOGV("DEBUG(%s):starting", __func__); 3067 3068 /* block until we're told to start. we don't want to use 3069 * a restartable thread and requestExitAndWait() in cancelAutoFocus() 3070 * because it would cause deadlock between our callbacks and the 3071 * caller of cancelAutoFocus() which both want to grab the same lock 3072 * in CameraServices layer. 3073 */ 3074 m_focusLock.lock(); 3075 /* check early exit request */ 3076 if (m_exitAutoFocusThread == true) { 3077 m_focusLock.unlock(); 3078 ALOGV("DEBUG(%s):exiting on request0", __func__); 3079 return true; 3080 } 3081 3082 m_focusCondition.wait(m_focusLock); 3083 /* check early exit request */ 3084 if (m_exitAutoFocusThread == true) { 3085 m_focusLock.unlock(); 3086 ALOGV("DEBUG(%s):exiting on request1", __func__); 3087 return true; 3088 } 3089 m_focusLock.unlock(); 3090 3091 if (m_secCamera->autoFocus() == false) { 3092 ALOGE("ERR(%s):Fail on m_secCamera->autoFocus()", __func__); 3093 return false; 3094 } 3095 3096 switch (m_secCamera->getFucusModeResult()) { 3097 case 0: 3098 ALOGV("DEBUG(%s):AF Cancelled !!", __func__); 3099 afResult = true; 3100 break; 3101 case 1: 3102 ALOGV("DEBUG(%s):AF Success!!", __func__); 3103 afResult = true; 3104 break; 3105 default: 3106 ALOGV("DEBUG(%s):AF Fail !!", __func__); 3107 afResult = false; 3108 break; 3109 } 3110 3111 // CAMERA_MSG_FOCUS only takes a bool. true for 3112 // finished and false for failure. cancel is still 3113 // considered a true result. 
    if (m_msgEnabled & CAMERA_MSG_FOCUS)
        m_notifyCb(CAMERA_MSG_FOCUS, afResult, 0, m_callbackCookie);

    ALOGV("DEBUG(%s):exiting with no error", __func__);
    return true;
}

bool ExynosCameraHWInterface::m_startPictureInternal(void)
{
    if (m_pictureRunning == true) {
        ALOGE("ERR(%s):m_pictureRunning is already running", __func__);
        return false;
    }

    int pictureW, pictureH, pictureFormat;
    unsigned int pictureFrameSize, pictureChromaSize;
    ExynosBuffer nullBuf;
    int numPlanes;

    m_secCamera->getPictureSize(&pictureW, &pictureH);
    pictureFormat = m_secCamera->getPictureFormat();
    PLANAR_FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16), pictureW, pictureH, &pictureFrameSize,
                      &pictureChromaSize);
    numPlanes = NUM_PLANES(V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16));
#if 0
    if (m_rawHeap) {
        m_rawHeap->release(m_rawHeap);
        m_rawHeap = 0;
    }
    m_rawHeap = m_getMemoryCb(-1, pictureFramesize, 1, NULL);
    if (!m_rawHeap) {
        ALOGE("ERR(%s):m_getMemoryCb(m_rawHeap, size(%d) fail", __func__, pictureFramesize);
        return false;
    }

    pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
#endif
    for (int i = 0; i < NUM_OF_PICTURE_BUF; i++) {
        // close any fds left over from a previous session before re-allocating
        for (int j = 0; j < 3; j++)
            if (m_pictureFds[i][j] >= 0) {
                close(m_pictureFds[i][j]);
                m_pictureFds[i][j] = -1;
            }

        // plane 0 (luma) uses the full frame size, the remaining planes the chroma size
        m_pictureFds[i][0] = ion_alloc(m_ion_client, pictureFrameSize, 0, ION_HEAP_SYSTEM_MASK, 0);
        if (m_pictureFds[i][0] < 0) {
            ALOGE("ERR(%s):ion_alloc(m_pictureFds[%d], size(%d) fail", __func__, i, pictureFrameSize);
            return false;
        }

        for (int j = 1; j < numPlanes; j++) {
            m_pictureFds[i][j] = ion_alloc(m_ion_client, pictureChromaSize, 0, ION_HEAP_SYSTEM_MASK, 0);
            if (m_pictureFds[i][j] < 0) {
                ALOGE("ERR(%s):ion_alloc(m_pictureFds[%d][%d], size(%d) fail", __func__, i, j, pictureChromaSize);
                return false;
            }
        }
        m_getAlignedYUVSize(pictureFormat, pictureW, pictureH, &m_pictureBuf);

        m_pictureBuf.fd.extFd[0] = m_pictureFds[i][0];
        for (int j = 1; j < 3; j++) {
            if (m_pictureBuf.size.extS[j] != 0)
                m_pictureBuf.fd.extFd[j] = m_pictureFds[i][j];
            else
                m_pictureBuf.fd.extFd[j] = -1;
        }

        m_pictureBuf.reserved.p = i;

        m_secCamera->setPictureBuf(&m_pictureBuf);
    }

    // zero shutter lag
    if (m_secCamera->startPicture() == false) {
        ALOGE("ERR(%s):Fail on m_secCamera->startPicture()", __func__);
        return false;
    }

    m_numOfAvaliblePictureBuf = 0;
    m_pictureBuf = nullBuf;

    for (int i = 0; i < NUM_OF_PICTURE_BUF; i++) {
        m_oldPictureBufQueue[i].buf = nullBuf;
        m_oldPictureBufQueue[i].next = NULL;
    }

    m_oldPictureBufQueueHead = &m_oldPictureBufQueue[0];

    m_pictureRunning = true;

    return true;
}

bool ExynosCameraHWInterface::m_stopPictureInternal(void)
{
    if (m_pictureRunning == false) {
        ALOGE("ERR(%s):m_pictureRunning is already stopped", __func__);
        return false;
    }

    if (m_secCamera->flagStartPicture() == true
        && m_secCamera->stopPicture() == false)
        ALOGE("ERR(%s):Fail on m_secCamera->stopPicture()", __func__);

    for (int i = 0; i < NUM_OF_PICTURE_BUF; i++) {
        if (m_pictureHeap[i]) {
            m_pictureHeap[i]->release(m_pictureHeap[i]);
            m_pictureHeap[i] = 0;
        }
    }

    if
(m_rawHeap) { 3227 m_rawHeap->release(m_rawHeap); 3228 m_rawHeap = 0; 3229 } 3230 3231 m_pictureRunning = false; 3232 3233 return true; 3234} 3235 3236bool ExynosCameraHWInterface::m_pictureThreadFunc(void) 3237{ 3238 bool ret = false; 3239 int pictureW, pictureH, pictureFramesize = 0; 3240 int pictureFormat; 3241 int cropX, cropY, cropW, cropH = 0; 3242 3243 ExynosBuffer pictureBuf; 3244 ExynosBuffer jpegBuf; 3245 3246 camera_memory_t *JpegHeap = NULL; 3247 camera_memory_t *JpegHeapOut = NULL; 3248 3249 m_secCamera->getPictureSize(&pictureW, &pictureH); 3250 pictureFormat = m_secCamera->getPictureFormat(); 3251 pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH); 3252 3253 JpegHeap = m_getMemoryCb(-1, pictureFramesize, 1, 0); 3254 if (!JpegHeap) { 3255 ALOGE("ERR(%s):m_getMemoryCb(JpegHeap, size(%d) fail", __func__, pictureFramesize); 3256 return false; 3257 } 3258 3259 // resize from pictureBuf(max size) to rawHeap(user's set size) 3260 if (m_exynosPictureCSC) { 3261 m_getRatioSize(pictureW, pictureH, 3262 m_orgPictureRect.w, m_orgPictureRect.h, 3263 &cropX, &cropY, 3264 &cropW, &cropH, 3265 m_secCamera->getZoom()); 3266 3267 ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", 3268 __func__, cropX, cropY, cropW, cropH); 3269 3270 csc_set_src_format(m_exynosPictureCSC, 3271 pictureW, pictureH, 3272 cropX, cropY, cropW, cropH, 3273 V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), 3274 1); 3275 //0); 3276 3277 csc_set_dst_format(m_exynosPictureCSC, 3278 m_orgPictureRect.w, m_orgPictureRect.h, 3279 0, 0, m_orgPictureRect.w, m_orgPictureRect.h, 3280 V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16), 3281 1); 3282 //0); 3283 3284 csc_set_src_buffer(m_exynosPictureCSC, 3285 (unsigned char *)m_pictureBuf.virt.extP[0], 3286 (unsigned char *)m_pictureBuf.virt.extP[1], 3287 (unsigned char *)m_pictureBuf.virt.extP[2], 3288 0); 3289 3290 pictureBuf.size.extS[0] = ALIGN(m_orgPictureRect.w, 16) * ALIGN(m_orgPictureRect.h, 16) * 2; 3291 pictureBuf.size.extS[1] = 0; 3292 pictureBuf.size.extS[2] = 0; 3293 3294 pictureBuf.virt.extP[0] = (char *)m_rawHeap->data; 3295 3296 csc_set_dst_buffer(m_exynosPictureCSC, 3297 (unsigned char *)pictureBuf.virt.extP[0], 3298 (unsigned char *)pictureBuf.virt.extP[1], 3299 (unsigned char *)pictureBuf.virt.extP[2], 3300 0); 3301 3302 if (csc_convert(m_exynosPictureCSC) != 0) 3303 ALOGE("ERR(%s):csc_convert() fail", __func__); 3304 } else { 3305 ALOGE("ERR(%s):m_exynosPictureCSC == NULL", __func__); 3306 } 3307 3308 if (m_msgEnabled & CAMERA_MSG_SHUTTER) 3309 m_notifyCb(CAMERA_MSG_SHUTTER, 0, 0, m_callbackCookie); 3310 3311 m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_orgPictureRect.w, m_orgPictureRect.h, &pictureBuf); 3312 3313 for (int i = 1; i < 3; i++) { 3314 if (pictureBuf.size.extS[i] != 0) 3315 pictureBuf.virt.extP[i] = pictureBuf.virt.extP[i-1] + pictureBuf.size.extS[i-1]; 3316 3317 ALOGV("(%s): pictureBuf.size.extS[%d] = %d", __func__, i, pictureBuf.size.extS[i]); 3318 } 3319 3320 if (m_msgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) { 3321 jpegBuf.virt.p = (char *)JpegHeap->data; 3322 jpegBuf.size.s = pictureFramesize; 3323 3324 ExynosRect jpegRect; 3325 jpegRect.w = m_orgPictureRect.w; 3326 jpegRect.h = m_orgPictureRect.h; 3327 jpegRect.colorFormat = V4L2_PIX_FMT_NV16; 3328 3329 if (m_secCamera->yuv2Jpeg(&pictureBuf, &jpegBuf, &jpegRect) == false) { 3330 ALOGE("ERR(%s):yuv2Jpeg() fail", __func__); 3331 m_stateLock.lock(); 3332 m_captureInProgress = false; 3333 m_pictureLock.lock(); 3334 m_pictureCondition.signal(); 3335 
m_pictureLock.unlock(); 3336 m_stateLock.unlock(); 3337 goto out; 3338 } 3339 } 3340 3341 m_stateLock.lock(); 3342 m_captureInProgress = false; 3343 m_pictureLock.lock(); 3344 m_pictureCondition.signal(); 3345 m_pictureLock.unlock(); 3346 m_stateLock.unlock(); 3347 3348 if (m_msgEnabled & CAMERA_MSG_RAW_IMAGE) 3349 m_dataCb(CAMERA_MSG_RAW_IMAGE, m_rawHeap, 0, NULL, m_callbackCookie); 3350 3351 /* TODO: Currently framework dose not support CAMERA_MSG_RAW_IMAGE_NOTIFY callback */ 3352 /* 3353 if (m_msgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY) 3354 m_dataCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, m_rawHeap, 0, NULL, m_callbackCookie); 3355 */ 3356 3357 if (m_msgEnabled & CAMERA_MSG_POSTVIEW_FRAME) 3358 m_dataCb(CAMERA_MSG_POSTVIEW_FRAME, m_rawHeap, 0, NULL, m_callbackCookie); 3359 3360 if (m_msgEnabled & CAMERA_MSG_COMPRESSED_IMAGE) { 3361 JpegHeapOut = m_getMemoryCb(-1, jpegBuf.size.s, 1, 0); 3362 if (!JpegHeapOut) { 3363 ALOGE("ERR(%s):m_getMemoryCb(JpegHeapOut, size(%d) fail", __func__, jpegBuf.size.s); 3364 return false; 3365 } 3366 3367 // TODO : we shall pass JpegHeap mem directly? 3368 memcpy(JpegHeapOut->data, JpegHeap->data, jpegBuf.size.s); 3369 3370 m_dataCb(CAMERA_MSG_COMPRESSED_IMAGE, JpegHeapOut, 0, NULL, m_callbackCookie); 3371 } 3372 3373 if (m_videoStart == false) 3374 stopPreview(); 3375 3376 ALOGV("DEBUG(%s):m_pictureThread end", __func__); 3377 3378 ret = true; 3379 3380out: 3381 3382 if (JpegHeapOut) { 3383 JpegHeapOut->release(JpegHeapOut); 3384 JpegHeapOut = 0; 3385 } 3386 3387 if (JpegHeap) { 3388 JpegHeap->release(JpegHeap); 3389 JpegHeap = 0; 3390 } 3391 3392 return ret; 3393} 3394 3395#ifdef LOG_NDEBUG 3396bool ExynosCameraHWInterface::m_fileDump(char *filename, void *srcBuf, uint32_t size) 3397{ 3398 FILE *yuv_fd = NULL; 3399 char *buffer = NULL; 3400 static int count = 0; 3401 3402 yuv_fd = fopen(filename, "w+"); 3403 3404 if (yuv_fd == NULL) { 3405 ALOGE("ERR file open fail: %s", filename); 3406 return 0; 3407 } 3408 3409 buffer = (char *)malloc(size); 3410 3411 if (buffer == NULL) { 3412 ALOGE("ERR malloc file"); 3413 fclose(yuv_fd); 3414 return 0; 3415 } 3416 3417 memcpy(buffer, srcBuf, size); 3418 3419 fflush(stdout); 3420 3421 fwrite(buffer, 1, size, yuv_fd); 3422 3423 fflush(yuv_fd); 3424 3425 if (yuv_fd) 3426 fclose(yuv_fd); 3427 if (buffer) 3428 free(buffer); 3429 3430 ALOGV("filedump(%s) is successed!!", filename); 3431 return true; 3432} 3433#endif 3434 3435void ExynosCameraHWInterface::m_setSkipFrame(int frame) 3436{ 3437 Mutex::Autolock lock(m_skipFrameLock); 3438 if (frame < m_skipFrame) 3439 return; 3440 3441 m_skipFrame = frame; 3442} 3443 3444int ExynosCameraHWInterface::m_saveJpeg( unsigned char *real_jpeg, int jpeg_size) 3445{ 3446 FILE *yuv_fp = NULL; 3447 char filename[100], *buffer = NULL; 3448 3449 /* file create/open, note to "wb" */ 3450 yuv_fp = fopen("/data/camera_dump.jpeg", "wb"); 3451 if (yuv_fp == NULL) { 3452 ALOGE("Save jpeg file open error"); 3453 return -1; 3454 } 3455 3456 ALOGV("DEBUG(%s):[BestIQ] real_jpeg size ========> %d", __func__, jpeg_size); 3457 buffer = (char *) malloc(jpeg_size); 3458 if (buffer == NULL) { 3459 ALOGE("Save YUV] buffer alloc failed"); 3460 if (yuv_fp) 3461 fclose(yuv_fp); 3462 3463 return -1; 3464 } 3465 3466 memcpy(buffer, real_jpeg, jpeg_size); 3467 3468 fflush(stdout); 3469 3470 fwrite(buffer, 1, jpeg_size, yuv_fp); 3471 3472 fflush(yuv_fp); 3473 3474 if (yuv_fp) 3475 fclose(yuv_fp); 3476 if (buffer) 3477 free(buffer); 3478 3479 return 0; 3480} 3481 3482void ExynosCameraHWInterface::m_savePostView(const char 
*fname, uint8_t *buf, uint32_t size) 3483{ 3484 int nw; 3485 int cnt = 0; 3486 uint32_t written = 0; 3487 3488 ALOGD("opening file [%s]", fname); 3489 int fd = open(fname, O_RDWR | O_CREAT, S_IRUSR | S_IWUSR); 3490 if (fd < 0) { 3491 ALOGE("failed to create file [%s]: %s", fname, strerror(errno)); 3492 return; 3493 } 3494 3495 ALOGD("writing %d bytes to file [%s]", size, fname); 3496 while (written < size) { 3497 nw = ::write(fd, buf + written, size - written); 3498 if (nw < 0) { 3499 ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno)); 3500 break; 3501 } 3502 written += nw; 3503 cnt++; 3504 } 3505 ALOGD("done writing %d bytes to file [%s] in %d passes",size, fname, cnt); 3506 ::close(fd); 3507} 3508 3509bool ExynosCameraHWInterface::m_scaleDownYuv422(char *srcBuf, uint32_t srcWidth, uint32_t srcHeight, 3510 char *dstBuf, uint32_t dstWidth, uint32_t dstHeight) 3511{ 3512 int32_t step_x, step_y; 3513 int32_t iXsrc, iXdst; 3514 int32_t x, y, src_y_start_pos, dst_pos, src_pos; 3515 3516 if (dstWidth % 2 != 0 || dstHeight % 2 != 0) { 3517 ALOGE("scale_down_yuv422: invalid width, height for scaling"); 3518 return false; 3519 } 3520 3521 step_x = srcWidth / dstWidth; 3522 step_y = srcHeight / dstHeight; 3523 3524 dst_pos = 0; 3525 for (uint32_t y = 0; y < dstHeight; y++) { 3526 src_y_start_pos = (y * step_y * (srcWidth * 2)); 3527 3528 for (uint32_t x = 0; x < dstWidth; x += 2) { 3529 src_pos = src_y_start_pos + (x * (step_x * 2)); 3530 3531 dstBuf[dst_pos++] = srcBuf[src_pos ]; 3532 dstBuf[dst_pos++] = srcBuf[src_pos + 1]; 3533 dstBuf[dst_pos++] = srcBuf[src_pos + 2]; 3534 dstBuf[dst_pos++] = srcBuf[src_pos + 3]; 3535 } 3536 } 3537 3538 return true; 3539} 3540 3541bool ExynosCameraHWInterface::m_YUY2toNV21(void *srcBuf, void *dstBuf, uint32_t srcWidth, uint32_t srcHeight) 3542{ 3543 int32_t x, y, src_y_start_pos, dst_cbcr_pos, dst_pos, src_pos; 3544 unsigned char *srcBufPointer = (unsigned char *)srcBuf; 3545 unsigned char *dstBufPointer = (unsigned char *)dstBuf; 3546 3547 dst_pos = 0; 3548 dst_cbcr_pos = srcWidth*srcHeight; 3549 for (uint32_t y = 0; y < srcHeight; y++) { 3550 src_y_start_pos = (y * (srcWidth * 2)); 3551 3552 for (uint32_t x = 0; x < (srcWidth * 2); x += 2) { 3553 src_pos = src_y_start_pos + x; 3554 3555 dstBufPointer[dst_pos++] = srcBufPointer[src_pos]; 3556 } 3557 } 3558 for (uint32_t y = 0; y < srcHeight; y += 2) { 3559 src_y_start_pos = (y * (srcWidth * 2)); 3560 3561 for (uint32_t x = 0; x < (srcWidth * 2); x += 4) { 3562 src_pos = src_y_start_pos + x; 3563 3564 dstBufPointer[dst_cbcr_pos++] = srcBufPointer[src_pos + 3]; 3565 dstBufPointer[dst_cbcr_pos++] = srcBufPointer[src_pos + 1]; 3566 } 3567 } 3568 3569 return true; 3570} 3571 3572bool ExynosCameraHWInterface::m_checkVideoStartMarker(unsigned char *pBuf) 3573{ 3574 if (!pBuf) { 3575 ALOGE("m_checkVideoStartMarker() => pBuf is NULL"); 3576 return false; 3577 } 3578 3579 if (HIBYTE(VIDEO_COMMENT_MARKER_H) == * pBuf && LOBYTE(VIDEO_COMMENT_MARKER_H) == *(pBuf + 1) && 3580 HIBYTE(VIDEO_COMMENT_MARKER_L) == *(pBuf + 2) && LOBYTE(VIDEO_COMMENT_MARKER_L) == *(pBuf + 3)) 3581 return true; 3582 3583 return false; 3584} 3585 3586bool ExynosCameraHWInterface::m_checkEOIMarker(unsigned char *pBuf) 3587{ 3588 if (!pBuf) { 3589 ALOGE("m_checkEOIMarker() => pBuf is NULL"); 3590 return false; 3591 } 3592 3593 // EOI marker [FF D9] 3594 if (HIBYTE(JPEG_EOI_MARKER) == *pBuf && LOBYTE(JPEG_EOI_MARKER) == *(pBuf + 1)) 3595 return true; 3596 3597 return false; 3598} 3599 3600bool 
ExynosCameraHWInterface::m_findEOIMarkerInJPEG(unsigned char *pBuf, int dwBufSize, int *pnJPEGsize)
{
    if (NULL == pBuf || 0 >= dwBufSize) {
        ALOGE("m_findEOIMarkerInJPEG() => There is no content.");
        return false;
    }

    unsigned char *pBufEnd = pBuf + dwBufSize;

    while (pBuf < pBufEnd) {
        if (m_checkEOIMarker(pBuf++))
            return true;

        (*pnJPEGsize)++;
    }

    return false;
}

bool ExynosCameraHWInterface::m_splitFrame(unsigned char *pFrame, int dwSize,
                                           int dwJPEGLineLength, int dwVideoLineLength, int dwVideoHeight,
                                           void *pJPEG, int *pdwJPEGSize,
                                           void *pVideo, int *pdwVideoSize)
{
    ALOGV("DEBUG(%s):===========m_splitFrame Start==============", __func__);

    if (NULL == pFrame || 0 >= dwSize) {
        ALOGE("There is no content (pFrame=%p, dwSize=%d)", pFrame, dwSize);
        return false;
    }

    if (0 == dwJPEGLineLength || 0 == dwVideoLineLength) {
        ALOGE("There is no input information for decoding the interleaved jpeg");
        return false;
    }

    unsigned char *pSrc = pFrame;
    unsigned char *pSrcEnd = pFrame + dwSize;

    unsigned char *pJ = (unsigned char *)pJPEG;
    int dwJSize = 0;
    unsigned char *pV = (unsigned char *)pVideo;
    int dwVSize = 0;

    bool bRet = false;
    bool isFinishJpeg = false;

    while (pSrc < pSrcEnd) {
        // Check video start marker
        if (m_checkVideoStartMarker(pSrc)) {
            int copyLength;

            if (pSrc + dwVideoLineLength <= pSrcEnd)
                copyLength = dwVideoLineLength;
            else
                copyLength = pSrcEnd - pSrc - VIDEO_COMMENT_MARKER_LENGTH;

            // Copy video data
            if (pV) {
                memcpy(pV, pSrc + VIDEO_COMMENT_MARKER_LENGTH, copyLength);
                pV += copyLength;
                dwVSize += copyLength;
            }

            pSrc += copyLength + VIDEO_COMMENT_MARKER_LENGTH;
        } else {
            // Copy pure JPEG data
            int size = 0;
            int dwCopyBufLen = dwJPEGLineLength <= pSrcEnd-pSrc ?
dwJPEGLineLength : pSrcEnd - pSrc; 3669 3670 if (m_findEOIMarkerInJPEG((unsigned char *)pSrc, dwCopyBufLen, &size)) { 3671 isFinishJpeg = true; 3672 size += 2; // to count EOF marker size 3673 } else { 3674 if ((dwCopyBufLen == 1) && (pJPEG < pJ)) { 3675 unsigned char checkBuf[2] = { *(pJ - 1), *pSrc }; 3676 3677 if (m_checkEOIMarker(checkBuf)) 3678 isFinishJpeg = true; 3679 } 3680 size = dwCopyBufLen; 3681 } 3682 3683 memcpy(pJ, pSrc, size); 3684 3685 dwJSize += size; 3686 3687 pJ += dwCopyBufLen; 3688 pSrc += dwCopyBufLen; 3689 } 3690 if (isFinishJpeg) 3691 break; 3692 } 3693 3694 if (isFinishJpeg) { 3695 bRet = true; 3696 if (pdwJPEGSize) 3697 *pdwJPEGSize = dwJSize; 3698 if (pdwVideoSize) 3699 *pdwVideoSize = dwVSize; 3700 } else { 3701 ALOGE("DecodeInterleaveJPEG_WithOutDT() => Can not find EOI"); 3702 bRet = false; 3703 if (pdwJPEGSize) 3704 *pdwJPEGSize = 0; 3705 if (pdwVideoSize) 3706 *pdwVideoSize = 0; 3707 } 3708 ALOGV("DEBUG(%s):===========m_splitFrame end==============", __func__); 3709 3710 return bRet; 3711} 3712 3713int ExynosCameraHWInterface::m_decodeInterleaveData(unsigned char *pInterleaveData, 3714 int interleaveDataSize, 3715 int yuvWidth, 3716 int yuvHeight, 3717 int *pJpegSize, 3718 void *pJpegData, 3719 void *pYuvData) 3720{ 3721 if (pInterleaveData == NULL) 3722 return false; 3723 3724 bool ret = true; 3725 unsigned int *interleave_ptr = (unsigned int *)pInterleaveData; 3726 unsigned char *jpeg_ptr = (unsigned char *)pJpegData; 3727 unsigned char *yuv_ptr = (unsigned char *)pYuvData; 3728 unsigned char *p; 3729 int jpeg_size = 0; 3730 int yuv_size = 0; 3731 3732 int i = 0; 3733 3734 ALOGV("DEBUG(%s):m_decodeInterleaveData Start~~~", __func__); 3735 while (i < interleaveDataSize) { 3736 if ((*interleave_ptr == 0xFFFFFFFF) || (*interleave_ptr == 0x02FFFFFF) || 3737 (*interleave_ptr == 0xFF02FFFF)) { 3738 // Padding Data 3739 interleave_ptr++; 3740 i += 4; 3741 } else if ((*interleave_ptr & 0xFFFF) == 0x05FF) { 3742 // Start-code of YUV Data 3743 p = (unsigned char *)interleave_ptr; 3744 p += 2; 3745 i += 2; 3746 3747 // Extract YUV Data 3748 if (pYuvData != NULL) { 3749 memcpy(yuv_ptr, p, yuvWidth * 2); 3750 yuv_ptr += yuvWidth * 2; 3751 yuv_size += yuvWidth * 2; 3752 } 3753 p += yuvWidth * 2; 3754 i += yuvWidth * 2; 3755 3756 // Check End-code of YUV Data 3757 if ((*p == 0xFF) && (*(p + 1) == 0x06)) { 3758 interleave_ptr = (unsigned int *)(p + 2); 3759 i += 2; 3760 } else { 3761 ret = false; 3762 break; 3763 } 3764 } else { 3765 // Extract JPEG Data 3766 if (pJpegData != NULL) { 3767 memcpy(jpeg_ptr, interleave_ptr, 4); 3768 jpeg_ptr += 4; 3769 jpeg_size += 4; 3770 } 3771 interleave_ptr++; 3772 i += 4; 3773 } 3774 } 3775 if (ret) { 3776 if (pJpegData != NULL) { 3777 // Remove Padding after EOI 3778 for (i = 0; i < 3; i++) { 3779 if (*(--jpeg_ptr) != 0xFF) { 3780 break; 3781 } 3782 jpeg_size--; 3783 } 3784 *pJpegSize = jpeg_size; 3785 3786 } 3787 // Check YUV Data Size 3788 if (pYuvData != NULL) { 3789 if (yuv_size != (yuvWidth * yuvHeight * 2)) { 3790 ret = false; 3791 } 3792 } 3793 } 3794 ALOGV("DEBUG(%s):m_decodeInterleaveData End~~~", __func__); 3795 return ret; 3796} 3797 3798bool ExynosCameraHWInterface::m_isSupportedPreviewSize(const int width, 3799 const int height) const 3800{ 3801 unsigned int i; 3802 3803 for (i = 0; i < m_supportedPreviewSizes.size(); i++) { 3804 if (m_supportedPreviewSizes[i].width == width && 3805 m_supportedPreviewSizes[i].height == height) 3806 return true; 3807 } 3808 3809 return false; 3810} 3811 3812void 
ExynosCameraHWInterface::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf) 3813{ 3814 switch (colorFormat) { 3815 // 1p 3816 case V4L2_PIX_FMT_RGB565 : 3817 case V4L2_PIX_FMT_YUYV : 3818 case V4L2_PIX_FMT_UYVY : 3819 case V4L2_PIX_FMT_VYUY : 3820 case V4L2_PIX_FMT_YVYU : 3821 buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h); 3822 buf->size.extS[1] = 0; 3823 buf->size.extS[2] = 0; 3824 break; 3825 // 2p 3826 case V4L2_PIX_FMT_NV12 : 3827 case V4L2_PIX_FMT_NV12T : 3828 case V4L2_PIX_FMT_NV21 : 3829 buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); 3830 buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16); 3831 buf->size.extS[2] = 0; 3832 break; 3833 case V4L2_PIX_FMT_NV12M : 3834 case V4L2_PIX_FMT_NV12MT_16X16 : 3835 buf->size.extS[0] = ALIGN(ALIGN(w, 16) * ALIGN(h, 16), 2048); 3836 buf->size.extS[1] = ALIGN(ALIGN(w, 16) * ALIGN(h >> 1, 8), 2048); 3837 buf->size.extS[2] = 0; 3838 break; 3839 case V4L2_PIX_FMT_NV16 : 3840 case V4L2_PIX_FMT_NV61 : 3841 buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); 3842 buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h, 16); 3843 buf->size.extS[2] = 0; 3844 break; 3845 // 3p 3846 case V4L2_PIX_FMT_YUV420 : 3847 case V4L2_PIX_FMT_YVU420 : 3848 buf->size.extS[0] = (w * h); 3849 buf->size.extS[1] = (w * h) >> 2; 3850 buf->size.extS[2] = (w * h) >> 2; 3851 break; 3852 case V4L2_PIX_FMT_YUV420M: 3853 case V4L2_PIX_FMT_YVU420M : 3854 case V4L2_PIX_FMT_YUV422P : 3855 buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); 3856 buf->size.extS[1] = ALIGN(w/2, 8) * ALIGN(h/2, 8); 3857 buf->size.extS[2] = ALIGN(w/2, 8) * ALIGN(h/2, 8); 3858 break; 3859 default: 3860 ALOGE("ERR(%s):unmatched colorFormat(%d)", __func__, colorFormat); 3861 return; 3862 break; 3863 } 3864} 3865 3866bool ExynosCameraHWInterface::m_getResolutionList(String8 & string8Buf, char * strBuf, int w, int h) 3867{ 3868 bool ret = false; 3869 bool flagFirst = true; 3870 3871 // this is up to /packages/apps/Camera/res/values/arrays.xml 3872 int RESOLUTION_LIST[][2] = 3873 { 3874 { 3264, 2448}, 3875 { 2592, 1936}, 3876 { 2576, 1948}, 3877 { 2560, 1920}, 3878 { 2048, 1536}, 3879 { 1920, 1080}, 3880 { 1600, 1200}, 3881 { 1280, 720}, 3882 { 1024, 768}, 3883 { 800, 600}, 3884 { 800, 480}, 3885 { 720, 480}, 3886 { 640, 480}, 3887 { 528, 432}, 3888 { 480, 320}, 3889 { 352, 288}, 3890 { 320, 240}, 3891 { 176, 144} 3892 }; 3893 3894 int sizeOfResSize = sizeof(RESOLUTION_LIST) / (sizeof(int) * 2); 3895 3896 for (int i = 0; i < sizeOfResSize; i++) { 3897 if ( RESOLUTION_LIST[i][0] <= w 3898 && RESOLUTION_LIST[i][1] <= h) { 3899 if (flagFirst == true) 3900 flagFirst = false; 3901 else 3902 string8Buf.append(","); 3903 3904 sprintf(strBuf, "%dx%d", RESOLUTION_LIST[i][0], RESOLUTION_LIST[i][1]); 3905 string8Buf.append(strBuf); 3906 3907 ret = true; 3908 } 3909 } 3910 3911 if (ret == false) 3912 ALOGE("ERR(%s):cannot find resolutions", __func__); 3913 3914 return ret; 3915} 3916 3917bool ExynosCameraHWInterface::m_getZoomRatioList(String8 & string8Buf, char * strBuf, int maxZoom, int start, int end) 3918{ 3919 bool flagFirst = true; 3920 3921 int cur = start; 3922 int step = (end - start) / maxZoom; 3923 3924 for (int i = 0; i < maxZoom; i++) { 3925 sprintf(strBuf, "%d", cur); 3926 string8Buf.append(strBuf); 3927 string8Buf.append(","); 3928 cur += step; 3929 } 3930 3931 sprintf(strBuf, "%d", end); 3932 string8Buf.append(strBuf); 3933 3934 // ex : "100,130,160,190,220,250,280,310,340,360,400" 3935 3936 return true; 3937} 3938 3939int ExynosCameraHWInterface::m_bracketsStr2Ints(char 
*str, int num, ExynosRect2 *rect2s, int *weights) 3940{ 3941 char *curStr = str; 3942 char buf[128]; 3943 char *bracketsOpen; 3944 char *bracketsClose; 3945 3946 int tempArray[5]; 3947 int validFocusedAreas = 0; 3948 3949 for (int i = 0; i < num; i++) { 3950 if (curStr == NULL) 3951 break; 3952 3953 bracketsOpen = strchr(curStr, '('); 3954 if (bracketsOpen == NULL) 3955 break; 3956 3957 bracketsClose = strchr(bracketsOpen, ')'); 3958 if (bracketsClose == NULL) 3959 break; 3960 3961 strncpy(buf, bracketsOpen, bracketsClose - bracketsOpen + 1); 3962 buf[bracketsClose - bracketsOpen + 1] = 0; 3963 3964 if (m_subBracketsStr2Ints(5, buf, tempArray) == false) { 3965 ALOGE("ERR(%s):m_subBracketsStr2Ints(%s) fail", __func__, buf); 3966 break; 3967 } 3968 3969 rect2s[i].x1 = tempArray[0]; 3970 rect2s[i].y1 = tempArray[1]; 3971 rect2s[i].x2 = tempArray[2]; 3972 rect2s[i].y2 = tempArray[3]; 3973 weights[i] = tempArray[4]; 3974 3975 validFocusedAreas++; 3976 3977 curStr = bracketsClose; 3978 } 3979 return validFocusedAreas; 3980} 3981 3982bool ExynosCameraHWInterface::m_subBracketsStr2Ints(int num, char *str, int *arr) 3983{ 3984 if (str == NULL || arr == NULL) { 3985 ALOGE("ERR(%s):str or arr is NULL", __func__); 3986 return false; 3987 } 3988 3989 // ex : (-10,-10,0,0,300) 3990 char buf[128]; 3991 char *bracketsOpen; 3992 char *bracketsClose; 3993 char *tok; 3994 3995 bracketsOpen = strchr(str, '('); 3996 if (bracketsOpen == NULL) { 3997 ALOGE("ERR(%s):no '('", __func__); 3998 return false; 3999 } 4000 4001 bracketsClose = strchr(bracketsOpen, ')'); 4002 if (bracketsClose == NULL) { 4003 ALOGE("ERR(%s):no ')'", __func__); 4004 return false; 4005 } 4006 4007 strncpy(buf, bracketsOpen + 1, bracketsClose - bracketsOpen + 1); 4008 buf[bracketsClose - bracketsOpen + 1] = 0; 4009 4010 tok = strtok(buf, ","); 4011 if (tok == NULL) { 4012 ALOGE("ERR(%s):strtok(%s) fail", __func__, buf); 4013 return false; 4014 } 4015 4016 arr[0] = atoi(tok); 4017 4018 for (int i = 1; i < num; i++) { 4019 tok = strtok(NULL, ","); 4020 if (tok == NULL) { 4021 if (i < num - 1) { 4022 ALOGE("ERR(%s):strtok() (index : %d, num : %d) fail", __func__, i, num); 4023 return false; 4024 } 4025 break; 4026 } 4027 4028 arr[i] = atoi(tok); 4029 } 4030 4031 return true; 4032} 4033 4034bool ExynosCameraHWInterface::m_getRatioSize(int src_w, int src_h, 4035 int dst_w, int dst_h, 4036 int *crop_x, int *crop_y, 4037 int *crop_w, int *crop_h, 4038 int zoom) 4039{ 4040 *crop_w = src_w; 4041 *crop_h = src_h; 4042 4043 if ( src_w != dst_w 4044 || src_h != dst_h) { 4045 float src_ratio = 1.0f; 4046 float dst_ratio = 1.0f; 4047 4048 // ex : 1024 / 768 4049 src_ratio = (float)src_w / (float)src_h; 4050 4051 // ex : 352 / 288 4052 dst_ratio = (float)dst_w / (float)dst_h; 4053 4054 if (src_ratio != dst_ratio) { 4055 if (dst_w * dst_h < src_w * src_h) { 4056 if (src_ratio <= dst_ratio) { 4057 // shrink h 4058 *crop_w = src_w; 4059 *crop_h = src_w / dst_ratio; 4060 } else { 4061 // shrink w 4062 *crop_w = dst_h * dst_ratio; 4063 *crop_h = dst_h; 4064 } 4065 } else { 4066 if (src_ratio <= dst_ratio) { 4067 // shrink h 4068 *crop_w = src_w; 4069 *crop_h = src_w / dst_ratio; 4070 } else { 4071 // shrink w 4072 *crop_w = src_h * dst_ratio; 4073 *crop_h = src_h; 4074 } 4075 } 4076 4077 if (zoom != 0) { 4078 int zoomLevel = ((float)zoom + 10.0) / 10.0; 4079 *crop_w = (int)((float)*crop_w / zoomLevel); 4080 *crop_h = (int)((float)*crop_h / zoomLevel); 4081 } 4082 } 4083 } 4084 4085 #define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2) 4086 unsigned int w_align = 
(*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
    if (w_align != 0) {
        if (   (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
        }
        else
            *crop_w -= w_align;
    }

    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2)
    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
    if (h_align != 0) {
        if (   (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
        }
        else
            *crop_h -= h_align;
    }

    *crop_x = (src_w - *crop_w) >> 1;
    *crop_y = (src_h - *crop_h) >> 1;

    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
        *crop_x -= 1;

    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
        *crop_y -= 1;

    return true;
}

int ExynosCameraHWInterface::m_calibratePosition(int w, int new_w, int pos)
{
    return (float)(pos * new_w) / (float)w;
}

static CameraInfo sCameraInfo[] = {
    {
        CAMERA_FACING_BACK,
        0, /* orientation */
    },
    {
        CAMERA_FACING_FRONT,
        0, /* orientation */
    }
};

/** Close this device */

static camera_device_t *g_cam_device;

static int HAL_camera_device_close(struct hw_device_t* device)
{
    ALOGV("DEBUG(%s):", __func__);
    if (device) {
        camera_device_t *cam_device = (camera_device_t *)device;
        delete static_cast<ExynosCameraHWInterface *>(cam_device->priv);
        free(cam_device);
        g_cam_device = 0;
    }
    return 0;
}

static inline ExynosCameraHWInterface *obj(struct camera_device *dev)
{
    return reinterpret_cast<ExynosCameraHWInterface *>(dev->priv);
}

/** Set the preview_stream_ops to which preview frames are sent */
static int HAL_camera_device_set_preview_window(struct camera_device *dev,
                                                struct preview_stream_ops *buf)
{
    ALOGV("DEBUG(%s):", __func__);
    return obj(dev)->setPreviewWindow(buf);
}

/** Set the notification and data callbacks */
static void HAL_camera_device_set_callbacks(struct camera_device *dev,
                                            camera_notify_callback notify_cb,
                                            camera_data_callback data_cb,
                                            camera_data_timestamp_callback data_cb_timestamp,
                                            camera_request_memory get_memory,
                                            void* user)
{
    ALOGV("DEBUG(%s):", __func__);
    obj(dev)->setCallbacks(notify_cb, data_cb, data_cb_timestamp,
                           get_memory,
                           user);
}

/**
 * The following three functions all take a msg_type, which is a bitmask of
 * the messages defined in include/ui/Camera.h
 */

/**
 * Enable a message, or set of messages.
 */
static void HAL_camera_device_enable_msg_type(struct camera_device *dev, int32_t msg_type)
{
    ALOGV("DEBUG(%s):", __func__);
    obj(dev)->enableMsgType(msg_type);
}

/**
 * Disable a message, or a set of messages.
 *
 * Once it receives a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), the camera
 * HAL should not rely on its client to call releaseRecordingFrame() to
 * release video recording frames sent out by the camera HAL before and
 * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL
 * clients must not modify/access any video recording frame after calling
 * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
 */
static void HAL_camera_device_disable_msg_type(struct camera_device *dev, int32_t msg_type)
{
    ALOGV("DEBUG(%s):", __func__);
    obj(dev)->disableMsgType(msg_type);
}

/**
 * Query whether a message, or a set of messages, is enabled. Note that
 * this operates as an AND: if any of the messages queried are off, this
 * will return false.
 */
static int HAL_camera_device_msg_type_enabled(struct camera_device *dev, int32_t msg_type)
{
    ALOGV("DEBUG(%s):", __func__);
    return obj(dev)->msgTypeEnabled(msg_type);
}

/**
 * Start preview mode.
 */
static int HAL_camera_device_start_preview(struct camera_device *dev)
{
    ALOGV("DEBUG(%s):", __func__);
    return obj(dev)->startPreview();
}

/**
 * Stop a previously started preview.
 */
static void HAL_camera_device_stop_preview(struct camera_device *dev)
{
    ALOGV("DEBUG(%s):", __func__);
    obj(dev)->stopPreview();
}

/**
 * Returns true if preview is enabled.
 */
static int HAL_camera_device_preview_enabled(struct camera_device *dev)
{
    ALOGV("DEBUG(%s):", __func__);
    return obj(dev)->previewEnabled();
}

/**
 * Request the camera HAL to store meta data or real YUV data in the video
 * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If
 * it is not called, the default camera HAL behavior is to store real YUV
 * data in the video buffers.
 *
 * This method should be called before startRecording() in order to be
 * effective.
 *
 * If meta data is stored in the video buffers, it is up to the receiver of
 * the video buffers to interpret the contents and to find the actual frame
 * data with the help of the meta data in the buffer. How this is done is
 * outside of the scope of this method.
 *
 * Some camera HALs may not support storing meta data in the video buffers,
 * but all camera HALs should support storing real YUV data in the video
 * buffers. If the camera HAL does not support storing the meta data in the
 * video buffers when it is requested to do so, INVALID_OPERATION must be
 * returned. It is very useful for the camera HAL to pass meta data rather
 * than the actual frame data directly to the video encoder, since the
 * amount of the uncompressed frame data can be very large if video size is
 * large.
 *
 * @param enable if true to instruct the camera HAL to store
 *               meta data in the video buffers; false to instruct
 *               the camera HAL to store real YUV data in the video
 *               buffers.
 *
 * @return OK on success.
 */
static int HAL_camera_device_store_meta_data_in_buffers(struct camera_device *dev, int enable)
{
    ALOGV("DEBUG(%s):", __func__);
    return obj(dev)->storeMetaDataInBuffers(enable);
}

/**
 * Start record mode. When a record image is available, a
 * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding
 * frame. Every record frame must be released by a camera HAL client via
 * releaseRecordingFrame() before the client calls
 * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
 * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
 * responsibility to manage the life-cycle of the video recording frames,
 * and the client must not modify/access any video recording frames.
4291 */ 4292static int HAL_camera_device_start_recording(struct camera_device *dev) 4293{ 4294 ALOGV("DEBUG(%s):", __func__); 4295 return obj(dev)->startRecording(); 4296} 4297 4298/** 4299 * Stop a previously started recording. 4300 */ 4301static void HAL_camera_device_stop_recording(struct camera_device *dev) 4302{ 4303 ALOGV("DEBUG(%s):", __func__); 4304 obj(dev)->stopRecording(); 4305} 4306 4307/** 4308 * Returns true if recording is enabled. 4309 */ 4310static int HAL_camera_device_recording_enabled(struct camera_device *dev) 4311{ 4312 ALOGV("DEBUG(%s):", __func__); 4313 return obj(dev)->recordingEnabled(); 4314} 4315 4316/** 4317 * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME. 4318 * 4319 * It is camera HAL client's responsibility to release video recording 4320 * frames sent out by the camera HAL before the camera HAL receives a call 4321 * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to 4322 * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's 4323 * responsibility to manage the life-cycle of the video recording frames. 4324 */ 4325static void HAL_camera_device_release_recording_frame(struct camera_device *dev, 4326 const void *opaque) 4327{ 4328 ALOGV("DEBUG(%s):", __func__); 4329 obj(dev)->releaseRecordingFrame(opaque); 4330} 4331 4332/** 4333 * Start auto focus, the notification callback routine is called with 4334 * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be 4335 * called again if another auto focus is needed. 4336 */ 4337static int HAL_camera_device_auto_focus(struct camera_device *dev) 4338{ 4339 ALOGV("DEBUG(%s):", __func__); 4340 return obj(dev)->autoFocus(); 4341} 4342 4343/** 4344 * Cancels auto-focus function. If the auto-focus is still in progress, 4345 * this function will cancel it. Whether the auto-focus is in progress or 4346 * not, this function will return the focus position to the default. If 4347 * the camera does not support auto-focus, this is a no-op. 4348 */ 4349static int HAL_camera_device_cancel_auto_focus(struct camera_device *dev) 4350{ 4351 ALOGV("DEBUG(%s):", __func__); 4352 return obj(dev)->cancelAutoFocus(); 4353} 4354 4355/** 4356 * Take a picture. 4357 */ 4358static int HAL_camera_device_take_picture(struct camera_device *dev) 4359{ 4360 ALOGV("DEBUG(%s):", __func__); 4361 return obj(dev)->takePicture(); 4362} 4363 4364/** 4365 * Cancel a picture that was started with takePicture. Calling this method 4366 * when no picture is being taken is a no-op. 4367 */ 4368static int HAL_camera_device_cancel_picture(struct camera_device *dev) 4369{ 4370 ALOGV("DEBUG(%s):", __func__); 4371 return obj(dev)->cancelPicture(); 4372} 4373 4374/** 4375 * Set the camera parameters. This returns BAD_VALUE if any parameter is 4376 * invalid or not supported. 4377 */ 4378static int HAL_camera_device_set_parameters(struct camera_device *dev, 4379 const char *parms) 4380{ 4381 ALOGV("DEBUG(%s):", __func__); 4382 String8 str(parms); 4383 CameraParameters p(str); 4384 return obj(dev)->setParameters(p); 4385} 4386 4387/** Return the camera parameters. 
*/ 4388char *HAL_camera_device_get_parameters(struct camera_device *dev) 4389{ 4390 ALOGV("DEBUG(%s):", __func__); 4391 String8 str; 4392 CameraParameters parms = obj(dev)->getParameters(); 4393 str = parms.flatten(); 4394 return strdup(str.string()); 4395} 4396 4397static void HAL_camera_device_put_parameters(struct camera_device *dev, char *parms) 4398{ 4399 ALOGV("DEBUG(%s):", __func__); 4400 free(parms); 4401} 4402 4403/** 4404 * Send command to camera driver. 4405 */ 4406static int HAL_camera_device_send_command(struct camera_device *dev, 4407 int32_t cmd, int32_t arg1, int32_t arg2) 4408{ 4409 ALOGV("DEBUG(%s):", __func__); 4410 return obj(dev)->sendCommand(cmd, arg1, arg2); 4411} 4412 4413/** 4414 * Release the hardware resources owned by this object. Note that this is 4415 * *not* done in the destructor. 4416 */ 4417static void HAL_camera_device_release(struct camera_device *dev) 4418{ 4419 ALOGV("DEBUG(%s):", __func__); 4420 obj(dev)->release(); 4421} 4422 4423/** 4424 * Dump state of the camera hardware 4425 */ 4426static int HAL_camera_device_dump(struct camera_device *dev, int fd) 4427{ 4428 ALOGV("DEBUG(%s):", __func__); 4429 return obj(dev)->dump(fd); 4430} 4431 4432static int HAL_getNumberOfCameras() 4433{ 4434 ALOGV("DEBUG(%s):", __func__); 4435 return sizeof(sCameraInfo) / sizeof(sCameraInfo[0]); 4436} 4437 4438static int HAL_getCameraInfo(int cameraId, struct camera_info *cameraInfo) 4439{ 4440 ALOGV("DEBUG(%s):", __func__); 4441 memcpy(cameraInfo, &sCameraInfo[cameraId], sizeof(CameraInfo)); 4442 return 0; 4443} 4444 4445#define SET_METHOD(m) m : HAL_camera_device_##m 4446 4447static camera_device_ops_t camera_device_ops = { 4448 SET_METHOD(set_preview_window), 4449 SET_METHOD(set_callbacks), 4450 SET_METHOD(enable_msg_type), 4451 SET_METHOD(disable_msg_type), 4452 SET_METHOD(msg_type_enabled), 4453 SET_METHOD(start_preview), 4454 SET_METHOD(stop_preview), 4455 SET_METHOD(preview_enabled), 4456 SET_METHOD(store_meta_data_in_buffers), 4457 SET_METHOD(start_recording), 4458 SET_METHOD(stop_recording), 4459 SET_METHOD(recording_enabled), 4460 SET_METHOD(release_recording_frame), 4461 SET_METHOD(auto_focus), 4462 SET_METHOD(cancel_auto_focus), 4463 SET_METHOD(take_picture), 4464 SET_METHOD(cancel_picture), 4465 SET_METHOD(set_parameters), 4466 SET_METHOD(get_parameters), 4467 SET_METHOD(put_parameters), 4468 SET_METHOD(send_command), 4469 SET_METHOD(release), 4470 SET_METHOD(dump), 4471}; 4472 4473#undef SET_METHOD 4474 4475static int HAL_camera_device_open(const struct hw_module_t* module, 4476 const char *id, 4477 struct hw_device_t** device) 4478{ 4479 ALOGV("DEBUG(%s):", __func__); 4480 4481 int cameraId = atoi(id); 4482 if (cameraId < 0 || cameraId >= HAL_getNumberOfCameras()) { 4483 ALOGE("ERR(%s):Invalid camera ID %s", __func__, id); 4484 return -EINVAL; 4485 } 4486 4487 if (g_cam_device) { 4488 if (obj(g_cam_device)->getCameraId() == cameraId) { 4489 ALOGV("DEBUG(%s):returning existing camera ID %s", __func__, id); 4490 goto done; 4491 } else { 4492 ALOGE("ERR(%s):Cannot open camera %d. 
camera %d is already running!", 4493 __func__, cameraId, obj(g_cam_device)->getCameraId()); 4494 return -ENOSYS; 4495 } 4496 } 4497 4498 g_cam_device = (camera_device_t *)malloc(sizeof(camera_device_t)); 4499 if (!g_cam_device) 4500 return -ENOMEM; 4501 4502 g_cam_device->common.tag = HARDWARE_DEVICE_TAG; 4503 g_cam_device->common.version = 1; 4504 g_cam_device->common.module = const_cast<hw_module_t *>(module); 4505 g_cam_device->common.close = HAL_camera_device_close; 4506 4507 g_cam_device->ops = &camera_device_ops; 4508 4509 ALOGV("DEBUG(%s):open camera %s", __func__, id); 4510 4511 g_cam_device->priv = new ExynosCameraHWInterface(cameraId, g_cam_device); 4512 4513done: 4514 *device = (hw_device_t *)g_cam_device; 4515 ALOGV("DEBUG(%s):opened camera %s (%p)", __func__, id, *device); 4516 return 0; 4517} 4518 4519static hw_module_methods_t camera_module_methods = { 4520 open : HAL_camera_device_open 4521}; 4522 4523extern "C" { 4524 struct camera_module HAL_MODULE_INFO_SYM = { 4525 common : { 4526 tag : HARDWARE_MODULE_TAG, 4527 version_major : 1, 4528 version_minor : 0, 4529 id : CAMERA_HARDWARE_MODULE_ID, 4530 name : "orion camera HAL", 4531 author : "Samsung Corporation", 4532 methods : &camera_module_methods, 4533 }, 4534 get_number_of_cameras : HAL_getNumberOfCameras, 4535 get_camera_info : HAL_getCameraInfo 4536 }; 4537} 4538 4539}; // namespace android 4540