Camera2Client.cpp revision 2e19c3c02957208371cdd491e6342ea7ddb440d9
1/* 2 * Copyright (C) 2012 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#define LOG_TAG "Camera2Client" 18#define ATRACE_TAG ATRACE_TAG_CAMERA 19//#define LOG_NDEBUG 0 20 21#include <utils/Log.h> 22#include <utils/Trace.h> 23 24#include <cutils/properties.h> 25#include <gui/SurfaceTextureClient.h> 26#include <gui/Surface.h> 27#include <media/hardware/MetadataBufferType.h> 28 29#include "Camera2Client.h" 30 31#define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__); 32#define ALOG2(...) 
ALOGD_IF(gLogLevel >= 2, __VA_ARGS__); 33 34namespace android { 35 36using namespace camera2; 37 38static int getCallingPid() { 39 return IPCThreadState::self()->getCallingPid(); 40} 41 42static int getCallingUid() { 43 return IPCThreadState::self()->getCallingUid(); 44} 45 46// Interface used by CameraService 47 48Camera2Client::Camera2Client(const sp<CameraService>& cameraService, 49 const sp<ICameraClient>& cameraClient, 50 int cameraId, 51 int cameraFacing, 52 int clientPid): 53 Client(cameraService, cameraClient, 54 cameraId, cameraFacing, clientPid), 55 mParameters(cameraId, cameraFacing), 56 mPreviewStreamId(NO_STREAM), 57 mCallbackStreamId(NO_STREAM), 58 mCallbackHeapId(0), 59 mCaptureStreamId(NO_STREAM), 60 mRecordingStreamId(NO_STREAM), 61 mRecordingHeapCount(kDefaultRecordingHeapCount) 62{ 63 ATRACE_CALL(); 64 ALOGV("%s: Created client for camera %d", __FUNCTION__, cameraId); 65 66 mDevice = new Camera2Device(cameraId); 67 68 SharedParameters::Lock l(mParameters); 69 l.mParameters.state = Parameters::DISCONNECTED; 70} 71 72status_t Camera2Client::checkPid(const char* checkLocation) const { 73 int callingPid = getCallingPid(); 74 if (callingPid == mClientPid) return NO_ERROR; 75 76 ALOGE("%s: attempt to use a locked camera from a different process" 77 " (old pid %d, new pid %d)", checkLocation, mClientPid, callingPid); 78 return PERMISSION_DENIED; 79} 80 81status_t Camera2Client::initialize(camera_module_t *module) 82{ 83 ATRACE_CALL(); 84 ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId); 85 status_t res; 86 87 mFrameProcessor = new FrameProcessor(this); 88 String8 frameThreadName = String8::format("Camera2Client[%d]::FrameProcessor", 89 mCameraId); 90 mFrameProcessor->run(frameThreadName.string()); 91 92 res = mDevice->initialize(module); 93 if (res != OK) { 94 ALOGE("%s: Camera %d: unable to initialize device: %s (%d)", 95 __FUNCTION__, mCameraId, strerror(-res), res); 96 return NO_INIT; 97 } 98 99 res = 
mDevice->setNotifyCallback(this); 100 101 SharedParameters::Lock l(mParameters); 102 103 res = l.mParameters.initialize(&(mDevice->info())); 104 if (res != OK) { 105 ALOGE("%s: Camera %d: unable to build defaults: %s (%d)", 106 __FUNCTION__, mCameraId, strerror(-res), res); 107 return NO_INIT; 108 } 109 110 if (gLogLevel >= 1) { 111 ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__, 112 mCameraId); 113 ALOGD("%s", l.mParameters.paramsFlattened.string()); 114 } 115 116 return OK; 117} 118 119Camera2Client::~Camera2Client() { 120 ATRACE_CALL(); 121 ALOGV("%s: Camera %d: Shutting down client.", __FUNCTION__, mCameraId); 122 123 mDestructionStarted = true; 124 125 // Rewrite mClientPid to allow shutdown by CameraService 126 mClientPid = getCallingPid(); 127 disconnect(); 128 129 mFrameProcessor->requestExit(); 130 ALOGV("%s: Camera %d: Shutdown complete", __FUNCTION__, mCameraId); 131} 132 133status_t Camera2Client::dump(int fd, const Vector<String16>& args) { 134 String8 result; 135 result.appendFormat("Client2[%d] (%p) PID: %d, dump:\n", 136 mCameraId, 137 getCameraClient()->asBinder().get(), 138 mClientPid); 139 result.append(" State: "); 140#define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break; 141 142 const Parameters& p = mParameters.unsafeAccess(); 143 144 result.append(Parameters::getStateName(p.state)); 145 146 result.append("\n Current parameters:\n"); 147 result.appendFormat(" Preview size: %d x %d\n", 148 p.previewWidth, p.previewHeight); 149 result.appendFormat(" Preview FPS range: %d - %d\n", 150 p.previewFpsRange[0], p.previewFpsRange[1]); 151 result.appendFormat(" Preview HAL pixel format: 0x%x\n", 152 p.previewFormat); 153 result.appendFormat(" Preview transform: %x\n", 154 p.previewTransform); 155 result.appendFormat(" Picture size: %d x %d\n", 156 p.pictureWidth, p.pictureHeight); 157 result.appendFormat(" Jpeg thumbnail size: %d x %d\n", 158 p.jpegThumbSize[0], p.jpegThumbSize[1]); 159 result.appendFormat(" Jpeg 
quality: %d, thumbnail quality: %d\n", 160 p.jpegQuality, p.jpegThumbQuality); 161 result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation); 162 result.appendFormat(" GPS tags %s\n", 163 p.gpsEnabled ? "enabled" : "disabled"); 164 if (p.gpsEnabled) { 165 result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n", 166 p.gpsCoordinates[0], p.gpsCoordinates[1], 167 p.gpsCoordinates[2]); 168 result.appendFormat(" GPS timestamp: %lld\n", 169 p.gpsTimestamp); 170 result.appendFormat(" GPS processing method: %s\n", 171 p.gpsProcessingMethod.string()); 172 } 173 174 result.append(" White balance mode: "); 175 switch (p.wbMode) { 176 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_AUTO) 177 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_INCANDESCENT) 178 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_FLUORESCENT) 179 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_WARM_FLUORESCENT) 180 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_DAYLIGHT) 181 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_CLOUDY_DAYLIGHT) 182 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_TWILIGHT) 183 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_SHADE) 184 default: result.append("UNKNOWN\n"); 185 } 186 187 result.append(" Effect mode: "); 188 switch (p.effectMode) { 189 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_OFF) 190 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MONO) 191 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_NEGATIVE) 192 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_SOLARIZE) 193 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_SEPIA) 194 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_POSTERIZE) 195 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_WHITEBOARD) 196 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_BLACKBOARD) 197 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_AQUA) 198 default: result.append("UNKNOWN\n"); 199 } 200 201 result.append(" Antibanding mode: "); 202 switch (p.antibandingMode) { 203 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_AUTO) 204 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_OFF) 205 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_50HZ) 206 
CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_60HZ) 207 default: result.append("UNKNOWN\n"); 208 } 209 210 result.append(" Scene mode: "); 211 switch (p.sceneMode) { 212 case ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED: 213 result.append("AUTO\n"); break; 214 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION) 215 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT) 216 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE) 217 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT) 218 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT) 219 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE) 220 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH) 221 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW) 222 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET) 223 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO) 224 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS) 225 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS) 226 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY) 227 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT) 228 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE) 229 default: result.append("UNKNOWN\n"); 230 } 231 232 result.append(" Flash mode: "); 233 switch (p.flashMode) { 234 CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF) 235 CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO) 236 CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON) 237 CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH) 238 CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE) 239 CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID) 240 default: result.append("UNKNOWN\n"); 241 } 242 243 result.append(" Focus mode: "); 244 switch (p.focusMode) { 245 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO) 246 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO) 247 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) 248 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE) 249 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF) 250 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY) 251 
CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED) 252 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID) 253 default: result.append("UNKNOWN\n"); 254 } 255 256 result.append(" Focusing areas:\n"); 257 for (size_t i = 0; i < p.focusingAreas.size(); i++) { 258 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n", 259 p.focusingAreas[i].left, 260 p.focusingAreas[i].top, 261 p.focusingAreas[i].right, 262 p.focusingAreas[i].bottom, 263 p.focusingAreas[i].weight); 264 } 265 266 result.appendFormat(" Exposure compensation index: %d\n", 267 p.exposureCompensation); 268 269 result.appendFormat(" AE lock %s, AWB lock %s\n", 270 p.autoExposureLock ? "enabled" : "disabled", 271 p.autoWhiteBalanceLock ? "enabled" : "disabled" ); 272 273 result.appendFormat(" Metering areas:\n"); 274 for (size_t i = 0; i < p.meteringAreas.size(); i++) { 275 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n", 276 p.meteringAreas[i].left, 277 p.meteringAreas[i].top, 278 p.meteringAreas[i].right, 279 p.meteringAreas[i].bottom, 280 p.meteringAreas[i].weight); 281 } 282 283 result.appendFormat(" Zoom index: %d\n", p.zoom); 284 result.appendFormat(" Video size: %d x %d\n", p.videoWidth, 285 p.videoHeight); 286 287 result.appendFormat(" Recording hint is %s\n", 288 p.recordingHint ? "set" : "not set"); 289 290 result.appendFormat(" Video stabilization is %s\n", 291 p.videoStabilization ? 
"enabled" : "disabled"); 292 293 result.append(" Current streams:\n"); 294 result.appendFormat(" Preview stream ID: %d\n", mPreviewStreamId); 295 result.appendFormat(" Capture stream ID: %d\n", mCaptureStreamId); 296 result.appendFormat(" Recording stream ID: %d\n", mRecordingStreamId); 297 298 result.append(" Current requests:\n"); 299 if (mPreviewRequest.entryCount() != 0) { 300 result.append(" Preview request:\n"); 301 write(fd, result.string(), result.size()); 302 mPreviewRequest.dump(fd, 2, 6); 303 } else { 304 result.append(" Preview request: undefined\n"); 305 write(fd, result.string(), result.size()); 306 } 307 308 if (mCaptureRequest.entryCount() != 0) { 309 result = " Capture request:\n"; 310 write(fd, result.string(), result.size()); 311 mCaptureRequest.dump(fd, 2, 6); 312 } else { 313 result = " Capture request: undefined\n"; 314 write(fd, result.string(), result.size()); 315 } 316 317 if (mRecordingRequest.entryCount() != 0) { 318 result = " Recording request:\n"; 319 write(fd, result.string(), result.size()); 320 mRecordingRequest.dump(fd, 2, 6); 321 } else { 322 result = " Recording request: undefined\n"; 323 write(fd, result.string(), result.size()); 324 } 325 326 mFrameProcessor->dump(fd, args); 327 328 result = " Device dump:\n"; 329 write(fd, result.string(), result.size()); 330 331 status_t res = mDevice->dump(fd, args); 332 if (res != OK) { 333 result = String8::format(" Error dumping device: %s (%d)", 334 strerror(-res), res); 335 write(fd, result.string(), result.size()); 336 } 337 338#undef CASE_APPEND_ENUM 339 return NO_ERROR; 340} 341 342// ICamera interface 343 344void Camera2Client::disconnect() { 345 ATRACE_CALL(); 346 ALOGV("%s: E", __FUNCTION__); 347 Mutex::Autolock icl(mICameraLock); 348 status_t res; 349 if ( (res = checkPid(__FUNCTION__) ) != OK) return; 350 351 if (mDevice == 0) return; 352 353 stopPreviewL(); 354 355 if (mPreviewStreamId != NO_STREAM) { 356 mDevice->deleteStream(mPreviewStreamId); 357 mPreviewStreamId = 
NO_STREAM; 358 } 359 360 if (mCaptureStreamId != NO_STREAM) { 361 mDevice->deleteStream(mCaptureStreamId); 362 mCaptureStreamId = NO_STREAM; 363 } 364 365 if (mRecordingStreamId != NO_STREAM) { 366 mDevice->deleteStream(mRecordingStreamId); 367 mRecordingStreamId = NO_STREAM; 368 } 369 370 if (mCallbackStreamId != NO_STREAM) { 371 mDevice->deleteStream(mCallbackStreamId); 372 mCallbackStreamId = NO_STREAM; 373 } 374 375 mDevice.clear(); 376 SharedParameters::Lock l(mParameters); 377 l.mParameters.state = Parameters::DISCONNECTED; 378 379 CameraService::Client::disconnect(); 380} 381 382status_t Camera2Client::connect(const sp<ICameraClient>& client) { 383 ATRACE_CALL(); 384 ALOGV("%s: E", __FUNCTION__); 385 Mutex::Autolock icl(mICameraLock); 386 387 if (mClientPid != 0 && getCallingPid() != mClientPid) { 388 ALOGE("%s: Camera %d: Connection attempt from pid %d; " 389 "current locked to pid %d", __FUNCTION__, 390 mCameraId, getCallingPid(), mClientPid); 391 return BAD_VALUE; 392 } 393 394 mClientPid = getCallingPid(); 395 396 Mutex::Autolock iccl(mICameraClientLock); 397 mCameraClient = client; 398 399 SharedParameters::Lock l(mParameters); 400 l.mParameters.state = Parameters::STOPPED; 401 402 return OK; 403} 404 405status_t Camera2Client::lock() { 406 ATRACE_CALL(); 407 ALOGV("%s: E", __FUNCTION__); 408 Mutex::Autolock icl(mICameraLock); 409 ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d", 410 __FUNCTION__, mCameraId, getCallingPid(), mClientPid); 411 412 if (mClientPid == 0) { 413 mClientPid = getCallingPid(); 414 return OK; 415 } 416 417 if (mClientPid != getCallingPid()) { 418 ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d", 419 __FUNCTION__, mCameraId, getCallingPid(), mClientPid); 420 return EBUSY; 421 } 422 423 return OK; 424} 425 426status_t Camera2Client::unlock() { 427 ATRACE_CALL(); 428 ALOGV("%s: E", __FUNCTION__); 429 Mutex::Autolock icl(mICameraLock); 430 ALOGV("%s: Camera %d: Unlock call from pid %d; 
current client pid %d", 431 __FUNCTION__, mCameraId, getCallingPid(), mClientPid); 432 433 // TODO: Check for uninterruptable conditions 434 435 if (mClientPid == getCallingPid()) { 436 Mutex::Autolock iccl(mICameraClientLock); 437 438 mClientPid = 0; 439 mCameraClient.clear(); 440 return OK; 441 } 442 443 ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d", 444 __FUNCTION__, mCameraId, getCallingPid(), mClientPid); 445 return EBUSY; 446} 447 448status_t Camera2Client::setPreviewDisplay( 449 const sp<Surface>& surface) { 450 ATRACE_CALL(); 451 ALOGV("%s: E", __FUNCTION__); 452 Mutex::Autolock icl(mICameraLock); 453 status_t res; 454 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 455 456 sp<IBinder> binder; 457 sp<ANativeWindow> window; 458 if (surface != 0) { 459 binder = surface->asBinder(); 460 window = surface; 461 } 462 463 return setPreviewWindowL(binder,window); 464} 465 466status_t Camera2Client::setPreviewTexture( 467 const sp<ISurfaceTexture>& surfaceTexture) { 468 ATRACE_CALL(); 469 ALOGV("%s: E", __FUNCTION__); 470 Mutex::Autolock icl(mICameraLock); 471 status_t res; 472 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 473 474 sp<IBinder> binder; 475 sp<ANativeWindow> window; 476 if (surfaceTexture != 0) { 477 binder = surfaceTexture->asBinder(); 478 window = new SurfaceTextureClient(surfaceTexture); 479 } 480 return setPreviewWindowL(binder, window); 481} 482 483status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder, 484 sp<ANativeWindow> window) { 485 ATRACE_CALL(); 486 status_t res; 487 488 if (binder == mPreviewSurface) { 489 ALOGV("%s: Camera %d: New window is same as old window", 490 __FUNCTION__, mCameraId); 491 return NO_ERROR; 492 } 493 494 SharedParameters::Lock l(mParameters); 495 switch (l.mParameters.state) { 496 case Parameters::DISCONNECTED: 497 case Parameters::RECORD: 498 case Parameters::STILL_CAPTURE: 499 case Parameters::VIDEO_SNAPSHOT: 500 ALOGE("%s: Camera %d: Cannot set preview 
display while in state %s", 501 __FUNCTION__, mCameraId, 502 Parameters::getStateName(l.mParameters.state)); 503 return INVALID_OPERATION; 504 case Parameters::STOPPED: 505 case Parameters::WAITING_FOR_PREVIEW_WINDOW: 506 // OK 507 break; 508 case Parameters::PREVIEW: 509 // Already running preview - need to stop and create a new stream 510 // TODO: Optimize this so that we don't wait for old stream to drain 511 // before spinning up new stream 512 mDevice->clearStreamingRequest(); 513 l.mParameters.state = Parameters::WAITING_FOR_PREVIEW_WINDOW; 514 break; 515 } 516 517 if (mPreviewStreamId != NO_STREAM) { 518 res = mDevice->waitUntilDrained(); 519 if (res != OK) { 520 ALOGE("%s: Error waiting for preview to drain: %s (%d)", 521 __FUNCTION__, strerror(-res), res); 522 return res; 523 } 524 res = mDevice->deleteStream(mPreviewStreamId); 525 if (res != OK) { 526 ALOGE("%s: Unable to delete old preview stream: %s (%d)", 527 __FUNCTION__, strerror(-res), res); 528 return res; 529 } 530 mPreviewStreamId = NO_STREAM; 531 } 532 533 mPreviewSurface = binder; 534 mPreviewWindow = window; 535 536 if (l.mParameters.state == Parameters::WAITING_FOR_PREVIEW_WINDOW) { 537 return startPreviewL(l.mParameters, false); 538 } 539 540 return OK; 541} 542 543void Camera2Client::setPreviewCallbackFlag(int flag) { 544 ATRACE_CALL(); 545 ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag); 546 Mutex::Autolock icl(mICameraLock); 547 status_t res; 548 if ( checkPid(__FUNCTION__) != OK) return; 549 550 SharedParameters::Lock l(mParameters); 551 setPreviewCallbackFlagL(l.mParameters, flag); 552} 553 554void Camera2Client::setPreviewCallbackFlagL(Parameters ¶ms, int flag) { 555 status_t res = OK; 556 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) { 557 ALOGV("%s: setting oneshot", __FUNCTION__); 558 params.previewCallbackOneShot = true; 559 } 560 if (params.previewCallbackFlags != (uint32_t)flag) { 561 params.previewCallbackFlags = flag; 562 switch(params.state) { 563 case 
Parameters::PREVIEW: 564 res = startPreviewL(params, true); 565 break; 566 case Parameters::RECORD: 567 case Parameters::VIDEO_SNAPSHOT: 568 res = startRecordingL(params, true); 569 break; 570 default: 571 break; 572 } 573 if (res != OK) { 574 ALOGE("%s: Camera %d: Unable to refresh request in state %s", 575 __FUNCTION__, mCameraId, 576 Parameters::getStateName(params.state)); 577 } 578 } 579 580} 581 582status_t Camera2Client::startPreview() { 583 ATRACE_CALL(); 584 ALOGV("%s: E", __FUNCTION__); 585 Mutex::Autolock icl(mICameraLock); 586 status_t res; 587 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 588 SharedParameters::Lock l(mParameters); 589 return startPreviewL(l.mParameters, false); 590} 591 592status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { 593 ATRACE_CALL(); 594 status_t res; 595 if (params.state >= Parameters::PREVIEW && !restart) { 596 ALOGE("%s: Can't start preview in state %s", 597 __FUNCTION__, 598 Parameters::getStateName(params.state)); 599 return INVALID_OPERATION; 600 } 601 602 if (mPreviewWindow == 0) { 603 params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW; 604 return OK; 605 } 606 params.state = Parameters::STOPPED; 607 608 res = updatePreviewStream(params); 609 if (res != OK) { 610 ALOGE("%s: Camera %d: Unable to update preview stream: %s (%d)", 611 __FUNCTION__, mCameraId, strerror(-res), res); 612 return res; 613 } 614 bool callbacksEnabled = params.previewCallbackFlags & 615 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK; 616 if (callbacksEnabled) { 617 res = updateCallbackStream(params); 618 if (res != OK) { 619 ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)", 620 __FUNCTION__, mCameraId, strerror(-res), res); 621 return res; 622 } 623 } 624 625 if (mPreviewRequest.entryCount() == 0) { 626 res = updatePreviewRequest(params); 627 if (res != OK) { 628 ALOGE("%s: Camera %d: Unable to create preview request: %s (%d)", 629 __FUNCTION__, mCameraId, strerror(-res), res); 630 return res; 631 } 632 } 633 
634 if (callbacksEnabled) { 635 uint8_t outputStreams[2] = 636 { mPreviewStreamId, mCallbackStreamId }; 637 res = mPreviewRequest.update( 638 ANDROID_REQUEST_OUTPUT_STREAMS, 639 outputStreams, 2); 640 } else { 641 uint8_t outputStreams[1] = { mPreviewStreamId }; 642 res = mPreviewRequest.update( 643 ANDROID_REQUEST_OUTPUT_STREAMS, 644 outputStreams, 1); 645 } 646 if (res != OK) { 647 ALOGE("%s: Camera %d: Unable to set up preview request: %s (%d)", 648 __FUNCTION__, mCameraId, strerror(-res), res); 649 return res; 650 } 651 res = mPreviewRequest.sort(); 652 if (res != OK) { 653 ALOGE("%s: Camera %d: Error sorting preview request: %s (%d)", 654 __FUNCTION__, mCameraId, strerror(-res), res); 655 return res; 656 } 657 658 res = mDevice->setStreamingRequest(mPreviewRequest); 659 if (res != OK) { 660 ALOGE("%s: Camera %d: Unable to set preview request to start preview: " 661 "%s (%d)", 662 __FUNCTION__, mCameraId, strerror(-res), res); 663 return res; 664 } 665 params.state = Parameters::PREVIEW; 666 667 return OK; 668} 669 670void Camera2Client::stopPreview() { 671 ATRACE_CALL(); 672 ALOGV("%s: E", __FUNCTION__); 673 Mutex::Autolock icl(mICameraLock); 674 status_t res; 675 if ( (res = checkPid(__FUNCTION__) ) != OK) return; 676 stopPreviewL(); 677} 678 679void Camera2Client::stopPreviewL() { 680 ATRACE_CALL(); 681 Parameters::State state; 682 { 683 SharedParameters::Lock l(mParameters); 684 state = l.mParameters.state; 685 } 686 687 switch (state) { 688 case Parameters::DISCONNECTED: 689 ALOGE("%s: Camera %d: Call before initialized", 690 __FUNCTION__, mCameraId); 691 break; 692 case Parameters::STOPPED: 693 break; 694 case Parameters::STILL_CAPTURE: 695 ALOGE("%s: Camera %d: Cannot stop preview during still capture.", 696 __FUNCTION__, mCameraId); 697 break; 698 case Parameters::RECORD: 699 // no break - identical to preview 700 case Parameters::PREVIEW: 701 mDevice->clearStreamingRequest(); 702 mDevice->waitUntilDrained(); 703 // no break 704 case 
Parameters::WAITING_FOR_PREVIEW_WINDOW: { 705 SharedParameters::Lock l(mParameters); 706 l.mParameters.state = Parameters::STOPPED; 707 commandStopFaceDetectionL(l.mParameters); 708 break; 709 } 710 default: 711 ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId, 712 state); 713 } 714} 715 716bool Camera2Client::previewEnabled() { 717 ATRACE_CALL(); 718 Mutex::Autolock icl(mICameraLock); 719 status_t res; 720 if ( (res = checkPid(__FUNCTION__) ) != OK) return false; 721 722 SharedParameters::Lock l(mParameters); 723 return l.mParameters.state == Parameters::PREVIEW; 724} 725 726status_t Camera2Client::storeMetaDataInBuffers(bool enabled) { 727 ATRACE_CALL(); 728 Mutex::Autolock icl(mICameraLock); 729 status_t res; 730 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 731 732 SharedParameters::Lock l(mParameters); 733 switch (l.mParameters.state) { 734 case Parameters::RECORD: 735 case Parameters::VIDEO_SNAPSHOT: 736 ALOGE("%s: Camera %d: Can't be called in state %s", 737 __FUNCTION__, mCameraId, 738 Parameters::getStateName(l.mParameters.state)); 739 return INVALID_OPERATION; 740 default: 741 // OK 742 break; 743 } 744 745 l.mParameters.storeMetadataInBuffers = enabled; 746 747 return OK; 748} 749 750status_t Camera2Client::startRecording() { 751 ATRACE_CALL(); 752 ALOGV("%s: E", __FUNCTION__); 753 Mutex::Autolock icl(mICameraLock); 754 status_t res; 755 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 756 SharedParameters::Lock l(mParameters); 757 758 return startRecordingL(l.mParameters, false); 759} 760 761status_t Camera2Client::startRecordingL(Parameters ¶ms, bool restart) { 762 status_t res; 763 switch (params.state) { 764 case Parameters::STOPPED: 765 res = startPreviewL(params, false); 766 if (res != OK) return res; 767 break; 768 case Parameters::PREVIEW: 769 // Ready to go 770 break; 771 case Parameters::RECORD: 772 case Parameters::VIDEO_SNAPSHOT: 773 // OK to call this when recording is already on, just skip unless 774 // 
we're looking to restart 775 if (!restart) return OK; 776 break; 777 default: 778 ALOGE("%s: Camera %d: Can't start recording in state %s", 779 __FUNCTION__, mCameraId, 780 Parameters::getStateName(params.state)); 781 return INVALID_OPERATION; 782 }; 783 784 if (!params.storeMetadataInBuffers) { 785 ALOGE("%s: Camera %d: Recording only supported in metadata mode, but " 786 "non-metadata recording mode requested!", __FUNCTION__, 787 mCameraId); 788 return INVALID_OPERATION; 789 } 790 791 res = updateRecordingStream(params); 792 if (res != OK) { 793 ALOGE("%s: Camera %d: Unable to update recording stream: %s (%d)", 794 __FUNCTION__, mCameraId, strerror(-res), res); 795 return res; 796 } 797 bool callbacksEnabled = params.previewCallbackFlags & 798 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK; 799 if (callbacksEnabled) { 800 res = updateCallbackStream(params); 801 if (res != OK) { 802 ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)", 803 __FUNCTION__, mCameraId, strerror(-res), res); 804 return res; 805 } 806 } 807 808 if (mRecordingRequest.entryCount() == 0) { 809 res = updateRecordingRequest(params); 810 if (res != OK) { 811 ALOGE("%s: Camera %d: Unable to create recording request: %s (%d)", 812 __FUNCTION__, mCameraId, strerror(-res), res); 813 return res; 814 } 815 } 816 817 if (callbacksEnabled) { 818 uint8_t outputStreams[3] = 819 { mPreviewStreamId, mRecordingStreamId, mCallbackStreamId }; 820 res = mRecordingRequest.update( 821 ANDROID_REQUEST_OUTPUT_STREAMS, 822 outputStreams, 3); 823 } else { 824 uint8_t outputStreams[2] = { mPreviewStreamId, mRecordingStreamId }; 825 res = mRecordingRequest.update( 826 ANDROID_REQUEST_OUTPUT_STREAMS, 827 outputStreams, 2); 828 } 829 if (res != OK) { 830 ALOGE("%s: Camera %d: Unable to set up recording request: %s (%d)", 831 __FUNCTION__, mCameraId, strerror(-res), res); 832 return res; 833 } 834 res = mRecordingRequest.sort(); 835 if (res != OK) { 836 ALOGE("%s: Camera %d: Error sorting recording request: %s 
(%d)", 837 __FUNCTION__, mCameraId, strerror(-res), res); 838 return res; 839 } 840 841 res = mDevice->setStreamingRequest(mRecordingRequest); 842 if (res != OK) { 843 ALOGE("%s: Camera %d: Unable to set recording request to start " 844 "recording: %s (%d)", __FUNCTION__, mCameraId, 845 strerror(-res), res); 846 return res; 847 } 848 if (params.state < Parameters::RECORD) { 849 params.state = Parameters::RECORD; 850 } 851 852 return OK; 853} 854 855void Camera2Client::stopRecording() { 856 ATRACE_CALL(); 857 ALOGV("%s: E", __FUNCTION__); 858 Mutex::Autolock icl(mICameraLock); 859 SharedParameters::Lock l(mParameters); 860 861 status_t res; 862 if ( (res = checkPid(__FUNCTION__) ) != OK) return; 863 864 switch (l.mParameters.state) { 865 case Parameters::RECORD: 866 // OK to stop 867 break; 868 case Parameters::STOPPED: 869 case Parameters::PREVIEW: 870 case Parameters::STILL_CAPTURE: 871 case Parameters::VIDEO_SNAPSHOT: 872 default: 873 ALOGE("%s: Camera %d: Can't stop recording in state %s", 874 __FUNCTION__, mCameraId, 875 Parameters::getStateName(l.mParameters.state)); 876 return; 877 }; 878 879 // Back to preview. Since record can only be reached through preview, 880 // all preview stream setup should be up to date. 881 res = mDevice->setStreamingRequest(mPreviewRequest); 882 if (res != OK) { 883 ALOGE("%s: Camera %d: Unable to switch back to preview request: " 884 "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); 885 return; 886 } 887 888 // TODO: Should recording heap be freed? Can't do it yet since requests 889 // could still be in flight. 
890 891 l.mParameters.state = Parameters::PREVIEW; 892} 893 894bool Camera2Client::recordingEnabled() { 895 ATRACE_CALL(); 896 Mutex::Autolock icl(mICameraLock); 897 898 if ( checkPid(__FUNCTION__) != OK) return false; 899 900 return recordingEnabledL(); 901} 902 903bool Camera2Client::recordingEnabledL() { 904 ATRACE_CALL(); 905 SharedParameters::Lock l(mParameters); 906 907 return (l.mParameters.state == Parameters::RECORD 908 || l.mParameters.state == Parameters::VIDEO_SNAPSHOT); 909} 910 911void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) { 912 ATRACE_CALL(); 913 Mutex::Autolock icl(mICameraLock); 914 status_t res; 915 if ( checkPid(__FUNCTION__) != OK) return; 916 917 SharedParameters::Lock l(mParameters); 918 919 // Make sure this is for the current heap 920 ssize_t offset; 921 size_t size; 922 sp<IMemoryHeap> heap = mem->getMemory(&offset, &size); 923 if (heap->getHeapID() != mRecordingHeap->mHeap->getHeapID()) { 924 ALOGW("%s: Camera %d: Mismatched heap ID, ignoring release " 925 "(got %x, expected %x)", __FUNCTION__, mCameraId, 926 heap->getHeapID(), mRecordingHeap->mHeap->getHeapID()); 927 return; 928 } 929 uint8_t *data = (uint8_t*)heap->getBase() + offset; 930 uint32_t type = *(uint32_t*)data; 931 if (type != kMetadataBufferTypeGrallocSource) { 932 ALOGE("%s: Camera %d: Recording frame type invalid (got %x, expected %x)", 933 __FUNCTION__, mCameraId, type, kMetadataBufferTypeGrallocSource); 934 return; 935 } 936 937 // Release the buffer back to the recording queue 938 939 buffer_handle_t imgHandle = *(buffer_handle_t*)(data + 4); 940 941 size_t itemIndex; 942 for (itemIndex = 0; itemIndex < mRecordingBuffers.size(); itemIndex++) { 943 const BufferItemConsumer::BufferItem item = mRecordingBuffers[itemIndex]; 944 if (item.mBuf != BufferItemConsumer::INVALID_BUFFER_SLOT && 945 item.mGraphicBuffer->handle == imgHandle) { 946 break; 947 } 948 } 949 if (itemIndex == mRecordingBuffers.size()) { 950 ALOGE("%s: Camera %d: Can't find 
buffer_handle_t %p in list of " 951 "outstanding buffers", __FUNCTION__, mCameraId, imgHandle); 952 return; 953 } 954 955 ALOGV("%s: Camera %d: Freeing buffer_handle_t %p", __FUNCTION__, mCameraId, 956 imgHandle); 957 958 res = mRecordingConsumer->releaseBuffer(mRecordingBuffers[itemIndex]); 959 if (res != OK) { 960 ALOGE("%s: Camera %d: Unable to free recording frame (buffer_handle_t: %p):" 961 "%s (%d)", 962 __FUNCTION__, mCameraId, imgHandle, strerror(-res), res); 963 return; 964 } 965 mRecordingBuffers.replaceAt(itemIndex); 966 967 mRecordingHeapFree++; 968} 969 970status_t Camera2Client::autoFocus() { 971 ATRACE_CALL(); 972 Mutex::Autolock icl(mICameraLock); 973 status_t res; 974 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 975 976 int triggerId; 977 { 978 SharedParameters::Lock l(mParameters); 979 l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter; 980 triggerId = l.mParameters.currentAfTriggerId; 981 } 982 983 mDevice->triggerAutofocus(triggerId); 984 985 return OK; 986} 987 988status_t Camera2Client::cancelAutoFocus() { 989 ATRACE_CALL(); 990 Mutex::Autolock icl(mICameraLock); 991 status_t res; 992 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 993 994 int triggerId; 995 { 996 SharedParameters::Lock l(mParameters); 997 triggerId = ++l.mParameters.afTriggerCounter; 998 } 999 1000 mDevice->triggerCancelAutofocus(triggerId); 1001 1002 return OK; 1003} 1004 1005status_t Camera2Client::takePicture(int msgType) { 1006 ATRACE_CALL(); 1007 Mutex::Autolock icl(mICameraLock); 1008 status_t res; 1009 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 1010 1011 SharedParameters::Lock l(mParameters); 1012 switch (l.mParameters.state) { 1013 case Parameters::DISCONNECTED: 1014 case Parameters::STOPPED: 1015 case Parameters::WAITING_FOR_PREVIEW_WINDOW: 1016 ALOGE("%s: Camera %d: Cannot take picture without preview enabled", 1017 __FUNCTION__, mCameraId); 1018 return INVALID_OPERATION; 1019 case Parameters::PREVIEW: 1020 case 
Parameters::RECORD: 1021 // Good to go for takePicture 1022 break; 1023 case Parameters::STILL_CAPTURE: 1024 case Parameters::VIDEO_SNAPSHOT: 1025 ALOGE("%s: Camera %d: Already taking a picture", 1026 __FUNCTION__, mCameraId); 1027 return INVALID_OPERATION; 1028 } 1029 1030 ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId); 1031 1032 res = updateCaptureStream(l.mParameters); 1033 if (res != OK) { 1034 ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)", 1035 __FUNCTION__, mCameraId, strerror(-res), res); 1036 return res; 1037 } 1038 1039 if (mCaptureRequest.entryCount() == 0) { 1040 res = updateCaptureRequest(l.mParameters); 1041 if (res != OK) { 1042 ALOGE("%s: Camera %d: Can't create still image capture request: " 1043 "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); 1044 return res; 1045 } 1046 } 1047 1048 bool callbacksEnabled = l.mParameters.previewCallbackFlags & 1049 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK; 1050 bool recordingEnabled = (l.mParameters.state == Parameters::RECORD); 1051 1052 int streamSwitch = (callbacksEnabled ? 0x2 : 0x0) + 1053 (recordingEnabled ? 
0x1 : 0x0); 1054 switch ( streamSwitch ) { 1055 case 0: { // No recording, callbacks 1056 uint8_t streamIds[2] = { mPreviewStreamId, mCaptureStreamId }; 1057 res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, 1058 streamIds, 2); 1059 break; 1060 } 1061 case 1: { // Recording 1062 uint8_t streamIds[3] = { mPreviewStreamId, mRecordingStreamId, 1063 mCaptureStreamId }; 1064 res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, 1065 streamIds, 3); 1066 break; 1067 } 1068 case 2: { // Callbacks 1069 uint8_t streamIds[3] = { mPreviewStreamId, mCallbackStreamId, 1070 mCaptureStreamId }; 1071 res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, 1072 streamIds, 3); 1073 break; 1074 } 1075 case 3: { // Both 1076 uint8_t streamIds[4] = { mPreviewStreamId, mCallbackStreamId, 1077 mRecordingStreamId, mCaptureStreamId }; 1078 res = mCaptureRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS, 1079 streamIds, 4); 1080 break; 1081 } 1082 }; 1083 if (res != OK) { 1084 ALOGE("%s: Camera %d: Unable to set up still image capture request: " 1085 "%s (%d)", 1086 __FUNCTION__, mCameraId, strerror(-res), res); 1087 return res; 1088 } 1089 res = mCaptureRequest.sort(); 1090 if (res != OK) { 1091 ALOGE("%s: Camera %d: Unable to sort capture request: %s (%d)", 1092 __FUNCTION__, mCameraId, strerror(-res), res); 1093 return res; 1094 } 1095 1096 CameraMetadata captureCopy = mCaptureRequest; 1097 if (captureCopy.entryCount() == 0) { 1098 ALOGE("%s: Camera %d: Unable to copy capture request for HAL device", 1099 __FUNCTION__, mCameraId); 1100 return NO_MEMORY; 1101 } 1102 1103 if (l.mParameters.state == Parameters::PREVIEW) { 1104 res = mDevice->clearStreamingRequest(); 1105 if (res != OK) { 1106 ALOGE("%s: Camera %d: Unable to stop preview for still capture: " 1107 "%s (%d)", 1108 __FUNCTION__, mCameraId, strerror(-res), res); 1109 return res; 1110 } 1111 } 1112 // TODO: Capture should be atomic with setStreamingRequest here 1113 res = mDevice->capture(captureCopy); 1114 if 
(res != OK) { 1115 ALOGE("%s: Camera %d: Unable to submit still image capture request: " 1116 "%s (%d)", 1117 __FUNCTION__, mCameraId, strerror(-res), res); 1118 return res; 1119 } 1120 1121 switch (l.mParameters.state) { 1122 case Parameters::PREVIEW: 1123 l.mParameters.state = Parameters::STILL_CAPTURE; 1124 res = commandStopFaceDetectionL(l.mParameters); 1125 if (res != OK) { 1126 ALOGE("%s: Camera %d: Unable to stop face detection for still capture", 1127 __FUNCTION__, mCameraId); 1128 return res; 1129 } 1130 break; 1131 case Parameters::RECORD: 1132 l.mParameters.state = Parameters::VIDEO_SNAPSHOT; 1133 break; 1134 default: 1135 ALOGE("%s: Camera %d: Unknown state for still capture!", 1136 __FUNCTION__, mCameraId); 1137 return INVALID_OPERATION; 1138 } 1139 1140 return OK; 1141} 1142 1143status_t Camera2Client::setParameters(const String8& params) { 1144 ATRACE_CALL(); 1145 ALOGV("%s: E", __FUNCTION__); 1146 Mutex::Autolock icl(mICameraLock); 1147 status_t res; 1148 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 1149 1150 SharedParameters::Lock l(mParameters); 1151 1152 res = l.mParameters.set(params); 1153 if (res != OK) return res; 1154 1155 res = updateRequests(l.mParameters); 1156 1157 return res; 1158} 1159 1160String8 Camera2Client::getParameters() const { 1161 ATRACE_CALL(); 1162 Mutex::Autolock icl(mICameraLock); 1163 if ( checkPid(__FUNCTION__) != OK) return String8(); 1164 1165 SharedParameters::ReadLock l(mParameters); 1166 1167 // TODO: Deal with focus distances 1168 return l.mParameters.paramsFlattened; 1169} 1170 1171status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) { 1172 ATRACE_CALL(); 1173 Mutex::Autolock icl(mICameraLock); 1174 status_t res; 1175 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 1176 1177 ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId, 1178 cmd, arg1, arg2); 1179 1180 switch (cmd) { 1181 case CAMERA_CMD_START_SMOOTH_ZOOM: 1182 return commandStartSmoothZoomL(); 
1183 case CAMERA_CMD_STOP_SMOOTH_ZOOM: 1184 return commandStopSmoothZoomL(); 1185 case CAMERA_CMD_SET_DISPLAY_ORIENTATION: 1186 return commandSetDisplayOrientationL(arg1); 1187 case CAMERA_CMD_ENABLE_SHUTTER_SOUND: 1188 return commandEnableShutterSoundL(arg1 == 1); 1189 case CAMERA_CMD_PLAY_RECORDING_SOUND: 1190 return commandPlayRecordingSoundL(); 1191 case CAMERA_CMD_START_FACE_DETECTION: 1192 return commandStartFaceDetectionL(arg1); 1193 case CAMERA_CMD_STOP_FACE_DETECTION: { 1194 SharedParameters::Lock l(mParameters); 1195 return commandStopFaceDetectionL(l.mParameters); 1196 } 1197 case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG: 1198 return commandEnableFocusMoveMsgL(arg1 == 1); 1199 case CAMERA_CMD_PING: 1200 return commandPingL(); 1201 case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT: 1202 return commandSetVideoBufferCountL(arg1); 1203 default: 1204 ALOGE("%s: Unknown command %d (arguments %d, %d)", 1205 __FUNCTION__, cmd, arg1, arg2); 1206 return BAD_VALUE; 1207 } 1208} 1209 1210status_t Camera2Client::commandStartSmoothZoomL() { 1211 ALOGE("%s: Unimplemented!", __FUNCTION__); 1212 return OK; 1213} 1214 1215status_t Camera2Client::commandStopSmoothZoomL() { 1216 ALOGE("%s: Unimplemented!", __FUNCTION__); 1217 return OK; 1218} 1219 1220status_t Camera2Client::commandSetDisplayOrientationL(int degrees) { 1221 int transform = Parameters::degToTransform(degrees, 1222 mCameraFacing == CAMERA_FACING_FRONT); 1223 if (transform == -1) { 1224 ALOGE("%s: Camera %d: Error setting %d as display orientation value", 1225 __FUNCTION__, mCameraId, degrees); 1226 return BAD_VALUE; 1227 } 1228 SharedParameters::Lock l(mParameters); 1229 if (transform != l.mParameters.previewTransform && 1230 mPreviewStreamId != NO_STREAM) { 1231 mDevice->setStreamTransform(mPreviewStreamId, transform); 1232 } 1233 l.mParameters.previewTransform = transform; 1234 return OK; 1235} 1236 1237status_t Camera2Client::commandEnableShutterSoundL(bool enable) { 1238 SharedParameters::Lock l(mParameters); 1239 if 
(enable) { 1240 l.mParameters.playShutterSound = true; 1241 return OK; 1242 } 1243 1244 // Disabling shutter sound may not be allowed. In that case only 1245 // allow the mediaserver process to disable the sound. 1246 char value[PROPERTY_VALUE_MAX]; 1247 property_get("ro.camera.sound.forced", value, "0"); 1248 if (strncmp(value, "0", 2) != 0) { 1249 // Disabling shutter sound is not allowed. Deny if the current 1250 // process is not mediaserver. 1251 if (getCallingPid() != getpid()) { 1252 ALOGE("Failed to disable shutter sound. Permission denied (pid %d)", 1253 getCallingPid()); 1254 return PERMISSION_DENIED; 1255 } 1256 } 1257 1258 l.mParameters.playShutterSound = false; 1259 return OK; 1260} 1261 1262status_t Camera2Client::commandPlayRecordingSoundL() { 1263 mCameraService->playSound(CameraService::SOUND_RECORDING); 1264 return OK; 1265} 1266 1267status_t Camera2Client::commandStartFaceDetectionL(int type) { 1268 ALOGV("%s: Camera %d: Starting face detection", 1269 __FUNCTION__, mCameraId); 1270 status_t res; 1271 SharedParameters::Lock l(mParameters); 1272 switch (l.mParameters.state) { 1273 case Parameters::DISCONNECTED: 1274 case Parameters::STOPPED: 1275 case Parameters::WAITING_FOR_PREVIEW_WINDOW: 1276 case Parameters::STILL_CAPTURE: 1277 ALOGE("%s: Camera %d: Cannot start face detection without preview active", 1278 __FUNCTION__, mCameraId); 1279 return INVALID_OPERATION; 1280 case Parameters::PREVIEW: 1281 case Parameters::RECORD: 1282 case Parameters::VIDEO_SNAPSHOT: 1283 // Good to go for starting face detect 1284 break; 1285 } 1286 // Ignoring type 1287 if (l.mParameters.fastInfo.bestFaceDetectMode == 1288 ANDROID_STATS_FACE_DETECTION_OFF) { 1289 ALOGE("%s: Camera %d: Face detection not supported", 1290 __FUNCTION__, mCameraId); 1291 return INVALID_OPERATION; 1292 } 1293 if (l.mParameters.enableFaceDetect) return OK; 1294 1295 l.mParameters.enableFaceDetect = true; 1296 1297 res = updateRequests(l.mParameters); 1298 1299 return res; 1300} 1301 
1302status_t Camera2Client::commandStopFaceDetectionL(Parameters ¶ms) { 1303 status_t res = OK; 1304 ALOGV("%s: Camera %d: Stopping face detection", 1305 __FUNCTION__, mCameraId); 1306 1307 if (!params.enableFaceDetect) return OK; 1308 1309 params.enableFaceDetect = false; 1310 1311 if (params.state == Parameters::PREVIEW 1312 || params.state == Parameters::RECORD 1313 || params.state == Parameters::VIDEO_SNAPSHOT) { 1314 res = updateRequests(params); 1315 } 1316 1317 return res; 1318} 1319 1320status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) { 1321 SharedParameters::Lock l(mParameters); 1322 l.mParameters.enableFocusMoveMessages = enable; 1323 1324 return OK; 1325} 1326 1327status_t Camera2Client::commandPingL() { 1328 // Always ping back if access is proper and device is alive 1329 SharedParameters::Lock l(mParameters); 1330 if (l.mParameters.state != Parameters::DISCONNECTED) { 1331 return OK; 1332 } else { 1333 return NO_INIT; 1334 } 1335} 1336 1337status_t Camera2Client::commandSetVideoBufferCountL(size_t count) { 1338 if (recordingEnabledL()) { 1339 ALOGE("%s: Camera %d: Error setting video buffer count after " 1340 "recording was started", __FUNCTION__, mCameraId); 1341 return INVALID_OPERATION; 1342 } 1343 1344 // 32 is the current upper limit on the video buffer count for BufferQueue 1345 if (count > 32) { 1346 ALOGE("%s: Camera %d: Error setting %d as video buffer count value", 1347 __FUNCTION__, mCameraId, count); 1348 return BAD_VALUE; 1349 } 1350 1351 // Need to reallocate memory for heap 1352 if (mRecordingHeapCount != count) { 1353 if (mRecordingHeap != 0) { 1354 mRecordingHeap.clear(); 1355 mRecordingHeap = NULL; 1356 } 1357 mRecordingHeapCount = count; 1358 } 1359 1360 return OK; 1361} 1362 1363/** Device-related methods */ 1364 1365void Camera2Client::notifyError(int errorCode, int arg1, int arg2) { 1366 ALOGE("Error condition %d reported by HAL, arguments %d, %d", errorCode, arg1, arg2); 1367} 1368 1369void 
// Shutter notification from the HAL; log-only for now.
void Camera2Client::notifyShutter(int frameNumber, nsecs_t timestamp) {
    ALOGV("%s: Shutter notification for frame %d at time %lld", __FUNCTION__,
            frameNumber, timestamp);
}

// Translate HAL autofocus state transitions into the client-facing
// CAMERA_MSG_FOCUS / CAMERA_MSG_FOCUS_MOVE callbacks. The mapping depends
// on the active focus mode; messages are computed under the parameter lock
// and dispatched afterwards so the client callback can re-enter the API.
void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) {
    ALOGV("%s: Autofocus state now %d, last trigger %d",
            __FUNCTION__, newState, triggerId);
    bool sendCompletedMessage = false;
    bool sendMovingMessage = false;

    bool success = false;
    bool afInMotion = false;
    {
        SharedParameters::Lock l(mParameters);
        switch (l.mParameters.focusMode) {
            case Parameters::FOCUS_MODE_AUTO:
            case Parameters::FOCUS_MODE_MACRO:
                // Don't send notifications upstream if they're not for the current AF
                // trigger. For example, if cancel was called in between, or if we
                // already sent a notification about this AF call.
                if (triggerId != l.mParameters.currentAfTriggerId) break;
                switch (newState) {
                    case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                        success = true;
                        // no break -- both locked states complete the AF cycle;
                        // only FOCUSED_LOCKED reports success.
                    case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                        sendCompletedMessage = true;
                        // Mark the trigger consumed so repeats are ignored
                        l.mParameters.currentAfTriggerId = -1;
                        break;
                    case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN:
                        // Just starting focusing, ignore
                        break;
                    case ANDROID_CONTROL_AF_STATE_INACTIVE:
                    case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
                    case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
                    default:
                        // Unexpected in AUTO/MACRO mode
                        ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d",
                                __FUNCTION__, newState);
                        break;
                }
                break;
            case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO:
            case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE:
                switch (newState) {
                    case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED:
                        success = true;
                        // no break -- same completion handling as NOT_FOCUSED_LOCKED
                    case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED:
                        // Don't send notifications upstream if they're not for
                        // the current AF trigger. For example, if cancel was
                        // called in between, or if we already sent a
                        // notification about this AF call.
                        // Send both a 'AF done' callback and a 'AF move' callback
                        if (triggerId != l.mParameters.currentAfTriggerId) break;
                        sendCompletedMessage = true;
                        afInMotion = false;
                        if (l.mParameters.enableFocusMoveMessages &&
                                l.mParameters.afInMotion) {
                            sendMovingMessage = true;
                        }
                        l.mParameters.currentAfTriggerId = -1;
                        break;
                    case ANDROID_CONTROL_AF_STATE_INACTIVE:
                        // Cancel was called, or we switched state; care if
                        // currently moving
                        afInMotion = false;
                        if (l.mParameters.enableFocusMoveMessages &&
                                l.mParameters.afInMotion) {
                            sendMovingMessage = true;
                        }
                        break;
                    case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN:
                        // Start passive scan, inform upstream
                        afInMotion = true;
                        // no break -- scan start and scan stop both emit a
                        // focus-move message when enabled.
                    case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED:
                        // Stop passive scan, inform upstream
                        if (l.mParameters.enableFocusMoveMessages) {
                            sendMovingMessage = true;
                        }
                        break;
                }
                // Persist motion state so the next transition can detect edges
                l.mParameters.afInMotion = afInMotion;
                break;
            case Parameters::FOCUS_MODE_EDOF:
            case Parameters::FOCUS_MODE_INFINITY:
            case Parameters::FOCUS_MODE_FIXED:
            default:
                // Fixed-focus modes should only ever report INACTIVE
                if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) {
                    ALOGE("%s: Unexpected AF state change %d (ID %d) in focus mode %d",
                            __FUNCTION__, newState, triggerId, l.mParameters.focusMode);
                }
        }
    }
    // Dispatch outside the parameter lock to allow client re-entrancy
    if (sendMovingMessage) {
        Mutex::Autolock iccl(mICameraClientLock);
        if (mCameraClient != 0) {
            mCameraClient->notifyCallback(CAMERA_MSG_FOCUS_MOVE,
                    afInMotion ? 1 : 0, 0);
        }
    }
    if (sendCompletedMessage) {
        Mutex::Autolock iccl(mICameraClientLock);
        if (mCameraClient != 0) {
            mCameraClient->notifyCallback(CAMERA_MSG_FOCUS, success ? 1 : 0, 0);
        }
    }
}

// AE state notification; log-only for now.
void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) {
    ALOGV("%s: Autoexposure state now %d, last trigger %d",
            __FUNCTION__, newState, triggerId);
}

// AWB state notification; log-only for now.
void Camera2Client::notifyAutoWhitebalance(uint8_t newState, int triggerId) {
    ALOGV("%s: Auto-whitebalance state now %d, last trigger %d",
            __FUNCTION__, newState, triggerId);
}

// Holds a weak reference to the client so the thread never keeps it alive.
Camera2Client::FrameProcessor::FrameProcessor(wp<Camera2Client> client):
        Thread(false), mClient(client) {
}

Camera2Client::FrameProcessor::~FrameProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
}

// Dump the most recently received frame metadata for debugging.
void Camera2Client::FrameProcessor::dump(int fd, const Vector<String16>& args) {
    String8 result("    Latest received frame:\n");
    write(fd, result.string(), result.size());
    mLastFrame.dump(fd, 2, 6);
}

// One iteration of the frame-processing loop: wait (with timeout) for the
// device to produce metadata, then process it. Returning false ends the
// thread; that happens once the owning client has been destroyed.
bool Camera2Client::FrameProcessor::threadLoop() {
    status_t res;

    sp<Camera2Device> device;
    {
        // Promote only long enough to grab the device; don't pin the client
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return false;
        device = client->mDevice;
    }

    res = device->waitForNextFrame(kWaitDuration);
    if (res == OK) {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return false;
        processNewFrames(client);
    } else if (res != TIMED_OUT) {
        // TIMED_OUT is the normal idle case; anything else is a real error
        ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
                "frames: %s (%d)", strerror(-res), res);
    }

    return true;
}
processFaceDetect(frame, client); 1540 if (res != OK) break; 1541 1542 mLastFrame.acquire(frame); 1543 } 1544 if (res != NOT_ENOUGH_DATA) { 1545 ALOGE("%s: Camera %d: Error getting next frame: %s (%d)", 1546 __FUNCTION__, client->mCameraId, strerror(-res), res); 1547 return; 1548 } 1549 1550 return; 1551} 1552 1553status_t Camera2Client::FrameProcessor::processFaceDetect( 1554 const CameraMetadata &frame, sp<Camera2Client> &client) { 1555 status_t res; 1556 camera_metadata_ro_entry_t entry; 1557 bool enableFaceDetect; 1558 int maxFaces; 1559 { 1560 SharedParameters::Lock l(client->mParameters); 1561 enableFaceDetect = l.mParameters.enableFaceDetect; 1562 } 1563 entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE); 1564 1565 // TODO: This should be an error once implementations are compliant 1566 if (entry.count == 0) { 1567 return OK; 1568 } 1569 1570 uint8_t faceDetectMode = entry.data.u8[0]; 1571 1572 camera_frame_metadata metadata; 1573 Vector<camera_face_t> faces; 1574 metadata.number_of_faces = 0; 1575 1576 if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) { 1577 SharedParameters::Lock l(client->mParameters); 1578 entry = frame.find(ANDROID_STATS_FACE_RECTANGLES); 1579 if (entry.count == 0) { 1580 ALOGE("%s: Camera %d: Unable to read face rectangles", 1581 __FUNCTION__, client->mCameraId); 1582 return res; 1583 } 1584 metadata.number_of_faces = entry.count / 4; 1585 if (metadata.number_of_faces > 1586 l.mParameters.fastInfo.maxFaces) { 1587 ALOGE("%s: Camera %d: More faces than expected! 
(Got %d, max %d)", 1588 __FUNCTION__, client->mCameraId, 1589 metadata.number_of_faces, l.mParameters.fastInfo.maxFaces); 1590 return res; 1591 } 1592 const int32_t *faceRects = entry.data.i32; 1593 1594 entry = frame.find(ANDROID_STATS_FACE_SCORES); 1595 if (entry.count == 0) { 1596 ALOGE("%s: Camera %d: Unable to read face scores", 1597 __FUNCTION__, client->mCameraId); 1598 return res; 1599 } 1600 const uint8_t *faceScores = entry.data.u8; 1601 1602 const int32_t *faceLandmarks = NULL; 1603 const int32_t *faceIds = NULL; 1604 1605 if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) { 1606 entry = frame.find(ANDROID_STATS_FACE_LANDMARKS); 1607 if (entry.count == 0) { 1608 ALOGE("%s: Camera %d: Unable to read face landmarks", 1609 __FUNCTION__, client->mCameraId); 1610 return res; 1611 } 1612 faceLandmarks = entry.data.i32; 1613 1614 entry = frame.find(ANDROID_STATS_FACE_IDS); 1615 1616 if (entry.count == 0) { 1617 ALOGE("%s: Camera %d: Unable to read face IDs", 1618 __FUNCTION__, client->mCameraId); 1619 return res; 1620 } 1621 faceIds = entry.data.i32; 1622 } 1623 1624 faces.setCapacity(metadata.number_of_faces); 1625 1626 for (int i = 0; i < metadata.number_of_faces; i++) { 1627 camera_face_t face; 1628 1629 face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]); 1630 face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]); 1631 face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]); 1632 face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]); 1633 1634 face.score = faceScores[i]; 1635 if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) { 1636 face.id = faceIds[i]; 1637 face.left_eye[0] = 1638 l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]); 1639 face.left_eye[1] = 1640 l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]); 1641 face.right_eye[0] = 1642 l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]); 1643 face.right_eye[1] = 1644 l.mParameters.arrayYToNormalized(faceLandmarks[i*6 
+ 3]); 1645 face.mouth[0] = 1646 l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]); 1647 face.mouth[1] = 1648 l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]); 1649 } else { 1650 face.id = 0; 1651 face.left_eye[0] = face.left_eye[1] = -2000; 1652 face.right_eye[0] = face.right_eye[1] = -2000; 1653 face.mouth[0] = face.mouth[1] = -2000; 1654 } 1655 faces.push_back(face); 1656 } 1657 1658 metadata.faces = faces.editArray(); 1659 } 1660 1661 if (metadata.number_of_faces != 0) { 1662 Mutex::Autolock iccl(client->mICameraClientLock); 1663 if (client->mCameraClient != NULL) { 1664 client->mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA, 1665 NULL, &metadata); 1666 } 1667 } 1668 return OK; 1669} 1670 1671void Camera2Client::onCallbackAvailable() { 1672 ATRACE_CALL(); 1673 status_t res; 1674 ALOGV("%s: Camera %d: Preview callback available", __FUNCTION__, mCameraId); 1675 1676 int callbackHeapId; 1677 sp<Camera2Heap> callbackHeap; 1678 size_t heapIdx; 1679 1680 CpuConsumer::LockedBuffer imgBuffer; 1681 ALOGV("%s: Getting buffer", __FUNCTION__); 1682 res = mCallbackConsumer->lockNextBuffer(&imgBuffer); 1683 if (res != OK) { 1684 ALOGE("%s: Camera %d: Error receiving next callback buffer: " 1685 "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); 1686 return; 1687 } 1688 1689 { 1690 SharedParameters::Lock l(mParameters); 1691 1692 if ( l.mParameters.state != Parameters::PREVIEW 1693 && l.mParameters.state != Parameters::RECORD 1694 && l.mParameters.state != Parameters::VIDEO_SNAPSHOT) { 1695 ALOGV("%s: Camera %d: No longer streaming", 1696 __FUNCTION__, mCameraId); 1697 mCallbackConsumer->unlockBuffer(imgBuffer); 1698 return; 1699 } 1700 1701 if (! 
(l.mParameters.previewCallbackFlags & 1702 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) ) { 1703 ALOGV("%s: No longer enabled, dropping", __FUNCTION__); 1704 mCallbackConsumer->unlockBuffer(imgBuffer); 1705 return; 1706 } 1707 if ((l.mParameters.previewCallbackFlags & 1708 CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) && 1709 !l.mParameters.previewCallbackOneShot) { 1710 ALOGV("%s: One shot mode, already sent, dropping", __FUNCTION__); 1711 mCallbackConsumer->unlockBuffer(imgBuffer); 1712 return; 1713 } 1714 1715 if (imgBuffer.format != l.mParameters.previewFormat) { 1716 ALOGE("%s: Camera %d: Unexpected format for callback: " 1717 "%x, expected %x", __FUNCTION__, mCameraId, 1718 imgBuffer.format, l.mParameters.previewFormat); 1719 mCallbackConsumer->unlockBuffer(imgBuffer); 1720 return; 1721 } 1722 1723 size_t bufferSize = calculateBufferSize(imgBuffer.width, imgBuffer.height, 1724 imgBuffer.format, imgBuffer.stride); 1725 size_t currentBufferSize = (mCallbackHeap == 0) ? 1726 0 : (mCallbackHeap->mHeap->getSize() / kCallbackHeapCount); 1727 if (bufferSize != currentBufferSize) { 1728 mCallbackHeap.clear(); 1729 mCallbackHeap = new Camera2Heap(bufferSize, kCallbackHeapCount, 1730 "Camera2Client::CallbackHeap"); 1731 if (mCallbackHeap->mHeap->getSize() == 0) { 1732 ALOGE("%s: Camera %d: Unable to allocate memory for callbacks", 1733 __FUNCTION__, mCameraId); 1734 mCallbackConsumer->unlockBuffer(imgBuffer); 1735 return; 1736 } 1737 1738 mCallbackHeapHead = 0; 1739 mCallbackHeapFree = kCallbackHeapCount; 1740 mCallbackHeapId++; 1741 } 1742 1743 if (mCallbackHeapFree == 0) { 1744 ALOGE("%s: Camera %d: No free callback buffers, dropping frame", 1745 __FUNCTION__, mCameraId); 1746 mCallbackConsumer->unlockBuffer(imgBuffer); 1747 return; 1748 } 1749 heapIdx = mCallbackHeapHead; 1750 callbackHeap = mCallbackHeap; 1751 callbackHeapId = mCallbackHeapId; 1752 1753 mCallbackHeapHead = (mCallbackHeapHead + 1) & kCallbackHeapCount; 1754 mCallbackHeapFree--; 1755 1756 // TODO: Get rid of 
this memcpy by passing the gralloc queue all the way 1757 // to app 1758 1759 ssize_t offset; 1760 size_t size; 1761 sp<IMemoryHeap> heap = 1762 mCallbackHeap->mBuffers[heapIdx]->getMemory(&offset, 1763 &size); 1764 uint8_t *data = (uint8_t*)heap->getBase() + offset; 1765 memcpy(data, imgBuffer.data, bufferSize); 1766 1767 ALOGV("%s: Freeing buffer", __FUNCTION__); 1768 mCallbackConsumer->unlockBuffer(imgBuffer); 1769 1770 // In one-shot mode, stop sending callbacks after the first one 1771 if (l.mParameters.previewCallbackFlags & 1772 CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) { 1773 ALOGV("%s: clearing oneshot", __FUNCTION__); 1774 l.mParameters.previewCallbackOneShot = false; 1775 } 1776 } 1777 1778 // Call outside parameter lock to allow re-entrancy from notification 1779 { 1780 Mutex::Autolock iccl(mICameraClientLock); 1781 if (mCameraClient != 0) { 1782 ALOGV("%s: Camera %d: Invoking client data callback", 1783 __FUNCTION__, mCameraId); 1784 mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_FRAME, 1785 callbackHeap->mBuffers[heapIdx], NULL); 1786 } 1787 } 1788 1789 SharedParameters::Lock l(mParameters); 1790 // Only increment free if we're still using the same heap 1791 if (mCallbackHeapId == callbackHeapId) { 1792 mCallbackHeapFree++; 1793 } 1794 1795 ALOGV("%s: exit", __FUNCTION__); 1796} 1797 1798void Camera2Client::onCaptureAvailable() { 1799 ATRACE_CALL(); 1800 status_t res; 1801 sp<Camera2Heap> captureHeap; 1802 ALOGV("%s: Camera %d: Still capture available", __FUNCTION__, mCameraId); 1803 1804 { 1805 SharedParameters::Lock l(mParameters); 1806 CpuConsumer::LockedBuffer imgBuffer; 1807 1808 res = mCaptureConsumer->lockNextBuffer(&imgBuffer); 1809 if (res != OK) { 1810 ALOGE("%s: Camera %d: Error receiving still image buffer: %s (%d)", 1811 __FUNCTION__, mCameraId, strerror(-res), res); 1812 return; 1813 } 1814 1815 // TODO: Signal errors here upstream 1816 if (l.mParameters.state != Parameters::STILL_CAPTURE && 1817 l.mParameters.state != 
// Handle a new recording frame: stash the acquired gralloc buffer and send
// the encoder a small metadata buffer (type word + buffer_handle_t) that
// points at it. The gralloc buffer itself is held in mRecordingBuffers
// until releaseRecordingFrame() returns the matching handle.
void Camera2Client::onRecordingFrameAvailable() {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> recordingHeap;
    size_t heapIdx = 0;
    nsecs_t timestamp;
    {
        SharedParameters::Lock l(mParameters);

        BufferItemConsumer::BufferItem imgBuffer;
        res = mRecordingConsumer->acquireBuffer(&imgBuffer);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error receiving recording buffer: %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
            return;
        }
        timestamp = imgBuffer.mTimestamp;

        mRecordingFrameCount++;
        ALOGV("OnRecordingFrame: Frame %d", mRecordingFrameCount);

        // TODO: Signal errors here upstream
        if (l.mParameters.state != Parameters::RECORD &&
                l.mParameters.state != Parameters::VIDEO_SNAPSHOT) {
            // Late frames after stopRecording are expected; drop quietly
            ALOGV("%s: Camera %d: Discarding recording image buffers received after "
                    "recording done",
                    __FUNCTION__, mCameraId);
            mRecordingConsumer->releaseBuffer(imgBuffer);
            return;
        }

        if (mRecordingHeap == 0) {
            // Each heap slot holds a 4-byte metadata type tag followed by a
            // buffer_handle_t (see the write below); lazily allocated so
            // commandSetVideoBufferCountL can resize before recording starts.
            const size_t bufferSize = 4 + sizeof(buffer_handle_t);
            ALOGV("%s: Camera %d: Creating recording heap with %d buffers of "
                    "size %d bytes", __FUNCTION__, mCameraId,
                    mRecordingHeapCount, bufferSize);

            mRecordingHeap = new Camera2Heap(bufferSize, mRecordingHeapCount,
                    "Camera2Client::RecordingHeap");
            if (mRecordingHeap->mHeap->getSize() == 0) {
                ALOGE("%s: Camera %d: Unable to allocate memory for recording",
                        __FUNCTION__, mCameraId);
                mRecordingConsumer->releaseBuffer(imgBuffer);
                return;
            }
            // Sanity check: any still-valid slot here means buffers leaked
            // from a previous recording session
            for (size_t i = 0; i < mRecordingBuffers.size(); i++) {
                if (mRecordingBuffers[i].mBuf !=
                        BufferItemConsumer::INVALID_BUFFER_SLOT) {
                    ALOGE("%s: Camera %d: Non-empty recording buffers list!",
                            __FUNCTION__, mCameraId);
                }
            }
            mRecordingBuffers.clear();
            mRecordingBuffers.setCapacity(mRecordingHeapCount);
            mRecordingBuffers.insertAt(0, mRecordingHeapCount);

            mRecordingHeapHead = 0;
            mRecordingHeapFree = mRecordingHeapCount;
        }

        if ( mRecordingHeapFree == 0) {
            ALOGE("%s: Camera %d: No free recording buffers, dropping frame",
                    __FUNCTION__, mCameraId);
            mRecordingConsumer->releaseBuffer(imgBuffer);
            return;
        }

        // Claim the next ring-buffer slot
        heapIdx = mRecordingHeapHead;
        mRecordingHeapHead = (mRecordingHeapHead + 1) % mRecordingHeapCount;
        mRecordingHeapFree--;

        ALOGV("%s: Camera %d: Timestamp %lld",
                __FUNCTION__, mCameraId, timestamp);

        ssize_t offset;
        size_t size;
        sp<IMemoryHeap> heap =
                mRecordingHeap->mBuffers[heapIdx]->getMemory(&offset,
                        &size);

        // Fill the metadata buffer: [0..3] = buffer type tag,
        // [4..] = the gralloc handle the encoder should read from.
        // NOTE(review): the handle is written at a fixed +4 offset, not
        // +sizeof(uint32_t) aligned for the handle type — matches the
        // kMetadataBufferTypeGrallocSource consumer contract; verify there.
        uint8_t *data = (uint8_t*)heap->getBase() + offset;
        uint32_t type = kMetadataBufferTypeGrallocSource;
        *((uint32_t*)data) = type;
        *((buffer_handle_t*)(data + 4)) = imgBuffer.mGraphicBuffer->handle;
        ALOGV("%s: Camera %d: Sending out buffer_handle_t %p",
                __FUNCTION__, mCameraId, imgBuffer.mGraphicBuffer->handle);
        // Keep the BufferItem so the gralloc buffer stays valid until the
        // client returns it via releaseRecordingFrame
        mRecordingBuffers.replaceAt(imgBuffer, heapIdx);
        recordingHeap = mRecordingHeap;
    }

    // Call outside locked parameters to allow re-entrancy from notification
    Mutex::Autolock iccl(mICameraClientLock);
    if (mCameraClient != 0) {
        mCameraClient->dataCallbackTimestamp(timestamp,
                CAMERA_MSG_VIDEO_FRAME,
                recordingHeap->mBuffers[heapIdx]);
    }
}
Parameters::VIDEO_SNAPSHOT) {
        // Tail of the enclosing method (its opening lines are above this
        // chunk): while recording / video snapshot is active, re-submit the
        // rebuilt recording request as the repeating (streaming) request so
        // the parameter changes take effect on the running stream.
        res = mDevice->setStreamingRequest(mRecordingRequest);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
    }
    return res;
}

// Create, resize, or reuse the preview output stream so it matches the
// current preview size in params, then apply the preview transform.
// Returns OK on success, or the device error code on failure.
status_t Camera2Client::updatePreviewStream(const Parameters &params) {
    ATRACE_CALL();
    status_t res;

    if (mPreviewStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = mDevice->getStreamInfo(mPreviewStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying preview stream info: "
                    "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.previewWidth ||
                currentHeight != (uint32_t)params.previewHeight) {
            ALOGV("%s: Camera %d: Preview size switch: %d x %d -> %d x %d",
                    __FUNCTION__, mCameraId, currentWidth, currentHeight,
                    params.previewWidth, params.previewHeight);
            // Size changed: tear down the old stream. Wait for in-flight
            // requests to finish first so no buffers are still in use.
            res = mDevice->waitUntilDrained();
            if (res != OK) {
                ALOGE("%s: Camera %d: Error waiting for preview to drain: "
                        "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
                return res;
            }
            res = mDevice->deleteStream(mPreviewStreamId);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for preview: %s (%d)", __FUNCTION__, mCameraId,
                        strerror(-res), res);
                return res;
            }
            mPreviewStreamId = NO_STREAM;
        }
    }

    if (mPreviewStreamId == NO_STREAM) {
        // (Re)create the preview stream against the current preview window
        res = mDevice->createStream(mPreviewWindow,
                params.previewWidth, params.previewHeight,
                CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, 0,
                &mPreviewStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to create preview stream: %s (%d)",
                    __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
    }

    res = mDevice->setStreamTransform(mPreviewStreamId,
            params.previewTransform);
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to set preview stream transform: "
                "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
        return res;
    }

    return OK;
}

// Build (if needed) the preview capture request from the PREVIEW template
// and refresh the parameter-derived entries shared by all request types.
status_t Camera2Client::updatePreviewRequest(const Parameters &params) {
    ATRACE_CALL();
    status_t res;
    if (mPreviewRequest.entryCount() == 0) {
        // Lazily create from the HAL's preview template on first use
        res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                &mPreviewRequest);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to create default preview request: "
                    "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
    }

    res = updateRequestCommon(&mPreviewRequest, params);
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to update common entries of preview "
                "request: %s (%d)", __FUNCTION__, mCameraId,
                strerror(-res), res);
        return res;
    }

    return OK;
}

// Create or resize the preview-callback (CPU-visible) output stream so it
// matches the preview size and the app-requested callback pixel format.
status_t Camera2Client::updateCallbackStream(const Parameters &params) {
    status_t res;

    if (mCallbackConsumer == 0) {
        // Create CPU buffer queue endpoint
        mCallbackConsumer = new CpuConsumer(kCallbackHeapCount);
        mCallbackWaiter = new CallbackWaiter(this);
        mCallbackConsumer->setFrameAvailableListener(mCallbackWaiter);
        mCallbackConsumer->setName(String8("Camera2Client::CallbackConsumer"));
        mCallbackWindow = new SurfaceTextureClient(
            mCallbackConsumer->getProducerInterface());
    }

    if (mCallbackStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight, currentFormat;
        res = mDevice->getStreamInfo(mCallbackStreamId,
                &currentWidth, &currentHeight, &currentFormat);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying callback output stream info: "
                    "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.previewWidth ||
                currentHeight != (uint32_t)params.previewHeight ||
                currentFormat != (uint32_t)params.previewFormat) {
            // Since size should only change while preview is not running,
            // assuming that all existing use of old callback stream is
            // completed.
            res = mDevice->deleteStream(mCallbackStreamId);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for callbacks: %s (%d)", __FUNCTION__, mCameraId,
                        strerror(-res), res);
                return res;
            }
            mCallbackStreamId = NO_STREAM;
        }
    }

    if (mCallbackStreamId == NO_STREAM) {
        ALOGV("Creating callback stream: %d %d format 0x%x",
                params.previewWidth, params.previewHeight,
                params.previewFormat);
        res = mDevice->createStream(mCallbackWindow,
                params.previewWidth, params.previewHeight,
                params.previewFormat, 0, &mCallbackStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for callbacks: "
                    "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
    }

    return OK;
}


// Create or resize the still-capture (JPEG) output stream, along with the
// CPU-side consumer and heap used to hand the compressed image to the API.
// The JPEG buffer size comes from the static ANDROID_JPEG_MAX_SIZE entry.
status_t Camera2Client::updateCaptureStream(const Parameters &params) {
    ATRACE_CALL();
    status_t res;
    // Find out buffer size for JPEG
    camera_metadata_ro_entry_t maxJpegSize =
            mParameters.staticInfo(ANDROID_JPEG_MAX_SIZE);
    if (maxJpegSize.count == 0) {
        ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
                __FUNCTION__, mCameraId);
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        mCaptureConsumer = new CpuConsumer(1);
        mCaptureConsumer->setFrameAvailableListener(new CaptureWaiter(this));
        mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
        mCaptureWindow = new SurfaceTextureClient(
            mCaptureConsumer->getProducerInterface());
        // Create memory for API consumption
        mCaptureHeap = new Camera2Heap(maxJpegSize.data.i32[0], 1,
                "Camera2Client::CaptureHeap");
        if (mCaptureHeap->mHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, mCameraId);
            return NO_MEMORY;
        }
    }

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = mDevice->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            res = mDevice->deleteStream(mCaptureStreamId);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__, mCameraId,
                        strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
        res = mDevice->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0],
                &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }

    }
    return OK;
}

// Build (if needed) the still-capture request from the STILL_CAPTURE
// template, refresh the common entries, and apply the JPEG-specific
// settings (thumbnail, quality, rotation, and optional GPS EXIF fields).
status_t Camera2Client::updateCaptureRequest(const Parameters &params) {
    ATRACE_CALL();
    status_t res;
    if (mCaptureRequest.entryCount() == 0) {
        res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_STILL_CAPTURE,
                &mCaptureRequest);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to create default still image request:"
                    " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
    }

    res = updateRequestCommon(&mCaptureRequest, params);
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to update common entries of capture "
                "request: %s (%d)", __FUNCTION__, mCameraId,
                strerror(-res), res);
        return res;
    }

    res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_SIZE,
            params.jpegThumbSize, 2);
    if (res != OK) return res;
    res = mCaptureRequest.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
            &params.jpegThumbQuality, 1);
    if (res != OK) return res;
    res = mCaptureRequest.update(ANDROID_JPEG_QUALITY,
            &params.jpegQuality, 1);
    if (res != OK) return res;
    res = mCaptureRequest.update(
            ANDROID_JPEG_ORIENTATION,
            &params.jpegRotation, 1);
    if (res != OK) return res;

    if (params.gpsEnabled) {
        // Propagate the app-supplied GPS data into the JPEG EXIF entries
        res = mCaptureRequest.update(
                ANDROID_JPEG_GPS_COORDINATES,
                params.gpsCoordinates, 3);
        if (res != OK) return res;
        res = mCaptureRequest.update(
                ANDROID_JPEG_GPS_TIMESTAMP,
                &params.gpsTimestamp, 1);
        if (res != OK) return res;
        res = mCaptureRequest.update(
                ANDROID_JPEG_GPS_PROCESSING_METHOD,
                params.gpsProcessingMethod);
        if (res != OK) return res;
    } else {
        // GPS disabled: make sure no stale location data stays in the request
        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_COORDINATES);
        if (res != OK) return res;
        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_TIMESTAMP);
        if (res != OK) return res;
        res = mCaptureRequest.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
        if (res != OK) return res;
    }

    return OK;
}

// Build (if needed) the recording request from the VIDEO_RECORD template
// and refresh the parameter-derived entries shared by all request types.
status_t Camera2Client::updateRecordingRequest(const Parameters &params) {
    ATRACE_CALL();
    status_t res;
    if (mRecordingRequest.entryCount() == 0) {
        res = mDevice->createDefaultRequest(CAMERA2_TEMPLATE_VIDEO_RECORD,
                &mRecordingRequest);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to create default recording request:"
                    " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
    }

    res = updateRequestCommon(&mRecordingRequest, params);
    if (res != OK) {
        ALOGE("%s: Camera %d: Unable to update common entries of recording "
                "request: %s (%d)", __FUNCTION__, mCameraId,
                strerror(-res), res);
        return res;
    }

    return OK;
}

// Create or resize the recording output stream to match the video size.
// (Continues past the end of this chunk's line range.)
status_t Camera2Client::updateRecordingStream(const Parameters &params) {
    status_t res;

    if (mRecordingConsumer == 0) {
        // Create CPU buffer queue endpoint. We need one more buffer here so that we can
        // always acquire and free a buffer when the heap is full; otherwise the consumer
        // will have buffers in flight we'll never clear out.
        mRecordingConsumer = new BufferItemConsumer(
                GRALLOC_USAGE_HW_VIDEO_ENCODER,
                mRecordingHeapCount + 1,
                true);
        mRecordingConsumer->setFrameAvailableListener(new RecordingWaiter(this));
        mRecordingConsumer->setName(String8("Camera2Client::RecordingConsumer"));
        mRecordingWindow = new SurfaceTextureClient(
            mRecordingConsumer->getProducerInterface());
        // Allocate memory later, since we don't know buffer size until receipt
    }

    if (mRecordingStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = mDevice->getStreamInfo(mRecordingStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying recording output stream info: "
                    "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.videoWidth ||
                currentHeight != (uint32_t)params.videoHeight) {
            // TODO: Should wait to be sure previous recording has finished
            res = mDevice->deleteStream(mRecordingStreamId);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for recording: %s (%d)", __FUNCTION__, mCameraId,
                        strerror(-res), res);
                return res;
            }
            mRecordingStreamId = NO_STREAM;
        }
    }
2340 2341 if (mRecordingStreamId == NO_STREAM) { 2342 mRecordingFrameCount = 0; 2343 res = mDevice->createStream(mRecordingWindow, 2344 params.videoWidth, params.videoHeight, 2345 CAMERA2_HAL_PIXEL_FORMAT_OPAQUE, 0, &mRecordingStreamId); 2346 if (res != OK) { 2347 ALOGE("%s: Camera %d: Can't create output stream for recording: " 2348 "%s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); 2349 return res; 2350 } 2351 } 2352 2353 return OK; 2354} 2355 2356status_t Camera2Client::updateRequestCommon(CameraMetadata *request, 2357 const Parameters ¶ms) const { 2358 ATRACE_CALL(); 2359 status_t res; 2360 res = request->update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, 2361 params.previewFpsRange, 2); 2362 if (res != OK) return res; 2363 2364 uint8_t wbMode = params.autoWhiteBalanceLock ? 2365 (uint8_t)ANDROID_CONTROL_AWB_LOCKED : params.wbMode; 2366 res = request->update(ANDROID_CONTROL_AWB_MODE, 2367 &wbMode, 1); 2368 if (res != OK) return res; 2369 res = request->update(ANDROID_CONTROL_EFFECT_MODE, 2370 ¶ms.effectMode, 1); 2371 if (res != OK) return res; 2372 res = request->update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, 2373 ¶ms.antibandingMode, 1); 2374 if (res != OK) return res; 2375 2376 uint8_t controlMode = 2377 (params.sceneMode == ANDROID_CONTROL_SCENE_MODE_UNSUPPORTED) ? 
2378 ANDROID_CONTROL_AUTO : ANDROID_CONTROL_USE_SCENE_MODE; 2379 res = request->update(ANDROID_CONTROL_MODE, 2380 &controlMode, 1); 2381 if (res != OK) return res; 2382 if (controlMode == ANDROID_CONTROL_USE_SCENE_MODE) { 2383 res = request->update(ANDROID_CONTROL_SCENE_MODE, 2384 ¶ms.sceneMode, 1); 2385 if (res != OK) return res; 2386 } 2387 2388 uint8_t flashMode = ANDROID_FLASH_OFF; 2389 uint8_t aeMode; 2390 switch (params.flashMode) { 2391 case Parameters::FLASH_MODE_OFF: 2392 aeMode = ANDROID_CONTROL_AE_ON; break; 2393 case Parameters::FLASH_MODE_AUTO: 2394 aeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH; break; 2395 case Parameters::FLASH_MODE_ON: 2396 aeMode = ANDROID_CONTROL_AE_ON_ALWAYS_FLASH; break; 2397 case Parameters::FLASH_MODE_TORCH: 2398 aeMode = ANDROID_CONTROL_AE_ON; 2399 flashMode = ANDROID_FLASH_TORCH; 2400 break; 2401 case Parameters::FLASH_MODE_RED_EYE: 2402 aeMode = ANDROID_CONTROL_AE_ON_AUTO_FLASH_REDEYE; break; 2403 default: 2404 ALOGE("%s: Camera %d: Unknown flash mode %d", __FUNCTION__, 2405 mCameraId, params.flashMode); 2406 return BAD_VALUE; 2407 } 2408 if (params.autoExposureLock) aeMode = ANDROID_CONTROL_AE_LOCKED; 2409 2410 res = request->update(ANDROID_FLASH_MODE, 2411 &flashMode, 1); 2412 if (res != OK) return res; 2413 res = request->update(ANDROID_CONTROL_AE_MODE, 2414 &aeMode, 1); 2415 if (res != OK) return res; 2416 2417 float focusDistance = 0; // infinity focus in diopters 2418 uint8_t focusMode; 2419 switch (params.focusMode) { 2420 case Parameters::FOCUS_MODE_AUTO: 2421 case Parameters::FOCUS_MODE_MACRO: 2422 case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO: 2423 case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE: 2424 case Parameters::FOCUS_MODE_EDOF: 2425 focusMode = params.focusMode; 2426 break; 2427 case Parameters::FOCUS_MODE_INFINITY: 2428 case Parameters::FOCUS_MODE_FIXED: 2429 focusMode = ANDROID_CONTROL_AF_OFF; 2430 break; 2431 default: 2432 ALOGE("%s: Camera %d: Unknown focus mode %d", __FUNCTION__, 2433 mCameraId, 
params.focusMode); 2434 return BAD_VALUE; 2435 } 2436 res = request->update(ANDROID_LENS_FOCUS_DISTANCE, 2437 &focusDistance, 1); 2438 if (res != OK) return res; 2439 res = request->update(ANDROID_CONTROL_AF_MODE, 2440 &focusMode, 1); 2441 if (res != OK) return res; 2442 2443 size_t focusingAreasSize = params.focusingAreas.size() * 5; 2444 int32_t *focusingAreas = new int32_t[focusingAreasSize]; 2445 for (size_t i = 0; i < focusingAreasSize; i += 5) { 2446 if (params.focusingAreas[i].weight != 0) { 2447 focusingAreas[i + 0] = 2448 params.normalizedXToArray(params.focusingAreas[i].left); 2449 focusingAreas[i + 1] = 2450 params.normalizedYToArray(params.focusingAreas[i].top); 2451 focusingAreas[i + 2] = 2452 params.normalizedXToArray(params.focusingAreas[i].right); 2453 focusingAreas[i + 3] = 2454 params.normalizedYToArray(params.focusingAreas[i].bottom); 2455 } else { 2456 focusingAreas[i + 0] = 0; 2457 focusingAreas[i + 1] = 0; 2458 focusingAreas[i + 2] = 0; 2459 focusingAreas[i + 3] = 0; 2460 } 2461 focusingAreas[i + 4] = params.focusingAreas[i].weight; 2462 } 2463 res = request->update(ANDROID_CONTROL_AF_REGIONS, 2464 focusingAreas,focusingAreasSize); 2465 if (res != OK) return res; 2466 delete[] focusingAreas; 2467 2468 res = request->update(ANDROID_CONTROL_AE_EXP_COMPENSATION, 2469 ¶ms.exposureCompensation, 1); 2470 if (res != OK) return res; 2471 2472 size_t meteringAreasSize = params.meteringAreas.size() * 5; 2473 int32_t *meteringAreas = new int32_t[meteringAreasSize]; 2474 for (size_t i = 0; i < meteringAreasSize; i += 5) { 2475 if (params.meteringAreas[i].weight != 0) { 2476 meteringAreas[i + 0] = 2477 params.normalizedXToArray(params.meteringAreas[i].left); 2478 meteringAreas[i + 1] = 2479 params.normalizedYToArray(params.meteringAreas[i].top); 2480 meteringAreas[i + 2] = 2481 params.normalizedXToArray(params.meteringAreas[i].right); 2482 meteringAreas[i + 3] = 2483 params.normalizedYToArray(params.meteringAreas[i].bottom); 2484 } else { 2485 
meteringAreas[i + 0] = 0; 2486 meteringAreas[i + 1] = 0; 2487 meteringAreas[i + 2] = 0; 2488 meteringAreas[i + 3] = 0; 2489 } 2490 meteringAreas[i + 4] = params.meteringAreas[i].weight; 2491 } 2492 res = request->update(ANDROID_CONTROL_AE_REGIONS, 2493 meteringAreas, meteringAreasSize); 2494 if (res != OK) return res; 2495 2496 res = request->update(ANDROID_CONTROL_AWB_REGIONS, 2497 meteringAreas, meteringAreasSize); 2498 if (res != OK) return res; 2499 delete[] meteringAreas; 2500 2501 // Need to convert zoom index into a crop rectangle. The rectangle is 2502 // chosen to maximize its area on the sensor 2503 2504 camera_metadata_ro_entry_t maxDigitalZoom = 2505 mParameters.staticInfo(ANDROID_SCALER_AVAILABLE_MAX_ZOOM); 2506 float zoomIncrement = (maxDigitalZoom.data.f[0] - 1) / 2507 (params.NUM_ZOOM_STEPS-1); 2508 float zoomRatio = 1 + zoomIncrement * params.zoom; 2509 2510 float zoomLeft, zoomTop, zoomWidth, zoomHeight; 2511 if (params.previewWidth >= params.previewHeight) { 2512 zoomWidth = params.fastInfo.arrayWidth / zoomRatio; 2513 zoomHeight = zoomWidth * 2514 params.previewHeight / params.previewWidth; 2515 } else { 2516 zoomHeight = params.fastInfo.arrayHeight / zoomRatio; 2517 zoomWidth = zoomHeight * 2518 params.previewWidth / params.previewHeight; 2519 } 2520 zoomLeft = (params.fastInfo.arrayWidth - zoomWidth) / 2; 2521 zoomTop = (params.fastInfo.arrayHeight - zoomHeight) / 2; 2522 2523 int32_t cropRegion[3] = { zoomLeft, zoomTop, zoomWidth }; 2524 res = request->update(ANDROID_SCALER_CROP_REGION, 2525 cropRegion, 3); 2526 if (res != OK) return res; 2527 2528 // TODO: Decide how to map recordingHint, or whether just to ignore it 2529 2530 uint8_t vstabMode = params.videoStabilization ? 
2531 ANDROID_CONTROL_VIDEO_STABILIZATION_ON : 2532 ANDROID_CONTROL_VIDEO_STABILIZATION_OFF; 2533 res = request->update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, 2534 &vstabMode, 1); 2535 if (res != OK) return res; 2536 2537 uint8_t faceDetectMode = params.enableFaceDetect ? 2538 params.fastInfo.bestFaceDetectMode : 2539 (uint8_t)ANDROID_STATS_FACE_DETECTION_OFF; 2540 res = request->update(ANDROID_STATS_FACE_DETECT_MODE, 2541 &faceDetectMode, 1); 2542 if (res != OK) return res; 2543 2544 return OK; 2545} 2546 2547size_t Camera2Client::calculateBufferSize(int width, int height, 2548 int format, int stride) { 2549 switch (format) { 2550 case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16 2551 return width * height * 2; 2552 case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21 2553 return width * height * 3 / 2; 2554 case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2 2555 return width * height * 2; 2556 case HAL_PIXEL_FORMAT_YV12: { // YV12 2557 size_t ySize = stride * height; 2558 size_t uvStride = (stride / 2 + 0xF) & ~0xF; 2559 size_t uvSize = uvStride * height / 2; 2560 return ySize + uvSize * 2; 2561 } 2562 case HAL_PIXEL_FORMAT_RGB_565: 2563 return width * height * 2; 2564 case HAL_PIXEL_FORMAT_RGBA_8888: 2565 return width * height * 4; 2566 case HAL_PIXEL_FORMAT_RAW_SENSOR: 2567 return width * height * 2; 2568 default: 2569 ALOGE("%s: Unknown preview format: %x", 2570 __FUNCTION__, format); 2571 return 0; 2572 } 2573} 2574 2575} // namespace android 2576