// Camera2Client.cpp revision 95dd5ba5bf83716f2eed5fe72366c4212464d710
1/* 2 * Copyright (C) 2012 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#define LOG_TAG "Camera2Client" 18#define ATRACE_TAG ATRACE_TAG_CAMERA 19//#define LOG_NDEBUG 0 20 21#include <inttypes.h> 22#include <utils/Log.h> 23#include <utils/Trace.h> 24 25#include <cutils/properties.h> 26#include <gui/Surface.h> 27 28#include "api1/Camera2Client.h" 29 30#include "api1/client2/StreamingProcessor.h" 31#include "api1/client2/JpegProcessor.h" 32#include "api1/client2/CaptureSequencer.h" 33#include "api1/client2/CallbackProcessor.h" 34#include "api1/client2/ZslProcessor.h" 35#include "api1/client2/ZslProcessor3.h" 36 37#define ALOG1(...) ALOGD_IF(gLogLevel >= 1, __VA_ARGS__); 38#define ALOG2(...) 
ALOGD_IF(gLogLevel >= 2, __VA_ARGS__); 39 40namespace android { 41using namespace camera2; 42 43static int getCallingPid() { 44 return IPCThreadState::self()->getCallingPid(); 45} 46 47// Interface used by CameraService 48 49Camera2Client::Camera2Client(const sp<CameraService>& cameraService, 50 const sp<ICameraClient>& cameraClient, 51 const String16& clientPackageName, 52 int cameraId, 53 int cameraFacing, 54 int clientPid, 55 uid_t clientUid, 56 int servicePid, 57 int deviceVersion): 58 Camera2ClientBase(cameraService, cameraClient, clientPackageName, 59 cameraId, cameraFacing, clientPid, clientUid, servicePid), 60 mParameters(cameraId, cameraFacing), 61 mDeviceVersion(deviceVersion) 62{ 63 ATRACE_CALL(); 64 65 SharedParameters::Lock l(mParameters); 66 l.mParameters.state = Parameters::DISCONNECTED; 67} 68 69status_t Camera2Client::initialize(camera_module_t *module) 70{ 71 ATRACE_CALL(); 72 ALOGV("%s: Initializing client for camera %d", __FUNCTION__, mCameraId); 73 status_t res; 74 75 res = Camera2ClientBase::initialize(module); 76 if (res != OK) { 77 return res; 78 } 79 80 { 81 SharedParameters::Lock l(mParameters); 82 83 res = l.mParameters.initialize(&(mDevice->info())); 84 if (res != OK) { 85 ALOGE("%s: Camera %d: unable to build defaults: %s (%d)", 86 __FUNCTION__, mCameraId, strerror(-res), res); 87 return NO_INIT; 88 } 89 } 90 91 String8 threadName; 92 93 mStreamingProcessor = new StreamingProcessor(this); 94 threadName = String8::format("C2-%d-StreamProc", 95 mCameraId); 96 mStreamingProcessor->run(threadName.string()); 97 98 mFrameProcessor = new FrameProcessor(mDevice, this); 99 threadName = String8::format("C2-%d-FrameProc", 100 mCameraId); 101 mFrameProcessor->run(threadName.string()); 102 103 mCaptureSequencer = new CaptureSequencer(this); 104 threadName = String8::format("C2-%d-CaptureSeq", 105 mCameraId); 106 mCaptureSequencer->run(threadName.string()); 107 108 mJpegProcessor = new JpegProcessor(this, mCaptureSequencer); 109 threadName = 
String8::format("C2-%d-JpegProc", 110 mCameraId); 111 mJpegProcessor->run(threadName.string()); 112 113 switch (mDeviceVersion) { 114 case CAMERA_DEVICE_API_VERSION_2_0: { 115 sp<ZslProcessor> zslProc = 116 new ZslProcessor(this, mCaptureSequencer); 117 mZslProcessor = zslProc; 118 mZslProcessorThread = zslProc; 119 break; 120 } 121 case CAMERA_DEVICE_API_VERSION_3_0: 122 case CAMERA_DEVICE_API_VERSION_3_1: 123 case CAMERA_DEVICE_API_VERSION_3_2: { 124 sp<ZslProcessor3> zslProc = 125 new ZslProcessor3(this, mCaptureSequencer); 126 mZslProcessor = zslProc; 127 mZslProcessorThread = zslProc; 128 break; 129 } 130 default: 131 break; 132 } 133 threadName = String8::format("C2-%d-ZslProc", 134 mCameraId); 135 mZslProcessorThread->run(threadName.string()); 136 137 mCallbackProcessor = new CallbackProcessor(this); 138 threadName = String8::format("C2-%d-CallbkProc", 139 mCameraId); 140 mCallbackProcessor->run(threadName.string()); 141 142 if (gLogLevel >= 1) { 143 SharedParameters::Lock l(mParameters); 144 ALOGD("%s: Default parameters converted from camera %d:", __FUNCTION__, 145 mCameraId); 146 ALOGD("%s", l.mParameters.paramsFlattened.string()); 147 } 148 149 return OK; 150} 151 152Camera2Client::~Camera2Client() { 153 ATRACE_CALL(); 154 ALOGV("~Camera2Client"); 155 156 mDestructionStarted = true; 157 158 disconnect(); 159 160 ALOGI("Camera %d: Closed", mCameraId); 161} 162 163status_t Camera2Client::dump(int fd, const Vector<String16>& args) { 164 String8 result; 165 result.appendFormat("Client2[%d] (%p) Client: %s PID: %d, dump:\n", 166 mCameraId, 167 getRemoteCallback()->asBinder().get(), 168 String8(mClientPackageName).string(), 169 mClientPid); 170 result.append(" State: "); 171#define CASE_APPEND_ENUM(x) case x: result.append(#x "\n"); break; 172 173 const Parameters& p = mParameters.unsafeAccess(); 174 175 result.append(Parameters::getStateName(p.state)); 176 177 result.append("\n Current parameters:\n"); 178 result.appendFormat(" Preview size: %d x %d\n", 179 
p.previewWidth, p.previewHeight); 180 result.appendFormat(" Preview FPS range: %d - %d\n", 181 p.previewFpsRange[0], p.previewFpsRange[1]); 182 result.appendFormat(" Preview HAL pixel format: 0x%x\n", 183 p.previewFormat); 184 result.appendFormat(" Preview transform: %x\n", 185 p.previewTransform); 186 result.appendFormat(" Picture size: %d x %d\n", 187 p.pictureWidth, p.pictureHeight); 188 result.appendFormat(" Jpeg thumbnail size: %d x %d\n", 189 p.jpegThumbSize[0], p.jpegThumbSize[1]); 190 result.appendFormat(" Jpeg quality: %d, thumbnail quality: %d\n", 191 p.jpegQuality, p.jpegThumbQuality); 192 result.appendFormat(" Jpeg rotation: %d\n", p.jpegRotation); 193 result.appendFormat(" GPS tags %s\n", 194 p.gpsEnabled ? "enabled" : "disabled"); 195 if (p.gpsEnabled) { 196 result.appendFormat(" GPS lat x long x alt: %f x %f x %f\n", 197 p.gpsCoordinates[0], p.gpsCoordinates[1], 198 p.gpsCoordinates[2]); 199 result.appendFormat(" GPS timestamp: %" PRId64 "\n", 200 p.gpsTimestamp); 201 result.appendFormat(" GPS processing method: %s\n", 202 p.gpsProcessingMethod.string()); 203 } 204 205 result.append(" White balance mode: "); 206 switch (p.wbMode) { 207 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_AUTO) 208 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_INCANDESCENT) 209 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_FLUORESCENT) 210 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT) 211 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_DAYLIGHT) 212 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT) 213 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_TWILIGHT) 214 CASE_APPEND_ENUM(ANDROID_CONTROL_AWB_MODE_SHADE) 215 default: result.append("UNKNOWN\n"); 216 } 217 218 result.append(" Effect mode: "); 219 switch (p.effectMode) { 220 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_OFF) 221 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_MONO) 222 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_NEGATIVE) 223 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SOLARIZE) 224 
CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_SEPIA) 225 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_POSTERIZE) 226 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD) 227 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD) 228 CASE_APPEND_ENUM(ANDROID_CONTROL_EFFECT_MODE_AQUA) 229 default: result.append("UNKNOWN\n"); 230 } 231 232 result.append(" Antibanding mode: "); 233 switch (p.antibandingMode) { 234 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO) 235 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF) 236 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ) 237 CASE_APPEND_ENUM(ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ) 238 default: result.append("UNKNOWN\n"); 239 } 240 241 result.append(" Scene mode: "); 242 switch (p.sceneMode) { 243 case ANDROID_CONTROL_SCENE_MODE_DISABLED: 244 result.append("AUTO\n"); break; 245 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_ACTION) 246 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PORTRAIT) 247 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_LANDSCAPE) 248 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT) 249 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT) 250 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_THEATRE) 251 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BEACH) 252 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SNOW) 253 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SUNSET) 254 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO) 255 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_FIREWORKS) 256 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_SPORTS) 257 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_PARTY) 258 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT) 259 CASE_APPEND_ENUM(ANDROID_CONTROL_SCENE_MODE_BARCODE) 260 default: result.append("UNKNOWN\n"); 261 } 262 263 result.append(" Flash mode: "); 264 switch (p.flashMode) { 265 CASE_APPEND_ENUM(Parameters::FLASH_MODE_OFF) 266 CASE_APPEND_ENUM(Parameters::FLASH_MODE_AUTO) 267 CASE_APPEND_ENUM(Parameters::FLASH_MODE_ON) 268 
CASE_APPEND_ENUM(Parameters::FLASH_MODE_TORCH) 269 CASE_APPEND_ENUM(Parameters::FLASH_MODE_RED_EYE) 270 CASE_APPEND_ENUM(Parameters::FLASH_MODE_INVALID) 271 default: result.append("UNKNOWN\n"); 272 } 273 274 result.append(" Focus mode: "); 275 switch (p.focusMode) { 276 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_AUTO) 277 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_MACRO) 278 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) 279 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_CONTINUOUS_PICTURE) 280 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_EDOF) 281 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INFINITY) 282 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_FIXED) 283 CASE_APPEND_ENUM(Parameters::FOCUS_MODE_INVALID) 284 default: result.append("UNKNOWN\n"); 285 } 286 287 result.append(" Focus state: "); 288 switch (p.focusState) { 289 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_INACTIVE) 290 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN) 291 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED) 292 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED) 293 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN) 294 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED) 295 CASE_APPEND_ENUM(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) 296 default: result.append("UNKNOWN\n"); 297 } 298 299 result.append(" Focusing areas:\n"); 300 for (size_t i = 0; i < p.focusingAreas.size(); i++) { 301 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n", 302 p.focusingAreas[i].left, 303 p.focusingAreas[i].top, 304 p.focusingAreas[i].right, 305 p.focusingAreas[i].bottom, 306 p.focusingAreas[i].weight); 307 } 308 309 result.appendFormat(" Exposure compensation index: %d\n", 310 p.exposureCompensation); 311 312 result.appendFormat(" AE lock %s, AWB lock %s\n", 313 p.autoExposureLock ? "enabled" : "disabled", 314 p.autoWhiteBalanceLock ? 
"enabled" : "disabled" ); 315 316 result.appendFormat(" Metering areas:\n"); 317 for (size_t i = 0; i < p.meteringAreas.size(); i++) { 318 result.appendFormat(" [ (%d, %d, %d, %d), weight %d ]\n", 319 p.meteringAreas[i].left, 320 p.meteringAreas[i].top, 321 p.meteringAreas[i].right, 322 p.meteringAreas[i].bottom, 323 p.meteringAreas[i].weight); 324 } 325 326 result.appendFormat(" Zoom index: %d\n", p.zoom); 327 result.appendFormat(" Video size: %d x %d\n", p.videoWidth, 328 p.videoHeight); 329 330 result.appendFormat(" Recording hint is %s\n", 331 p.recordingHint ? "set" : "not set"); 332 333 result.appendFormat(" Video stabilization is %s\n", 334 p.videoStabilization ? "enabled" : "disabled"); 335 336 result.appendFormat(" Selected still capture FPS range: %d - %d\n", 337 p.fastInfo.bestStillCaptureFpsRange[0], 338 p.fastInfo.bestStillCaptureFpsRange[1]); 339 340 result.append(" Current streams:\n"); 341 result.appendFormat(" Preview stream ID: %d\n", 342 getPreviewStreamId()); 343 result.appendFormat(" Capture stream ID: %d\n", 344 getCaptureStreamId()); 345 result.appendFormat(" Recording stream ID: %d\n", 346 getRecordingStreamId()); 347 348 result.append(" Quirks for this camera:\n"); 349 bool haveQuirk = false; 350 if (p.quirks.triggerAfWithAuto) { 351 result.appendFormat(" triggerAfWithAuto\n"); 352 haveQuirk = true; 353 } 354 if (p.quirks.useZslFormat) { 355 result.appendFormat(" useZslFormat\n"); 356 haveQuirk = true; 357 } 358 if (p.quirks.meteringCropRegion) { 359 result.appendFormat(" meteringCropRegion\n"); 360 haveQuirk = true; 361 } 362 if (p.quirks.partialResults) { 363 result.appendFormat(" usePartialResult\n"); 364 haveQuirk = true; 365 } 366 if (!haveQuirk) { 367 result.appendFormat(" none\n"); 368 } 369 370 write(fd, result.string(), result.size()); 371 372 mStreamingProcessor->dump(fd, args); 373 374 mCaptureSequencer->dump(fd, args); 375 376 mFrameProcessor->dump(fd, args); 377 378 mZslProcessor->dump(fd, args); 379 380 return dumpDevice(fd, 
args); 381#undef CASE_APPEND_ENUM 382} 383 384// ICamera interface 385 386void Camera2Client::disconnect() { 387 ATRACE_CALL(); 388 Mutex::Autolock icl(mBinderSerializationLock); 389 390 // Allow both client and the media server to disconnect at all times 391 int callingPid = getCallingPid(); 392 if (callingPid != mClientPid && callingPid != mServicePid) return; 393 394 if (mDevice == 0) return; 395 396 ALOGV("Camera %d: Shutting down", mCameraId); 397 398 /** 399 * disconnect() cannot call any methods that might need to promote a 400 * wp<Camera2Client>, since disconnect can be called from the destructor, at 401 * which point all such promotions will fail. 402 */ 403 404 stopPreviewL(); 405 406 { 407 SharedParameters::Lock l(mParameters); 408 if (l.mParameters.state == Parameters::DISCONNECTED) return; 409 l.mParameters.state = Parameters::DISCONNECTED; 410 } 411 412 mStreamingProcessor->requestExit(); 413 mFrameProcessor->requestExit(); 414 mCaptureSequencer->requestExit(); 415 mJpegProcessor->requestExit(); 416 mZslProcessorThread->requestExit(); 417 mCallbackProcessor->requestExit(); 418 419 ALOGV("Camera %d: Waiting for threads", mCameraId); 420 421 mStreamingProcessor->join(); 422 mFrameProcessor->join(); 423 mCaptureSequencer->join(); 424 mJpegProcessor->join(); 425 mZslProcessorThread->join(); 426 mCallbackProcessor->join(); 427 428 ALOGV("Camera %d: Deleting streams", mCameraId); 429 430 mStreamingProcessor->deletePreviewStream(); 431 mStreamingProcessor->deleteRecordingStream(); 432 mJpegProcessor->deleteStream(); 433 mCallbackProcessor->deleteStream(); 434 mZslProcessor->deleteStream(); 435 436 ALOGV("Camera %d: Disconnecting device", mCameraId); 437 438 mDevice->disconnect(); 439 440 mDevice.clear(); 441 442 CameraService::Client::disconnect(); 443} 444 445status_t Camera2Client::connect(const sp<ICameraClient>& client) { 446 ATRACE_CALL(); 447 ALOGV("%s: E", __FUNCTION__); 448 Mutex::Autolock icl(mBinderSerializationLock); 449 450 if (mClientPid != 0 
&& getCallingPid() != mClientPid) { 451 ALOGE("%s: Camera %d: Connection attempt from pid %d; " 452 "current locked to pid %d", __FUNCTION__, 453 mCameraId, getCallingPid(), mClientPid); 454 return BAD_VALUE; 455 } 456 457 mClientPid = getCallingPid(); 458 459 mRemoteCallback = client; 460 mSharedCameraCallbacks = client; 461 462 return OK; 463} 464 465status_t Camera2Client::lock() { 466 ATRACE_CALL(); 467 ALOGV("%s: E", __FUNCTION__); 468 Mutex::Autolock icl(mBinderSerializationLock); 469 ALOGV("%s: Camera %d: Lock call from pid %d; current client pid %d", 470 __FUNCTION__, mCameraId, getCallingPid(), mClientPid); 471 472 if (mClientPid == 0) { 473 mClientPid = getCallingPid(); 474 return OK; 475 } 476 477 if (mClientPid != getCallingPid()) { 478 ALOGE("%s: Camera %d: Lock call from pid %d; currently locked to pid %d", 479 __FUNCTION__, mCameraId, getCallingPid(), mClientPid); 480 return EBUSY; 481 } 482 483 return OK; 484} 485 486status_t Camera2Client::unlock() { 487 ATRACE_CALL(); 488 ALOGV("%s: E", __FUNCTION__); 489 Mutex::Autolock icl(mBinderSerializationLock); 490 ALOGV("%s: Camera %d: Unlock call from pid %d; current client pid %d", 491 __FUNCTION__, mCameraId, getCallingPid(), mClientPid); 492 493 if (mClientPid == getCallingPid()) { 494 SharedParameters::Lock l(mParameters); 495 if (l.mParameters.state == Parameters::RECORD || 496 l.mParameters.state == Parameters::VIDEO_SNAPSHOT) { 497 ALOGD("Not allowed to unlock camera during recording."); 498 return INVALID_OPERATION; 499 } 500 mClientPid = 0; 501 mRemoteCallback.clear(); 502 mSharedCameraCallbacks.clear(); 503 return OK; 504 } 505 506 ALOGE("%s: Camera %d: Unlock call from pid %d; currently locked to pid %d", 507 __FUNCTION__, mCameraId, getCallingPid(), mClientPid); 508 return EBUSY; 509} 510 511status_t Camera2Client::setPreviewTarget( 512 const sp<IGraphicBufferProducer>& bufferProducer) { 513 ATRACE_CALL(); 514 ALOGV("%s: E", __FUNCTION__); 515 Mutex::Autolock icl(mBinderSerializationLock); 516 
status_t res; 517 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 518 519 sp<IBinder> binder; 520 sp<ANativeWindow> window; 521 if (bufferProducer != 0) { 522 binder = bufferProducer->asBinder(); 523 // Using controlledByApp flag to ensure that the buffer queue remains in 524 // async mode for the old camera API, where many applications depend 525 // on that behavior. 526 window = new Surface(bufferProducer, /*controlledByApp*/ true); 527 } 528 return setPreviewWindowL(binder, window); 529} 530 531status_t Camera2Client::setPreviewWindowL(const sp<IBinder>& binder, 532 sp<ANativeWindow> window) { 533 ATRACE_CALL(); 534 status_t res; 535 536 if (binder == mPreviewSurface) { 537 ALOGV("%s: Camera %d: New window is same as old window", 538 __FUNCTION__, mCameraId); 539 return NO_ERROR; 540 } 541 542 Parameters::State state; 543 { 544 SharedParameters::Lock l(mParameters); 545 state = l.mParameters.state; 546 } 547 switch (state) { 548 case Parameters::DISCONNECTED: 549 case Parameters::RECORD: 550 case Parameters::STILL_CAPTURE: 551 case Parameters::VIDEO_SNAPSHOT: 552 ALOGE("%s: Camera %d: Cannot set preview display while in state %s", 553 __FUNCTION__, mCameraId, 554 Parameters::getStateName(state)); 555 return INVALID_OPERATION; 556 case Parameters::STOPPED: 557 case Parameters::WAITING_FOR_PREVIEW_WINDOW: 558 // OK 559 break; 560 case Parameters::PREVIEW: 561 // Already running preview - need to stop and create a new stream 562 res = stopStream(); 563 if (res != OK) { 564 ALOGE("%s: Unable to stop preview to swap windows: %s (%d)", 565 __FUNCTION__, strerror(-res), res); 566 return res; 567 } 568 state = Parameters::WAITING_FOR_PREVIEW_WINDOW; 569 break; 570 } 571 572 mPreviewSurface = binder; 573 res = mStreamingProcessor->setPreviewWindow(window); 574 if (res != OK) { 575 ALOGE("%s: Unable to set new preview window: %s (%d)", 576 __FUNCTION__, strerror(-res), res); 577 return res; 578 } 579 580 if (state == Parameters::WAITING_FOR_PREVIEW_WINDOW) { 581 
SharedParameters::Lock l(mParameters); 582 l.mParameters.state = state; 583 return startPreviewL(l.mParameters, false); 584 } 585 586 return OK; 587} 588 589void Camera2Client::setPreviewCallbackFlag(int flag) { 590 ATRACE_CALL(); 591 ALOGV("%s: Camera %d: Flag 0x%x", __FUNCTION__, mCameraId, flag); 592 Mutex::Autolock icl(mBinderSerializationLock); 593 594 if ( checkPid(__FUNCTION__) != OK) return; 595 596 SharedParameters::Lock l(mParameters); 597 setPreviewCallbackFlagL(l.mParameters, flag); 598} 599 600void Camera2Client::setPreviewCallbackFlagL(Parameters ¶ms, int flag) { 601 status_t res = OK; 602 603 switch(params.state) { 604 case Parameters::STOPPED: 605 case Parameters::WAITING_FOR_PREVIEW_WINDOW: 606 case Parameters::PREVIEW: 607 case Parameters::STILL_CAPTURE: 608 // OK 609 break; 610 default: 611 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) { 612 ALOGE("%s: Camera %d: Can't use preview callbacks " 613 "in state %d", __FUNCTION__, mCameraId, params.state); 614 return; 615 } 616 } 617 618 if (flag & CAMERA_FRAME_CALLBACK_FLAG_ONE_SHOT_MASK) { 619 ALOGV("%s: setting oneshot", __FUNCTION__); 620 params.previewCallbackOneShot = true; 621 } 622 if (params.previewCallbackFlags != (uint32_t)flag) { 623 624 if (params.previewCallbackSurface && flag != CAMERA_FRAME_CALLBACK_FLAG_NOOP) { 625 // Disable any existing preview callback window when enabling 626 // preview callback flags 627 res = mCallbackProcessor->setCallbackWindow(NULL); 628 if (res != OK) { 629 ALOGE("%s: Camera %d: Unable to clear preview callback surface:" 630 " %s (%d)", __FUNCTION__, mCameraId, strerror(-res), res); 631 return; 632 } 633 params.previewCallbackSurface = false; 634 } 635 636 params.previewCallbackFlags = flag; 637 638 if (params.state == Parameters::PREVIEW) { 639 res = startPreviewL(params, true); 640 if (res != OK) { 641 ALOGE("%s: Camera %d: Unable to refresh request in state %s", 642 __FUNCTION__, mCameraId, 643 Parameters::getStateName(params.state)); 644 } 645 } 646 
} 647} 648 649status_t Camera2Client::setPreviewCallbackTarget( 650 const sp<IGraphicBufferProducer>& callbackProducer) { 651 ATRACE_CALL(); 652 ALOGV("%s: E", __FUNCTION__); 653 Mutex::Autolock icl(mBinderSerializationLock); 654 status_t res; 655 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 656 657 sp<ANativeWindow> window; 658 if (callbackProducer != 0) { 659 window = new Surface(callbackProducer); 660 } 661 662 res = mCallbackProcessor->setCallbackWindow(window); 663 if (res != OK) { 664 ALOGE("%s: Camera %d: Unable to set preview callback surface: %s (%d)", 665 __FUNCTION__, mCameraId, strerror(-res), res); 666 return res; 667 } 668 669 SharedParameters::Lock l(mParameters); 670 671 if (window != NULL) { 672 // Disable traditional callbacks when a valid callback target is given 673 l.mParameters.previewCallbackFlags = CAMERA_FRAME_CALLBACK_FLAG_NOOP; 674 l.mParameters.previewCallbackOneShot = false; 675 l.mParameters.previewCallbackSurface = true; 676 } else { 677 // Disable callback target if given a NULL interface. 
678 l.mParameters.previewCallbackSurface = false; 679 } 680 681 switch(l.mParameters.state) { 682 case Parameters::PREVIEW: 683 res = startPreviewL(l.mParameters, true); 684 break; 685 case Parameters::RECORD: 686 case Parameters::VIDEO_SNAPSHOT: 687 res = startRecordingL(l.mParameters, true); 688 break; 689 default: 690 break; 691 } 692 if (res != OK) { 693 ALOGE("%s: Camera %d: Unable to refresh request in state %s", 694 __FUNCTION__, mCameraId, 695 Parameters::getStateName(l.mParameters.state)); 696 } 697 698 return OK; 699} 700 701 702status_t Camera2Client::startPreview() { 703 ATRACE_CALL(); 704 ALOGV("%s: E", __FUNCTION__); 705 Mutex::Autolock icl(mBinderSerializationLock); 706 status_t res; 707 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 708 SharedParameters::Lock l(mParameters); 709 return startPreviewL(l.mParameters, false); 710} 711 712status_t Camera2Client::startPreviewL(Parameters ¶ms, bool restart) { 713 ATRACE_CALL(); 714 status_t res; 715 716 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart); 717 718 if ( (params.state == Parameters::PREVIEW || 719 params.state == Parameters::RECORD || 720 params.state == Parameters::VIDEO_SNAPSHOT) 721 && !restart) { 722 // Succeed attempt to re-enter a streaming state 723 ALOGI("%s: Camera %d: Preview already active, ignoring restart", 724 __FUNCTION__, mCameraId); 725 return OK; 726 } 727 if (params.state > Parameters::PREVIEW && !restart) { 728 ALOGE("%s: Can't start preview in state %s", 729 __FUNCTION__, 730 Parameters::getStateName(params.state)); 731 return INVALID_OPERATION; 732 } 733 734 if (!mStreamingProcessor->haveValidPreviewWindow()) { 735 params.state = Parameters::WAITING_FOR_PREVIEW_WINDOW; 736 return OK; 737 } 738 params.state = Parameters::STOPPED; 739 int lastPreviewStreamId = mStreamingProcessor->getPreviewStreamId(); 740 741 res = mStreamingProcessor->updatePreviewStream(params); 742 if (res != OK) { 743 ALOGE("%s: Camera %d: Unable to update preview 
stream: %s (%d)", 744 __FUNCTION__, mCameraId, strerror(-res), res); 745 return res; 746 } 747 748 bool previewStreamChanged = mStreamingProcessor->getPreviewStreamId() != lastPreviewStreamId; 749 750 // We could wait to create the JPEG output stream until first actual use 751 // (first takePicture call). However, this would substantially increase the 752 // first capture latency on HAL3 devices, and potentially on some HAL2 753 // devices. So create it unconditionally at preview start. As a drawback, 754 // this increases gralloc memory consumption for applications that don't 755 // ever take a picture. 756 // TODO: Find a better compromise, though this likely would involve HAL 757 // changes. 758 res = updateProcessorStream(mJpegProcessor, params); 759 if (res != OK) { 760 ALOGE("%s: Camera %d: Can't pre-configure still image " 761 "stream: %s (%d)", 762 __FUNCTION__, mCameraId, strerror(-res), res); 763 return res; 764 } 765 766 Vector<int32_t> outputStreams; 767 bool callbacksEnabled = (params.previewCallbackFlags & 768 CAMERA_FRAME_CALLBACK_FLAG_ENABLE_MASK) || 769 params.previewCallbackSurface; 770 771 if (callbacksEnabled) { 772 // Can't have recording stream hanging around when enabling callbacks, 773 // since it exceeds the max stream count on some devices. 
774 if (mStreamingProcessor->getRecordingStreamId() != NO_STREAM) { 775 ALOGV("%s: Camera %d: Clearing out recording stream before " 776 "creating callback stream", __FUNCTION__, mCameraId); 777 res = mStreamingProcessor->stopStream(); 778 if (res != OK) { 779 ALOGE("%s: Camera %d: Can't stop streaming to delete " 780 "recording stream", __FUNCTION__, mCameraId); 781 return res; 782 } 783 res = mStreamingProcessor->deleteRecordingStream(); 784 if (res != OK) { 785 ALOGE("%s: Camera %d: Unable to delete recording stream before " 786 "enabling callbacks: %s (%d)", __FUNCTION__, mCameraId, 787 strerror(-res), res); 788 return res; 789 } 790 } 791 792 res = mCallbackProcessor->updateStream(params); 793 if (res != OK) { 794 ALOGE("%s: Camera %d: Unable to update callback stream: %s (%d)", 795 __FUNCTION__, mCameraId, strerror(-res), res); 796 return res; 797 } 798 outputStreams.push(getCallbackStreamId()); 799 } else if (previewStreamChanged && mCallbackProcessor->getStreamId() != NO_STREAM) { 800 /** 801 * Delete the unused callback stream when preview stream is changed and 802 * preview is not enabled. Don't need stop preview stream as preview is in 803 * STOPPED state now. 
804 */ 805 ALOGV("%s: Camera %d: Delete unused preview callback stream.", __FUNCTION__, mCameraId); 806 res = mCallbackProcessor->deleteStream(); 807 if (res != OK) { 808 ALOGE("%s: Camera %d: Unable to delete callback stream %s (%d)", 809 __FUNCTION__, mCameraId, strerror(-res), res); 810 return res; 811 } 812 } 813 if (params.zslMode && !params.recordingHint) { 814 res = updateProcessorStream(mZslProcessor, params); 815 if (res != OK) { 816 ALOGE("%s: Camera %d: Unable to update ZSL stream: %s (%d)", 817 __FUNCTION__, mCameraId, strerror(-res), res); 818 return res; 819 } 820 outputStreams.push(getZslStreamId()); 821 } else { 822 mZslProcessor->deleteStream(); 823 } 824 825 outputStreams.push(getPreviewStreamId()); 826 827 if (!params.recordingHint) { 828 if (!restart) { 829 res = mStreamingProcessor->updatePreviewRequest(params); 830 if (res != OK) { 831 ALOGE("%s: Camera %d: Can't set up preview request: " 832 "%s (%d)", __FUNCTION__, mCameraId, 833 strerror(-res), res); 834 return res; 835 } 836 } 837 res = mStreamingProcessor->startStream(StreamingProcessor::PREVIEW, 838 outputStreams); 839 } else { 840 if (!restart) { 841 res = mStreamingProcessor->updateRecordingRequest(params); 842 if (res != OK) { 843 ALOGE("%s: Camera %d: Can't set up preview request with " 844 "record hint: %s (%d)", __FUNCTION__, mCameraId, 845 strerror(-res), res); 846 return res; 847 } 848 } 849 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD, 850 outputStreams); 851 } 852 if (res != OK) { 853 ALOGE("%s: Camera %d: Unable to start streaming preview: %s (%d)", 854 __FUNCTION__, mCameraId, strerror(-res), res); 855 return res; 856 } 857 858 params.state = Parameters::PREVIEW; 859 return OK; 860} 861 862void Camera2Client::stopPreview() { 863 ATRACE_CALL(); 864 ALOGV("%s: E", __FUNCTION__); 865 Mutex::Autolock icl(mBinderSerializationLock); 866 status_t res; 867 if ( (res = checkPid(__FUNCTION__) ) != OK) return; 868 stopPreviewL(); 869} 870 871void 
Camera2Client::stopPreviewL() { 872 ATRACE_CALL(); 873 status_t res; 874 const nsecs_t kStopCaptureTimeout = 3000000000LL; // 3 seconds 875 Parameters::State state; 876 { 877 SharedParameters::Lock l(mParameters); 878 state = l.mParameters.state; 879 } 880 881 switch (state) { 882 case Parameters::DISCONNECTED: 883 // Nothing to do. 884 break; 885 case Parameters::STOPPED: 886 case Parameters::VIDEO_SNAPSHOT: 887 case Parameters::STILL_CAPTURE: 888 mCaptureSequencer->waitUntilIdle(kStopCaptureTimeout); 889 // no break 890 case Parameters::RECORD: 891 case Parameters::PREVIEW: 892 syncWithDevice(); 893 res = stopStream(); 894 if (res != OK) { 895 ALOGE("%s: Camera %d: Can't stop streaming: %s (%d)", 896 __FUNCTION__, mCameraId, strerror(-res), res); 897 } 898 res = mDevice->waitUntilDrained(); 899 if (res != OK) { 900 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)", 901 __FUNCTION__, mCameraId, strerror(-res), res); 902 } 903 // no break 904 case Parameters::WAITING_FOR_PREVIEW_WINDOW: { 905 SharedParameters::Lock l(mParameters); 906 l.mParameters.state = Parameters::STOPPED; 907 commandStopFaceDetectionL(l.mParameters); 908 break; 909 } 910 default: 911 ALOGE("%s: Camera %d: Unknown state %d", __FUNCTION__, mCameraId, 912 state); 913 } 914} 915 916bool Camera2Client::previewEnabled() { 917 ATRACE_CALL(); 918 Mutex::Autolock icl(mBinderSerializationLock); 919 status_t res; 920 if ( (res = checkPid(__FUNCTION__) ) != OK) return false; 921 922 SharedParameters::Lock l(mParameters); 923 return l.mParameters.state == Parameters::PREVIEW; 924} 925 926status_t Camera2Client::storeMetaDataInBuffers(bool enabled) { 927 ATRACE_CALL(); 928 Mutex::Autolock icl(mBinderSerializationLock); 929 status_t res; 930 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 931 932 SharedParameters::Lock l(mParameters); 933 switch (l.mParameters.state) { 934 case Parameters::RECORD: 935 case Parameters::VIDEO_SNAPSHOT: 936 ALOGE("%s: Camera %d: Can't be called in state 
%s", 937 __FUNCTION__, mCameraId, 938 Parameters::getStateName(l.mParameters.state)); 939 return INVALID_OPERATION; 940 default: 941 // OK 942 break; 943 } 944 945 l.mParameters.storeMetadataInBuffers = enabled; 946 947 return OK; 948} 949 950status_t Camera2Client::startRecording() { 951 ATRACE_CALL(); 952 ALOGV("%s: E", __FUNCTION__); 953 Mutex::Autolock icl(mBinderSerializationLock); 954 status_t res; 955 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 956 SharedParameters::Lock l(mParameters); 957 958 return startRecordingL(l.mParameters, false); 959} 960 961status_t Camera2Client::startRecordingL(Parameters ¶ms, bool restart) { 962 status_t res; 963 964 ALOGV("%s: state == %d, restart = %d", __FUNCTION__, params.state, restart); 965 966 switch (params.state) { 967 case Parameters::STOPPED: 968 res = startPreviewL(params, false); 969 if (res != OK) return res; 970 break; 971 case Parameters::PREVIEW: 972 // Ready to go 973 break; 974 case Parameters::RECORD: 975 case Parameters::VIDEO_SNAPSHOT: 976 // OK to call this when recording is already on, just skip unless 977 // we're looking to restart 978 if (!restart) return OK; 979 break; 980 default: 981 ALOGE("%s: Camera %d: Can't start recording in state %s", 982 __FUNCTION__, mCameraId, 983 Parameters::getStateName(params.state)); 984 return INVALID_OPERATION; 985 }; 986 987 if (!params.storeMetadataInBuffers) { 988 ALOGE("%s: Camera %d: Recording only supported in metadata mode, but " 989 "non-metadata recording mode requested!", __FUNCTION__, 990 mCameraId); 991 return INVALID_OPERATION; 992 } 993 994 if (!restart) { 995 mCameraService->playSound(CameraService::SOUND_RECORDING); 996 mStreamingProcessor->updateRecordingRequest(params); 997 if (res != OK) { 998 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)", 999 __FUNCTION__, mCameraId, strerror(-res), res); 1000 return res; 1001 } 1002 } 1003 1004 // Not all devices can support a preview callback stream and a recording 1005 // stream 
at the same time, so assume none of them can. 1006 if (mCallbackProcessor->getStreamId() != NO_STREAM) { 1007 ALOGV("%s: Camera %d: Clearing out callback stream before " 1008 "creating recording stream", __FUNCTION__, mCameraId); 1009 res = mStreamingProcessor->stopStream(); 1010 if (res != OK) { 1011 ALOGE("%s: Camera %d: Can't stop streaming to delete callback stream", 1012 __FUNCTION__, mCameraId); 1013 return res; 1014 } 1015 res = mCallbackProcessor->deleteStream(); 1016 if (res != OK) { 1017 ALOGE("%s: Camera %d: Unable to delete callback stream before " 1018 "record: %s (%d)", __FUNCTION__, mCameraId, 1019 strerror(-res), res); 1020 return res; 1021 } 1022 } 1023 // Disable callbacks if they're enabled; can't record and use callbacks, 1024 // and we can't fail record start without stagefright asserting. 1025 params.previewCallbackFlags = 0; 1026 1027 res = updateProcessorStream< 1028 StreamingProcessor, 1029 &StreamingProcessor::updateRecordingStream>(mStreamingProcessor, 1030 params); 1031 if (res != OK) { 1032 ALOGE("%s: Camera %d: Unable to update recording stream: %s (%d)", 1033 __FUNCTION__, mCameraId, strerror(-res), res); 1034 return res; 1035 } 1036 1037 Vector<int32_t> outputStreams; 1038 outputStreams.push(getPreviewStreamId()); 1039 outputStreams.push(getRecordingStreamId()); 1040 1041 res = mStreamingProcessor->startStream(StreamingProcessor::RECORD, 1042 outputStreams); 1043 if (res != OK) { 1044 ALOGE("%s: Camera %d: Unable to start recording stream: %s (%d)", 1045 __FUNCTION__, mCameraId, strerror(-res), res); 1046 return res; 1047 } 1048 1049 if (params.state < Parameters::RECORD) { 1050 params.state = Parameters::RECORD; 1051 } 1052 1053 return OK; 1054} 1055 1056void Camera2Client::stopRecording() { 1057 ATRACE_CALL(); 1058 ALOGV("%s: E", __FUNCTION__); 1059 Mutex::Autolock icl(mBinderSerializationLock); 1060 SharedParameters::Lock l(mParameters); 1061 1062 status_t res; 1063 if ( (res = checkPid(__FUNCTION__) ) != OK) return; 1064 1065 
switch (l.mParameters.state) { 1066 case Parameters::RECORD: 1067 // OK to stop 1068 break; 1069 case Parameters::STOPPED: 1070 case Parameters::PREVIEW: 1071 case Parameters::STILL_CAPTURE: 1072 case Parameters::VIDEO_SNAPSHOT: 1073 default: 1074 ALOGE("%s: Camera %d: Can't stop recording in state %s", 1075 __FUNCTION__, mCameraId, 1076 Parameters::getStateName(l.mParameters.state)); 1077 return; 1078 }; 1079 1080 mCameraService->playSound(CameraService::SOUND_RECORDING); 1081 1082 res = startPreviewL(l.mParameters, true); 1083 if (res != OK) { 1084 ALOGE("%s: Camera %d: Unable to return to preview", 1085 __FUNCTION__, mCameraId); 1086 } 1087} 1088 1089bool Camera2Client::recordingEnabled() { 1090 ATRACE_CALL(); 1091 Mutex::Autolock icl(mBinderSerializationLock); 1092 1093 if ( checkPid(__FUNCTION__) != OK) return false; 1094 1095 return recordingEnabledL(); 1096} 1097 1098bool Camera2Client::recordingEnabledL() { 1099 ATRACE_CALL(); 1100 SharedParameters::Lock l(mParameters); 1101 1102 return (l.mParameters.state == Parameters::RECORD 1103 || l.mParameters.state == Parameters::VIDEO_SNAPSHOT); 1104} 1105 1106void Camera2Client::releaseRecordingFrame(const sp<IMemory>& mem) { 1107 ATRACE_CALL(); 1108 Mutex::Autolock icl(mBinderSerializationLock); 1109 if ( checkPid(__FUNCTION__) != OK) return; 1110 1111 mStreamingProcessor->releaseRecordingFrame(mem); 1112} 1113 1114status_t Camera2Client::autoFocus() { 1115 ATRACE_CALL(); 1116 Mutex::Autolock icl(mBinderSerializationLock); 1117 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); 1118 status_t res; 1119 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 1120 1121 int triggerId; 1122 bool notifyImmediately = false; 1123 bool notifySuccess = false; 1124 { 1125 SharedParameters::Lock l(mParameters); 1126 if (l.mParameters.state < Parameters::PREVIEW) { 1127 return INVALID_OPERATION; 1128 } 1129 1130 /** 1131 * If the camera does not support auto-focus, it is a no-op and 1132 * onAutoFocus(boolean, Camera) callback 
will be called immediately 1133 * with a fake value of success set to true. 1134 * 1135 * Similarly, if focus mode is set to INFINITY, there's no reason to 1136 * bother the HAL. 1137 */ 1138 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED || 1139 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) { 1140 notifyImmediately = true; 1141 notifySuccess = true; 1142 } 1143 /** 1144 * If we're in CAF mode, and AF has already been locked, just fire back 1145 * the callback right away; the HAL would not send a notification since 1146 * no state change would happen on a AF trigger. 1147 */ 1148 if ( (l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_PICTURE || 1149 l.mParameters.focusMode == Parameters::FOCUS_MODE_CONTINUOUS_VIDEO) && 1150 l.mParameters.focusState == ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED ) { 1151 notifyImmediately = true; 1152 notifySuccess = true; 1153 } 1154 /** 1155 * Send immediate notification back to client 1156 */ 1157 if (notifyImmediately) { 1158 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); 1159 if (l.mRemoteCallback != 0) { 1160 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, 1161 notifySuccess ? 
1 : 0, 0); 1162 } 1163 return OK; 1164 } 1165 /** 1166 * Handle quirk mode for AF in scene modes 1167 */ 1168 if (l.mParameters.quirks.triggerAfWithAuto && 1169 l.mParameters.sceneMode != ANDROID_CONTROL_SCENE_MODE_DISABLED && 1170 l.mParameters.focusMode != Parameters::FOCUS_MODE_AUTO && 1171 !l.mParameters.focusingAreas[0].isEmpty()) { 1172 ALOGV("%s: Quirk: Switching from focusMode %d to AUTO", 1173 __FUNCTION__, l.mParameters.focusMode); 1174 l.mParameters.shadowFocusMode = l.mParameters.focusMode; 1175 l.mParameters.focusMode = Parameters::FOCUS_MODE_AUTO; 1176 updateRequests(l.mParameters); 1177 } 1178 1179 l.mParameters.currentAfTriggerId = ++l.mParameters.afTriggerCounter; 1180 triggerId = l.mParameters.currentAfTriggerId; 1181 } 1182 ATRACE_ASYNC_BEGIN(kAutofocusLabel, triggerId); 1183 1184 syncWithDevice(); 1185 1186 mDevice->triggerAutofocus(triggerId); 1187 1188 return OK; 1189} 1190 1191status_t Camera2Client::cancelAutoFocus() { 1192 ATRACE_CALL(); 1193 Mutex::Autolock icl(mBinderSerializationLock); 1194 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); 1195 status_t res; 1196 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 1197 1198 int triggerId; 1199 { 1200 SharedParameters::Lock l(mParameters); 1201 // Canceling does nothing in FIXED or INFINITY modes 1202 if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED || 1203 l.mParameters.focusMode == Parameters::FOCUS_MODE_INFINITY) { 1204 return OK; 1205 } 1206 1207 // An active AF trigger is canceled 1208 if (l.mParameters.afTriggerCounter == l.mParameters.currentAfTriggerId) { 1209 ATRACE_ASYNC_END(kAutofocusLabel, l.mParameters.currentAfTriggerId); 1210 } 1211 1212 triggerId = ++l.mParameters.afTriggerCounter; 1213 1214 // When using triggerAfWithAuto quirk, may need to reset focus mode to 1215 // the real state at this point. No need to cancel explicitly if 1216 // changing the AF mode. 
1217 if (l.mParameters.shadowFocusMode != Parameters::FOCUS_MODE_INVALID) { 1218 ALOGV("%s: Quirk: Restoring focus mode to %d", __FUNCTION__, 1219 l.mParameters.shadowFocusMode); 1220 l.mParameters.focusMode = l.mParameters.shadowFocusMode; 1221 l.mParameters.shadowFocusMode = Parameters::FOCUS_MODE_INVALID; 1222 updateRequests(l.mParameters); 1223 1224 return OK; 1225 } 1226 } 1227 syncWithDevice(); 1228 1229 mDevice->triggerCancelAutofocus(triggerId); 1230 1231 return OK; 1232} 1233 1234status_t Camera2Client::takePicture(int msgType) { 1235 ATRACE_CALL(); 1236 Mutex::Autolock icl(mBinderSerializationLock); 1237 status_t res; 1238 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 1239 1240 int takePictureCounter; 1241 { 1242 SharedParameters::Lock l(mParameters); 1243 switch (l.mParameters.state) { 1244 case Parameters::DISCONNECTED: 1245 case Parameters::STOPPED: 1246 case Parameters::WAITING_FOR_PREVIEW_WINDOW: 1247 ALOGE("%s: Camera %d: Cannot take picture without preview enabled", 1248 __FUNCTION__, mCameraId); 1249 return INVALID_OPERATION; 1250 case Parameters::PREVIEW: 1251 // Good to go for takePicture 1252 res = commandStopFaceDetectionL(l.mParameters); 1253 if (res != OK) { 1254 ALOGE("%s: Camera %d: Unable to stop face detection for still capture", 1255 __FUNCTION__, mCameraId); 1256 return res; 1257 } 1258 l.mParameters.state = Parameters::STILL_CAPTURE; 1259 break; 1260 case Parameters::RECORD: 1261 // Good to go for video snapshot 1262 l.mParameters.state = Parameters::VIDEO_SNAPSHOT; 1263 break; 1264 case Parameters::STILL_CAPTURE: 1265 case Parameters::VIDEO_SNAPSHOT: 1266 ALOGE("%s: Camera %d: Already taking a picture", 1267 __FUNCTION__, mCameraId); 1268 return INVALID_OPERATION; 1269 } 1270 1271 ALOGV("%s: Camera %d: Starting picture capture", __FUNCTION__, mCameraId); 1272 1273 res = updateProcessorStream(mJpegProcessor, l.mParameters); 1274 if (res != OK) { 1275 ALOGE("%s: Camera %d: Can't set up still image stream: %s (%d)", 1276 
__FUNCTION__, mCameraId, strerror(-res), res); 1277 return res; 1278 } 1279 takePictureCounter = ++l.mParameters.takePictureCounter; 1280 } 1281 1282 ATRACE_ASYNC_BEGIN(kTakepictureLabel, takePictureCounter); 1283 1284 // Need HAL to have correct settings before (possibly) triggering precapture 1285 syncWithDevice(); 1286 1287 res = mCaptureSequencer->startCapture(msgType); 1288 if (res != OK) { 1289 ALOGE("%s: Camera %d: Unable to start capture: %s (%d)", 1290 __FUNCTION__, mCameraId, strerror(-res), res); 1291 } 1292 1293 return res; 1294} 1295 1296status_t Camera2Client::setParameters(const String8& params) { 1297 ATRACE_CALL(); 1298 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); 1299 Mutex::Autolock icl(mBinderSerializationLock); 1300 status_t res; 1301 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 1302 1303 SharedParameters::Lock l(mParameters); 1304 1305 res = l.mParameters.set(params); 1306 if (res != OK) return res; 1307 1308 res = updateRequests(l.mParameters); 1309 1310 return res; 1311} 1312 1313String8 Camera2Client::getParameters() const { 1314 ATRACE_CALL(); 1315 ALOGV("%s: Camera %d", __FUNCTION__, mCameraId); 1316 Mutex::Autolock icl(mBinderSerializationLock); 1317 if ( checkPid(__FUNCTION__) != OK) return String8(); 1318 1319 SharedParameters::ReadLock l(mParameters); 1320 1321 return l.mParameters.get(); 1322} 1323 1324status_t Camera2Client::sendCommand(int32_t cmd, int32_t arg1, int32_t arg2) { 1325 ATRACE_CALL(); 1326 Mutex::Autolock icl(mBinderSerializationLock); 1327 status_t res; 1328 if ( (res = checkPid(__FUNCTION__) ) != OK) return res; 1329 1330 ALOGV("%s: Camera %d: Command %d (%d, %d)", __FUNCTION__, mCameraId, 1331 cmd, arg1, arg2); 1332 1333 switch (cmd) { 1334 case CAMERA_CMD_START_SMOOTH_ZOOM: 1335 return commandStartSmoothZoomL(); 1336 case CAMERA_CMD_STOP_SMOOTH_ZOOM: 1337 return commandStopSmoothZoomL(); 1338 case CAMERA_CMD_SET_DISPLAY_ORIENTATION: 1339 return commandSetDisplayOrientationL(arg1); 1340 case 
CAMERA_CMD_ENABLE_SHUTTER_SOUND: 1341 return commandEnableShutterSoundL(arg1 == 1); 1342 case CAMERA_CMD_PLAY_RECORDING_SOUND: 1343 return commandPlayRecordingSoundL(); 1344 case CAMERA_CMD_START_FACE_DETECTION: 1345 return commandStartFaceDetectionL(arg1); 1346 case CAMERA_CMD_STOP_FACE_DETECTION: { 1347 SharedParameters::Lock l(mParameters); 1348 return commandStopFaceDetectionL(l.mParameters); 1349 } 1350 case CAMERA_CMD_ENABLE_FOCUS_MOVE_MSG: 1351 return commandEnableFocusMoveMsgL(arg1 == 1); 1352 case CAMERA_CMD_PING: 1353 return commandPingL(); 1354 case CAMERA_CMD_SET_VIDEO_BUFFER_COUNT: 1355 return commandSetVideoBufferCountL(arg1); 1356 default: 1357 ALOGE("%s: Unknown command %d (arguments %d, %d)", 1358 __FUNCTION__, cmd, arg1, arg2); 1359 return BAD_VALUE; 1360 } 1361} 1362 1363status_t Camera2Client::commandStartSmoothZoomL() { 1364 ALOGE("%s: Unimplemented!", __FUNCTION__); 1365 return OK; 1366} 1367 1368status_t Camera2Client::commandStopSmoothZoomL() { 1369 ALOGE("%s: Unimplemented!", __FUNCTION__); 1370 return OK; 1371} 1372 1373status_t Camera2Client::commandSetDisplayOrientationL(int degrees) { 1374 int transform = Parameters::degToTransform(degrees, 1375 mCameraFacing == CAMERA_FACING_FRONT); 1376 if (transform == -1) { 1377 ALOGE("%s: Camera %d: Error setting %d as display orientation value", 1378 __FUNCTION__, mCameraId, degrees); 1379 return BAD_VALUE; 1380 } 1381 SharedParameters::Lock l(mParameters); 1382 if (transform != l.mParameters.previewTransform && 1383 getPreviewStreamId() != NO_STREAM) { 1384 mDevice->setStreamTransform(getPreviewStreamId(), transform); 1385 } 1386 l.mParameters.previewTransform = transform; 1387 return OK; 1388} 1389 1390status_t Camera2Client::commandEnableShutterSoundL(bool enable) { 1391 SharedParameters::Lock l(mParameters); 1392 if (enable) { 1393 l.mParameters.playShutterSound = true; 1394 return OK; 1395 } 1396 1397 // Disabling shutter sound may not be allowed. 
In that case only 1398 // allow the mediaserver process to disable the sound. 1399 char value[PROPERTY_VALUE_MAX]; 1400 property_get("ro.camera.sound.forced", value, "0"); 1401 if (strncmp(value, "0", 2) != 0) { 1402 // Disabling shutter sound is not allowed. Deny if the current 1403 // process is not mediaserver. 1404 if (getCallingPid() != getpid()) { 1405 ALOGE("Failed to disable shutter sound. Permission denied (pid %d)", 1406 getCallingPid()); 1407 return PERMISSION_DENIED; 1408 } 1409 } 1410 1411 l.mParameters.playShutterSound = false; 1412 return OK; 1413} 1414 1415status_t Camera2Client::commandPlayRecordingSoundL() { 1416 mCameraService->playSound(CameraService::SOUND_RECORDING); 1417 return OK; 1418} 1419 1420status_t Camera2Client::commandStartFaceDetectionL(int /*type*/) { 1421 ALOGV("%s: Camera %d: Starting face detection", 1422 __FUNCTION__, mCameraId); 1423 status_t res; 1424 SharedParameters::Lock l(mParameters); 1425 switch (l.mParameters.state) { 1426 case Parameters::DISCONNECTED: 1427 case Parameters::STOPPED: 1428 case Parameters::WAITING_FOR_PREVIEW_WINDOW: 1429 case Parameters::STILL_CAPTURE: 1430 ALOGE("%s: Camera %d: Cannot start face detection without preview active", 1431 __FUNCTION__, mCameraId); 1432 return INVALID_OPERATION; 1433 case Parameters::PREVIEW: 1434 case Parameters::RECORD: 1435 case Parameters::VIDEO_SNAPSHOT: 1436 // Good to go for starting face detect 1437 break; 1438 } 1439 // Ignoring type 1440 if (l.mParameters.fastInfo.bestFaceDetectMode == 1441 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) { 1442 ALOGE("%s: Camera %d: Face detection not supported", 1443 __FUNCTION__, mCameraId); 1444 return BAD_VALUE; 1445 } 1446 if (l.mParameters.enableFaceDetect) return OK; 1447 1448 l.mParameters.enableFaceDetect = true; 1449 1450 res = updateRequests(l.mParameters); 1451 1452 return res; 1453} 1454 1455status_t Camera2Client::commandStopFaceDetectionL(Parameters ¶ms) { 1456 status_t res = OK; 1457 ALOGV("%s: Camera %d: Stopping face 
detection", 1458 __FUNCTION__, mCameraId); 1459 1460 if (!params.enableFaceDetect) return OK; 1461 1462 params.enableFaceDetect = false; 1463 1464 if (params.state == Parameters::PREVIEW 1465 || params.state == Parameters::RECORD 1466 || params.state == Parameters::VIDEO_SNAPSHOT) { 1467 res = updateRequests(params); 1468 } 1469 1470 return res; 1471} 1472 1473status_t Camera2Client::commandEnableFocusMoveMsgL(bool enable) { 1474 SharedParameters::Lock l(mParameters); 1475 l.mParameters.enableFocusMoveMessages = enable; 1476 1477 return OK; 1478} 1479 1480status_t Camera2Client::commandPingL() { 1481 // Always ping back if access is proper and device is alive 1482 SharedParameters::Lock l(mParameters); 1483 if (l.mParameters.state != Parameters::DISCONNECTED) { 1484 return OK; 1485 } else { 1486 return NO_INIT; 1487 } 1488} 1489 1490status_t Camera2Client::commandSetVideoBufferCountL(size_t count) { 1491 if (recordingEnabledL()) { 1492 ALOGE("%s: Camera %d: Error setting video buffer count after " 1493 "recording was started", __FUNCTION__, mCameraId); 1494 return INVALID_OPERATION; 1495 } 1496 1497 return mStreamingProcessor->setRecordingBufferCount(count); 1498} 1499 1500/** Device-related methods */ 1501void Camera2Client::notifyAutoFocus(uint8_t newState, int triggerId) { 1502 ALOGV("%s: Autofocus state now %d, last trigger %d", 1503 __FUNCTION__, newState, triggerId); 1504 bool sendCompletedMessage = false; 1505 bool sendMovingMessage = false; 1506 1507 bool success = false; 1508 bool afInMotion = false; 1509 { 1510 SharedParameters::Lock l(mParameters); 1511 // Trace end of AF state 1512 char tmp[32]; 1513 if (l.mParameters.afStateCounter > 0) { 1514 camera_metadata_enum_snprint( 1515 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp)); 1516 ATRACE_ASYNC_END(tmp, l.mParameters.afStateCounter); 1517 } 1518 1519 // Update state 1520 l.mParameters.focusState = newState; 1521 l.mParameters.afStateCounter++; 1522 1523 // Trace start of AF state 
1524 1525 camera_metadata_enum_snprint( 1526 ANDROID_CONTROL_AF_STATE, l.mParameters.focusState, tmp, sizeof(tmp)); 1527 ATRACE_ASYNC_BEGIN(tmp, l.mParameters.afStateCounter); 1528 1529 switch (l.mParameters.focusMode) { 1530 case Parameters::FOCUS_MODE_AUTO: 1531 case Parameters::FOCUS_MODE_MACRO: 1532 // Don't send notifications upstream if they're not for the current AF 1533 // trigger. For example, if cancel was called in between, or if we 1534 // already sent a notification about this AF call. 1535 if (triggerId != l.mParameters.currentAfTriggerId) break; 1536 switch (newState) { 1537 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED: 1538 success = true; 1539 // no break 1540 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: 1541 sendCompletedMessage = true; 1542 l.mParameters.currentAfTriggerId = -1; 1543 break; 1544 case ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN: 1545 // Just starting focusing, ignore 1546 break; 1547 case ANDROID_CONTROL_AF_STATE_INACTIVE: 1548 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN: 1549 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: 1550 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED: 1551 default: 1552 // Unexpected in AUTO/MACRO mode 1553 ALOGE("%s: Unexpected AF state transition in AUTO/MACRO mode: %d", 1554 __FUNCTION__, newState); 1555 break; 1556 } 1557 break; 1558 case Parameters::FOCUS_MODE_CONTINUOUS_VIDEO: 1559 case Parameters::FOCUS_MODE_CONTINUOUS_PICTURE: 1560 switch (newState) { 1561 case ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED: 1562 success = true; 1563 // no break 1564 case ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED: 1565 // Don't send notifications upstream if they're not for 1566 // the current AF trigger. For example, if cancel was 1567 // called in between, or if we already sent a 1568 // notification about this AF call. 
1569 // Send both a 'AF done' callback and a 'AF move' callback 1570 if (triggerId != l.mParameters.currentAfTriggerId) break; 1571 sendCompletedMessage = true; 1572 afInMotion = false; 1573 if (l.mParameters.enableFocusMoveMessages && 1574 l.mParameters.afInMotion) { 1575 sendMovingMessage = true; 1576 } 1577 l.mParameters.currentAfTriggerId = -1; 1578 break; 1579 case ANDROID_CONTROL_AF_STATE_INACTIVE: 1580 // Cancel was called, or we switched state; care if 1581 // currently moving 1582 afInMotion = false; 1583 if (l.mParameters.enableFocusMoveMessages && 1584 l.mParameters.afInMotion) { 1585 sendMovingMessage = true; 1586 } 1587 break; 1588 case ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN: 1589 // Start passive scan, inform upstream 1590 afInMotion = true; 1591 // no break 1592 case ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED: 1593 case ANDROID_CONTROL_AF_STATE_PASSIVE_UNFOCUSED: 1594 // Stop passive scan, inform upstream 1595 if (l.mParameters.enableFocusMoveMessages) { 1596 sendMovingMessage = true; 1597 } 1598 break; 1599 } 1600 l.mParameters.afInMotion = afInMotion; 1601 break; 1602 case Parameters::FOCUS_MODE_EDOF: 1603 case Parameters::FOCUS_MODE_INFINITY: 1604 case Parameters::FOCUS_MODE_FIXED: 1605 default: 1606 if (newState != ANDROID_CONTROL_AF_STATE_INACTIVE) { 1607 ALOGE("%s: Unexpected AF state change %d " 1608 "(ID %d) in focus mode %d", 1609 __FUNCTION__, newState, triggerId, 1610 l.mParameters.focusMode); 1611 } 1612 } 1613 } 1614 if (sendMovingMessage) { 1615 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); 1616 if (l.mRemoteCallback != 0) { 1617 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS_MOVE, 1618 afInMotion ? 1 : 0, 0); 1619 } 1620 } 1621 if (sendCompletedMessage) { 1622 ATRACE_ASYNC_END(kAutofocusLabel, triggerId); 1623 SharedCameraCallbacks::Lock l(mSharedCameraCallbacks); 1624 if (l.mRemoteCallback != 0) { 1625 l.mRemoteCallback->notifyCallback(CAMERA_MSG_FOCUS, 1626 success ? 
1 : 0, 0); 1627 } 1628 } 1629} 1630 1631void Camera2Client::notifyAutoExposure(uint8_t newState, int triggerId) { 1632 ALOGV("%s: Autoexposure state now %d, last trigger %d", 1633 __FUNCTION__, newState, triggerId); 1634 mCaptureSequencer->notifyAutoExposure(newState, triggerId); 1635} 1636 1637camera2::SharedParameters& Camera2Client::getParameters() { 1638 return mParameters; 1639} 1640 1641int Camera2Client::getPreviewStreamId() const { 1642 return mStreamingProcessor->getPreviewStreamId(); 1643} 1644 1645int Camera2Client::getCaptureStreamId() const { 1646 return mJpegProcessor->getStreamId(); 1647} 1648 1649int Camera2Client::getCallbackStreamId() const { 1650 return mCallbackProcessor->getStreamId(); 1651} 1652 1653int Camera2Client::getRecordingStreamId() const { 1654 return mStreamingProcessor->getRecordingStreamId(); 1655} 1656 1657int Camera2Client::getZslStreamId() const { 1658 return mZslProcessor->getStreamId(); 1659} 1660 1661status_t Camera2Client::registerFrameListener(int32_t minId, int32_t maxId, 1662 wp<camera2::FrameProcessor::FilteredListener> listener) { 1663 return mFrameProcessor->registerListener(minId, maxId, listener); 1664} 1665 1666status_t Camera2Client::removeFrameListener(int32_t minId, int32_t maxId, 1667 wp<camera2::FrameProcessor::FilteredListener> listener) { 1668 return mFrameProcessor->removeListener(minId, maxId, listener); 1669} 1670 1671status_t Camera2Client::stopStream() { 1672 return mStreamingProcessor->stopStream(); 1673} 1674 1675const int32_t Camera2Client::kPreviewRequestIdStart; 1676const int32_t Camera2Client::kPreviewRequestIdEnd; 1677const int32_t Camera2Client::kRecordingRequestIdStart; 1678const int32_t Camera2Client::kRecordingRequestIdEnd; 1679const int32_t Camera2Client::kCaptureRequestIdStart; 1680const int32_t Camera2Client::kCaptureRequestIdEnd; 1681 1682/** Utility methods */ 1683 1684status_t Camera2Client::updateRequests(Parameters ¶ms) { 1685 status_t res; 1686 1687 ALOGV("%s: Camera %d: state = %d", 
__FUNCTION__, getCameraId(), params.state); 1688 1689 res = mStreamingProcessor->incrementStreamingIds(); 1690 if (res != OK) { 1691 ALOGE("%s: Camera %d: Unable to increment request IDs: %s (%d)", 1692 __FUNCTION__, mCameraId, strerror(-res), res); 1693 return res; 1694 } 1695 1696 res = mStreamingProcessor->updatePreviewRequest(params); 1697 if (res != OK) { 1698 ALOGE("%s: Camera %d: Unable to update preview request: %s (%d)", 1699 __FUNCTION__, mCameraId, strerror(-res), res); 1700 return res; 1701 } 1702 res = mStreamingProcessor->updateRecordingRequest(params); 1703 if (res != OK) { 1704 ALOGE("%s: Camera %d: Unable to update recording request: %s (%d)", 1705 __FUNCTION__, mCameraId, strerror(-res), res); 1706 return res; 1707 } 1708 1709 if (params.state == Parameters::PREVIEW) { 1710 res = startPreviewL(params, true); 1711 if (res != OK) { 1712 ALOGE("%s: Camera %d: Error streaming new preview request: %s (%d)", 1713 __FUNCTION__, mCameraId, strerror(-res), res); 1714 return res; 1715 } 1716 } else if (params.state == Parameters::RECORD || 1717 params.state == Parameters::VIDEO_SNAPSHOT) { 1718 res = startRecordingL(params, true); 1719 if (res != OK) { 1720 ALOGE("%s: Camera %d: Error streaming new record request: %s (%d)", 1721 __FUNCTION__, mCameraId, strerror(-res), res); 1722 return res; 1723 } 1724 } 1725 return res; 1726} 1727 1728 1729size_t Camera2Client::calculateBufferSize(int width, int height, 1730 int format, int stride) { 1731 switch (format) { 1732 case HAL_PIXEL_FORMAT_YCbCr_422_SP: // NV16 1733 return width * height * 2; 1734 case HAL_PIXEL_FORMAT_YCrCb_420_SP: // NV21 1735 return width * height * 3 / 2; 1736 case HAL_PIXEL_FORMAT_YCbCr_422_I: // YUY2 1737 return width * height * 2; 1738 case HAL_PIXEL_FORMAT_YV12: { // YV12 1739 size_t ySize = stride * height; 1740 size_t uvStride = (stride / 2 + 0xF) & ~0xF; 1741 size_t uvSize = uvStride * height / 2; 1742 return ySize + uvSize * 2; 1743 } 1744 case HAL_PIXEL_FORMAT_RGB_565: 1745 return 
width * height * 2; 1746 case HAL_PIXEL_FORMAT_RGBA_8888: 1747 return width * height * 4; 1748 case HAL_PIXEL_FORMAT_RAW_SENSOR: 1749 return width * height * 2; 1750 default: 1751 ALOGE("%s: Unknown preview format: %x", 1752 __FUNCTION__, format); 1753 return 0; 1754 } 1755} 1756 1757status_t Camera2Client::syncWithDevice() { 1758 ATRACE_CALL(); 1759 const nsecs_t kMaxSyncTimeout = 500000000; // 500 ms 1760 status_t res; 1761 1762 int32_t activeRequestId = mStreamingProcessor->getActiveRequestId(); 1763 if (activeRequestId == 0) return OK; 1764 1765 res = mDevice->waitUntilRequestReceived(activeRequestId, kMaxSyncTimeout); 1766 if (res == TIMED_OUT) { 1767 ALOGE("%s: Camera %d: Timed out waiting sync with HAL", 1768 __FUNCTION__, mCameraId); 1769 } else if (res != OK) { 1770 ALOGE("%s: Camera %d: Error while waiting to sync with HAL", 1771 __FUNCTION__, mCameraId); 1772 } 1773 return res; 1774} 1775 1776template <typename ProcessorT> 1777status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor, 1778 camera2::Parameters params) { 1779 // No default template arguments until C++11, so we need this overload 1780 return updateProcessorStream<ProcessorT, &ProcessorT::updateStream>( 1781 processor, params); 1782} 1783 1784template <typename ProcessorT, 1785 status_t (ProcessorT::*updateStreamF)(const Parameters &)> 1786status_t Camera2Client::updateProcessorStream(sp<ProcessorT> processor, 1787 Parameters params) { 1788 status_t res; 1789 1790 // Get raw pointer since sp<T> doesn't have operator->* 1791 ProcessorT *processorPtr = processor.get(); 1792 res = (processorPtr->*updateStreamF)(params); 1793 1794 /** 1795 * Can't update the stream if it's busy? 1796 * 1797 * Then we need to stop the device (by temporarily clearing the request 1798 * queue) and then try again. Resume streaming once we're done. 
1799 */ 1800 if (res == -EBUSY) { 1801 ALOGV("%s: Camera %d: Pausing to update stream", __FUNCTION__, 1802 mCameraId); 1803 res = mStreamingProcessor->togglePauseStream(/*pause*/true); 1804 if (res != OK) { 1805 ALOGE("%s: Camera %d: Can't pause streaming: %s (%d)", 1806 __FUNCTION__, mCameraId, strerror(-res), res); 1807 } 1808 1809 res = mDevice->waitUntilDrained(); 1810 if (res != OK) { 1811 ALOGE("%s: Camera %d: Waiting to stop streaming failed: %s (%d)", 1812 __FUNCTION__, mCameraId, strerror(-res), res); 1813 } 1814 1815 res = (processorPtr->*updateStreamF)(params); 1816 if (res != OK) { 1817 ALOGE("%s: Camera %d: Failed to update processing stream " 1818 " despite having halted streaming first: %s (%d)", 1819 __FUNCTION__, mCameraId, strerror(-res), res); 1820 } 1821 1822 res = mStreamingProcessor->togglePauseStream(/*pause*/false); 1823 if (res != OK) { 1824 ALOGE("%s: Camera %d: Can't unpause streaming: %s (%d)", 1825 __FUNCTION__, mCameraId, strerror(-res), res); 1826 } 1827 } 1828 1829 return res; 1830} 1831 1832const char* Camera2Client::kAutofocusLabel = "autofocus"; 1833const char* Camera2Client::kTakepictureLabel = "take_picture"; 1834 1835} // namespace android 1836