ExternalCameraDeviceSession.cpp revision 190e5601d6efdac6134fdf626001590015a4a255
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#define LOG_TAG "ExtCamDevSsn@3.4"
//#define LOG_NDEBUG 0
#include <log/log.h>

#include <inttypes.h>
#include "ExternalCameraDeviceSession.h"

#include "android-base/macros.h"
#include <utils/Timers.h>
#include <linux/videodev2.h>
#include <sync/sync.h>

#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs
#include <libyuv.h>

#include <jpeglib.h>


namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace V3_4 {
namespace implementation {

namespace {
// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;

const int kBadFramesAfterStreamOn = 1; // drop x frames after streamOn to get rid of some initial
                                       // bad frames. TODO: develop a better bad frame detection
                                       // method

} // Anonymous namespace

// Static instances
const int ExternalCameraDeviceSession::kMaxProcessedStream;
const int ExternalCameraDeviceSession::kMaxStallStream;
HandleImporter ExternalCameraDeviceSession::sHandleImporter;

ExternalCameraDeviceSession::ExternalCameraDeviceSession(
        const sp<ICameraDeviceCallback>& callback,
        const ExternalCameraConfig& cfg,
        const std::vector<SupportedV4L2Format>& sortedFormats,
        const CroppingType& croppingType,
        const common::V1_0::helper::CameraMetadata& chars,
        unique_fd v4l2Fd) :
        mCallback(callback),
        mCfg(cfg),
        mCameraCharacteristics(chars),
        mSupportedFormats(sortedFormats),
        mCroppingType(croppingType),
        mV4l2Fd(std::move(v4l2Fd)),
        mOutputThread(new OutputThread(this, mCroppingType)),
        mMaxThumbResolution(getMaxThumbResolution()),
        mMaxJpegResolution(getMaxJpegResolution()) {
    mInitFail = initialize();
}

bool ExternalCameraDeviceSession::initialize() {
    if (mV4l2Fd.get() < 0) {
        ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get());
        return true;
    }

    status_t status = initDefaultRequests();
    if (status != OK) {
        ALOGE("%s: init default requests failed!", __FUNCTION__);
        return true;
    }

    mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
            kMetadataMsgQueueSize, false /* non blocking */);
    if (!mRequestMetadataQueue->isValid()) {
        ALOGE("%s: invalid request fmq", __FUNCTION__);
        return true;
    }
    mResultMetadataQueue = std::make_shared<RequestMetadataQueue>(
            kMetadataMsgQueueSize, false /* non blocking */);
    if (!mResultMetadataQueue->isValid()) {
        ALOGE("%s: invalid result fmq", __FUNCTION__);
        return true;
    }

    // TODO: check whether PRIORITY_DISPLAY is enough
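    // Note: all per-frame work (V4L2 dequeue, MJPEG decode, crop/scale and
    // JPEG encode) happens on this single OutputThread, so the HIDL request
    // path above stays lightweight.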
    mOutputThread->run("ExtCamOut", PRIORITY_DISPLAY);
    return false;
}

Status ExternalCameraDeviceSession::initStatus() const {
    Mutex::Autolock _l(mLock);
    Status status = Status::OK;
    if (mInitFail || mClosed) {
        ALOGI("%s: session initFailed %d closed %d", __FUNCTION__, mInitFail, mClosed);
        status = Status::INTERNAL_ERROR;
    }
    return status;
}

ExternalCameraDeviceSession::~ExternalCameraDeviceSession() {
    if (!isClosed()) {
        ALOGE("ExternalCameraDeviceSession deleted before close!");
        close();
    }
}

void ExternalCameraDeviceSession::dumpState(const native_handle_t*) {
    // TODO: b/72261676 dump more runtime information
}

Return<void> ExternalCameraDeviceSession::constructDefaultRequestSettings(
        V3_2::RequestTemplate type,
        V3_2::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) {
    V3_2::CameraMetadata outMetadata;
    Status status = constructDefaultRequestSettingsRaw(
            static_cast<RequestTemplate>(type), &outMetadata);
    _hidl_cb(status, outMetadata);
    return Void();
}

Return<void> ExternalCameraDeviceSession::constructDefaultRequestSettings_3_4(
        RequestTemplate type,
        ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) {
    V3_2::CameraMetadata outMetadata;
    Status status = constructDefaultRequestSettingsRaw(type, &outMetadata);
    _hidl_cb(status, outMetadata);
    return Void();
}

Status ExternalCameraDeviceSession::constructDefaultRequestSettingsRaw(RequestTemplate type,
        V3_2::CameraMetadata *outMetadata) {
    CameraMetadata emptyMd;
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }

    switch (type) {
        case RequestTemplate::PREVIEW:
        case RequestTemplate::STILL_CAPTURE:
        case RequestTemplate::VIDEO_RECORD:
        case RequestTemplate::VIDEO_SNAPSHOT: {
            *outMetadata = mDefaultRequests[type];
            break;
        }
        case RequestTemplate::MANUAL:
        case RequestTemplate::ZERO_SHUTTER_LAG:
        case RequestTemplate::MOTION_TRACKING_PREVIEW:
        case RequestTemplate::MOTION_TRACKING_BEST:
            // Don't support MANUAL, ZSL, MOTION_TRACKING_* templates
            status = Status::ILLEGAL_ARGUMENT;
            break;
        default:
            ALOGE("%s: unknown request template type %d", __FUNCTION__, static_cast<int>(type));
            status = Status::ILLEGAL_ARGUMENT;
            break;
    }
    return status;
}

Return<void> ExternalCameraDeviceSession::configureStreams(
        const V3_2::StreamConfiguration& streams,
        ICameraDeviceSession::configureStreams_cb _hidl_cb) {
    V3_2::HalStreamConfiguration outStreams;
    V3_3::HalStreamConfiguration outStreams_v33;
    Mutex::Autolock _il(mInterfaceLock);

    Status status = configureStreams(streams, &outStreams_v33);
    size_t size = outStreams_v33.streams.size();
    outStreams.streams.resize(size);
    for (size_t i = 0; i < size; i++) {
        outStreams.streams[i] = outStreams_v33.streams[i].v3_2;
    }
    _hidl_cb(status, outStreams);
    return Void();
}

Return<void> ExternalCameraDeviceSession::configureStreams_3_3(
        const V3_2::StreamConfiguration& streams,
        ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb) {
    V3_3::HalStreamConfiguration outStreams;
    Mutex::Autolock _il(mInterfaceLock);

    Status status = configureStreams(streams, &outStreams);
    _hidl_cb(status, outStreams);
    return Void();
}

Return<void> ExternalCameraDeviceSession::configureStreams_3_4(
        const V3_4::StreamConfiguration& requestedConfiguration,
        ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb) {
    V3_2::StreamConfiguration config_v32;
    V3_3::HalStreamConfiguration outStreams_v33;
    Mutex::Autolock _il(mInterfaceLock);

    config_v32.operationMode = requestedConfiguration.operationMode;
    config_v32.streams.resize(requestedConfiguration.streams.size());
    for (size_t i = 0; i < config_v32.streams.size(); i++) {
        config_v32.streams[i] = requestedConfiguration.streams[i].v3_2;
    }

    // Ignore requestedConfiguration.sessionParams. External camera does not support it
    Status status = configureStreams(config_v32, &outStreams_v33);

    V3_4::HalStreamConfiguration outStreams;
    outStreams.streams.resize(outStreams_v33.streams.size());
    for (size_t i = 0; i < outStreams.streams.size(); i++) {
        outStreams.streams[i].v3_3 = outStreams_v33.streams[i];
    }
    _hidl_cb(status, outStreams);
    return Void();
}

Return<void> ExternalCameraDeviceSession::getCaptureRequestMetadataQueue(
        ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    _hidl_cb(*mRequestMetadataQueue->getDesc());
    return Void();
}

Return<void> ExternalCameraDeviceSession::getCaptureResultMetadataQueue(
        ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    _hidl_cb(*mResultMetadataQueue->getDesc());
    return Void();
}

Return<void> ExternalCameraDeviceSession::processCaptureRequest(
        const hidl_vec<CaptureRequest>& requests,
        const hidl_vec<BufferCache>& cachesToRemove,
        ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    updateBufferCaches(cachesToRemove);

    uint32_t numRequestProcessed = 0;
    Status s = Status::OK;
    for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) {
        s = processOneCaptureRequest(requests[i]);
        if (s != Status::OK) {
            break;
        }
    }

    _hidl_cb(s, numRequestProcessed);
    return Void();
}

Return<void> ExternalCameraDeviceSession::processCaptureRequest_3_4(
        const hidl_vec<V3_4::CaptureRequest>& requests,
        const hidl_vec<V3_2::BufferCache>& cachesToRemove,
        ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    updateBufferCaches(cachesToRemove);

    uint32_t numRequestProcessed = 0;
    Status s = Status::OK;
    for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) {
        s = processOneCaptureRequest(requests[i].v3_2);
        if (s != Status::OK) {
            break;
        }
    }

    _hidl_cb(s, numRequestProcessed);
    return Void();
}

Return<Status> ExternalCameraDeviceSession::flush() {
    Mutex::Autolock _il(mInterfaceLock);
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }
    mOutputThread->flush();
    return Status::OK;
}

Return<void> ExternalCameraDeviceSession::close() {
    Mutex::Autolock _il(mInterfaceLock);
    Mutex::Autolock _l(mLock);
    if (!mClosed) {
        // TODO: b/72261676 Cleanup inflight buffers/V4L2 buffer queue
        ALOGV("%s: closing V4L2 camera FD %d", __FUNCTION__, mV4l2Fd.get());
        mV4l2Fd.reset();
        mOutputThread->requestExit(); // TODO: join?

        // free all imported buffers
        for (auto& pair : mCirculatingBuffers) {
            CirculatingBuffers& buffers = pair.second;
            for (auto& p2 : buffers) {
                sHandleImporter.freeBuffer(p2.second);
            }
        }

        mClosed = true;
    }
    return Void();
}

Status ExternalCameraDeviceSession::importRequest(
        const CaptureRequest& request,
        hidl_vec<buffer_handle_t*>& allBufPtrs,
        hidl_vec<int>& allFences) {
    size_t numOutputBufs = request.outputBuffers.size();
    size_t numBufs = numOutputBufs;
    // Validate all I/O buffers
    hidl_vec<buffer_handle_t> allBufs;
    hidl_vec<uint64_t> allBufIds;
    allBufs.resize(numBufs);
    allBufIds.resize(numBufs);
    allBufPtrs.resize(numBufs);
    allFences.resize(numBufs);
    std::vector<int32_t> streamIds(numBufs);

    for (size_t i = 0; i < numOutputBufs; i++) {
        allBufs[i] = request.outputBuffers[i].buffer.getNativeHandle();
        allBufIds[i] = request.outputBuffers[i].bufferId;
        allBufPtrs[i] = &allBufs[i];
        streamIds[i] = request.outputBuffers[i].streamId;
    }

    for (size_t i = 0; i < numBufs; i++) {
        buffer_handle_t buf = allBufs[i];
        uint64_t bufId = allBufIds[i];
        CirculatingBuffers& cbs = mCirculatingBuffers[streamIds[i]];
        if (cbs.count(bufId) == 0) {
            if (buf == nullptr) {
                ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId);
                return Status::ILLEGAL_ARGUMENT;
            }
            // Register a newly seen buffer
            buffer_handle_t importedBuf = buf;
            sHandleImporter.importBuffer(importedBuf);
            if (importedBuf == nullptr) {
                ALOGE("%s: output buffer %zu is invalid!", __FUNCTION__, i);
                return Status::INTERNAL_ERROR;
            } else {
                cbs[bufId] = importedBuf;
            }
        }
        allBufPtrs[i] = &cbs[bufId];
    }

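    // Note: mCirculatingBuffers acts as a per-stream cache keyed by bufferId.
    // The framework only sends a valid buffer handle the first time a given
    // bufferId appears on a stream; later requests may carry the same id with
    // a null handle, so lookups must hit the imported handle cached above.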
    // All buffers are imported. Now validate output buffer acquire fences
    for (size_t i = 0; i < numOutputBufs; i++) {
        if (!sHandleImporter.importFence(
                request.outputBuffers[i].acquireFence, allFences[i])) {
            ALOGE("%s: output buffer %zu acquire fence is invalid", __FUNCTION__, i);
            cleanupInflightFences(allFences, i);
            return Status::INTERNAL_ERROR;
        }
    }
    return Status::OK;
}

void ExternalCameraDeviceSession::cleanupInflightFences(
        hidl_vec<int>& allFences, size_t numFences) {
    for (size_t j = 0; j < numFences; j++) {
        sHandleImporter.closeFence(allFences[j]);
    }
}

Status ExternalCameraDeviceSession::processOneCaptureRequest(const CaptureRequest& request) {
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }

    if (request.inputBuffer.streamId != -1) {
        ALOGE("%s: external camera does not support reprocessing!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    Mutex::Autolock _l(mLock);
    if (!mV4l2Streaming) {
        ALOGE("%s: cannot process request in streamOff state!", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }

    const camera_metadata_t *rawSettings = nullptr;
    bool converted = true;
    CameraMetadata settingsFmq; // settings from FMQ
    if (request.fmqSettingsSize > 0) {
        // non-blocking read; client must write metadata before calling
        // processOneCaptureRequest
        settingsFmq.resize(request.fmqSettingsSize);
        bool read = mRequestMetadataQueue->read(settingsFmq.data(), request.fmqSettingsSize);
        if (read) {
            converted = V3_2::implementation::convertFromHidl(settingsFmq, &rawSettings);
        } else {
            ALOGE("%s: capture request settings metadata couldn't be read from fmq!",
                    __FUNCTION__);
            converted = false;
        }
    } else {
        converted = V3_2::implementation::convertFromHidl(request.settings, &rawSettings);
    }

    if (converted && rawSettings != nullptr) {
        mLatestReqSetting = rawSettings;
    }

    if (!converted) {
        ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    if (mFirstRequest && rawSettings == nullptr) {
        ALOGE("%s: capture request settings must not be null for first request!",
                __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    hidl_vec<buffer_handle_t*> allBufPtrs;
    hidl_vec<int> allFences;
    size_t numOutputBufs = request.outputBuffers.size();

    if (numOutputBufs == 0) {
        ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    status = importRequest(request, allBufPtrs, allFences);
    if (status != Status::OK) {
        return status;
    }

    // TODO: program fps range per capture request here
    //       or limit the set of availableFpsRange

    sp<V4L2Frame> frameIn = dequeueV4l2FrameLocked();
    if (frameIn == nullptr) {
        ALOGE("%s: V4L2 frame dequeue failed!", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }
    // TODO: This can probably be replaced by using the v4l buffer timestamp
    //       if the device supports it
    nsecs_t shutterTs = systemTime(SYSTEM_TIME_MONOTONIC);


    // TODO: reduce object copy in this path
    HalRequest halReq = {
            .frameNumber = request.frameNumber,
            .setting = mLatestReqSetting,
            .frameIn = frameIn,
            .shutterTs = shutterTs};
    halReq.buffers.resize(numOutputBufs);
    for (size_t i = 0; i < numOutputBufs; i++) {
        HalStreamBuffer& halBuf = halReq.buffers[i];
        int streamId = halBuf.streamId = request.outputBuffers[i].streamId;
        halBuf.bufferId = request.outputBuffers[i].bufferId;
        const Stream& stream = mStreamMap[streamId];
        halBuf.width = stream.width;
        halBuf.height = stream.height;
        halBuf.format = stream.format;
        halBuf.usage = stream.usage;
        halBuf.bufPtr = allBufPtrs[i];
        halBuf.acquireFence = allFences[i];
        halBuf.fenceTimeout = false;
    }
    mInflightFrames.insert(halReq.frameNumber);
    // Send request to OutputThread for the rest of processing
    mOutputThread->submitRequest(halReq);
    mFirstRequest = false;
    return Status::OK;
}

void ExternalCameraDeviceSession::notifyShutter(uint32_t frameNumber, nsecs_t shutterTs) {
    NotifyMsg msg;
    msg.type = MsgType::SHUTTER;
    msg.msg.shutter.frameNumber = frameNumber;
    msg.msg.shutter.timestamp = shutterTs;
    mCallback->notify({msg});
}

void ExternalCameraDeviceSession::notifyError(
        uint32_t frameNumber, int32_t streamId, ErrorCode ec) {
    NotifyMsg msg;
    msg.type = MsgType::ERROR;
    msg.msg.error.frameNumber = frameNumber;
    msg.msg.error.errorStreamId = streamId;
    msg.msg.error.errorCode = ec;
    mCallback->notify({msg});
}

// TODO: refactor with processCaptureResult
Status ExternalCameraDeviceSession::processCaptureRequestError(const HalRequest& req) {
    // Return V4L2 buffer to V4L2 buffer queue
    enqueueV4l2Frame(req.frameIn);

    // NotifyShutter
    notifyShutter(req.frameNumber, req.shutterTs);

    notifyError(/*frameNum*/req.frameNumber, /*stream*/-1, ErrorCode::ERROR_REQUEST);

    // Fill output buffers
    hidl_vec<CaptureResult> results;
    results.resize(1);
    CaptureResult& result = results[0];
    result.frameNumber = req.frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req.buffers.size());
    for (size_t i = 0; i < req.buffers.size(); i++) {
        result.outputBuffers[i].streamId = req.buffers[i].streamId;
        result.outputBuffers[i].bufferId = req.buffers[i].bufferId;
        result.outputBuffers[i].status = BufferStatus::ERROR;
        if (req.buffers[i].acquireFence >= 0) {
            native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
            handle->data[0] = req.buffers[i].acquireFence;
            result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
        }
    }

    // update inflight records
    {
        Mutex::Autolock _l(mLock);
        mInflightFrames.erase(req.frameNumber);
    }

    // Callback into framework
    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true);
    freeReleaseFences(results);
    return Status::OK;
}

Status ExternalCameraDeviceSession::processCaptureResult(HalRequest& req) {
    // Return V4L2 buffer to V4L2 buffer queue
    enqueueV4l2Frame(req.frameIn);

    // NotifyShutter
    notifyShutter(req.frameNumber, req.shutterTs);

    // Fill output buffers
    hidl_vec<CaptureResult> results;
    results.resize(1);
    CaptureResult& result = results[0];
    result.frameNumber = req.frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req.buffers.size());
    for (size_t i = 0; i < req.buffers.size(); i++) {
        result.outputBuffers[i].streamId = req.buffers[i].streamId;
        result.outputBuffers[i].bufferId = req.buffers[i].bufferId;
        if (req.buffers[i].fenceTimeout) {
            result.outputBuffers[i].status = BufferStatus::ERROR;
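            // Note: the unwaited acquire fence is handed back to the framework
            // as the release fence. shouldOwn is false because the fd stays
            // owned by this process until freeReleaseFences() closes it after
            // the result callback returns.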
            native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
            handle->data[0] = req.buffers[i].acquireFence;
            result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
            notifyError(req.frameNumber, req.buffers[i].streamId, ErrorCode::ERROR_BUFFER);
        } else {
            result.outputBuffers[i].status = BufferStatus::OK;
            // TODO: refactor
            if (req.buffers[i].acquireFence > 0) {
                native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
                handle->data[0] = req.buffers[i].acquireFence;
                result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
            }
        }
    }

    // Fill capture result metadata
    fillCaptureResult(req.setting, req.shutterTs);
    const camera_metadata_t *rawResult = req.setting.getAndLock();
    V3_2::implementation::convertToHidl(rawResult, &result.result);
    req.setting.unlock(rawResult);

    // update inflight records
    {
        Mutex::Autolock _l(mLock);
        mInflightFrames.erase(req.frameNumber);
    }

    // Callback into framework
    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true);
    freeReleaseFences(results);
    return Status::OK;
}

void ExternalCameraDeviceSession::invokeProcessCaptureResultCallback(
        hidl_vec<CaptureResult> &results, bool tryWriteFmq) {
    if (mProcessCaptureResultLock.tryLock() != OK) {
        const nsecs_t NS_TO_SECOND = 1000000000;
        ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__);
        if (mProcessCaptureResultLock.timedLock(/* 1s */NS_TO_SECOND) != OK) {
            ALOGE("%s: cannot acquire lock in 1s, cannot proceed",
                    __FUNCTION__);
            return;
        }
    }
    if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) {
        for (CaptureResult &result : results) {
            if (result.result.size() > 0) {
                if (mResultMetadataQueue->write(result.result.data(), result.result.size())) {
                    result.fmqResultSize = result.result.size();
                    result.result.resize(0);
                } else {
                    ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__);
                    result.fmqResultSize = 0;
                }
            } else {
                result.fmqResultSize = 0;
            }
        }
    }
    auto status = mCallback->processCaptureResult(results);
    if (!status.isOk()) {
        ALOGE("%s: processCaptureResult ERROR : %s", __FUNCTION__,
                status.description().c_str());
    }

    mProcessCaptureResultLock.unlock();
}

void ExternalCameraDeviceSession::freeReleaseFences(hidl_vec<CaptureResult>& results) {
    for (auto& result : results) {
        if (result.inputBuffer.releaseFence.getNativeHandle() != nullptr) {
            native_handle_t* handle = const_cast<native_handle_t*>(
                    result.inputBuffer.releaseFence.getNativeHandle());
            native_handle_close(handle);
            native_handle_delete(handle);
        }
        for (auto& buf : result.outputBuffers) {
            if (buf.releaseFence.getNativeHandle() != nullptr) {
                native_handle_t* handle = const_cast<native_handle_t*>(
                        buf.releaseFence.getNativeHandle());
                native_handle_close(handle);
                native_handle_delete(handle);
            }
        }
    }
    return;
}

ExternalCameraDeviceSession::OutputThread::OutputThread(
        wp<ExternalCameraDeviceSession> parent,
        CroppingType ct) : mParent(parent), mCroppingType(ct) {}

ExternalCameraDeviceSession::OutputThread::~OutputThread() {}

uint32_t ExternalCameraDeviceSession::OutputThread::getFourCcFromLayout(
        const YCbCrLayout& layout) {
    intptr_t cb = reinterpret_cast<intptr_t>(layout.cb);
    intptr_t cr = reinterpret_cast<intptr_t>(layout.cr);
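    // Note: the output format is inferred from plane geometry. With
    // chromaStep == 2 and |cb - cr| == 1 the chroma samples interleave
    // (cr first = NV21, cb first = NV12); with chromaStep == 1 they are
    // separate planes (cr first = YV12, cb first = YU12).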
    if (std::abs(cb - cr) == 1 && layout.chromaStep == 2) {
        // Interleaved format
        if (layout.cb > layout.cr) {
            return V4L2_PIX_FMT_NV21;
        } else {
            return V4L2_PIX_FMT_NV12;
        }
    } else if (layout.chromaStep == 1) {
        // Planar format
        if (layout.cb > layout.cr) {
            return V4L2_PIX_FMT_YVU420; // YV12
        } else {
            return V4L2_PIX_FMT_YUV420; // YU12
        }
    } else {
        return FLEX_YUV_GENERIC;
    }
}

int ExternalCameraDeviceSession::OutputThread::getCropRect(
        CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) {
    if (out == nullptr) {
        ALOGE("%s: out is null", __FUNCTION__);
        return -1;
    }

    uint32_t inW = inSize.width;
    uint32_t inH = inSize.height;
    uint32_t outW = outSize.width;
    uint32_t outH = outSize.height;

    // Handle special case where aspect ratio is close to input but scaled
    // dimension is slightly larger than input
    float arIn = ASPECT_RATIO(inSize);
    float arOut = ASPECT_RATIO(outSize);
    if (isAspectRatioClose(arIn, arOut)) {
        out->left = 0;
        out->top = 0;
        out->width = inW;
        out->height = inH;
        return 0;
    }

    if (ct == VERTICAL) {
        uint64_t scaledOutH = static_cast<uint64_t>(outH) * inW / outW;
        if (scaledOutH > inH) {
            ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d",
                    __FUNCTION__, outW, outH, inW, inH);
            return -1;
        }
        scaledOutH = scaledOutH & ~0x1; // make it multiple of 2

        out->left = 0;
        out->top = ((inH - scaledOutH) / 2) & ~0x1;
        out->width = inW;
        out->height = static_cast<int32_t>(scaledOutH);
        ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d",
                __FUNCTION__, inW, inH, outW, outH, out->top, static_cast<int32_t>(scaledOutH));
    } else {
        uint64_t scaledOutW = static_cast<uint64_t>(outW) * inH / outH;
        if (scaledOutW > inW) {
            ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d",
                    __FUNCTION__, outW, outH, inW, inH);
            return -1;
        }
        scaledOutW = scaledOutW & ~0x1; // make it multiple of 2

        out->left = ((inW - scaledOutW) / 2) & ~0x1;
        out->top = 0;
        out->width = static_cast<int32_t>(scaledOutW);
        out->height = inH;
        ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledW %d",
                __FUNCTION__, inW, inH, outW, outH, out->top, static_cast<int32_t>(scaledOutW));
    }

    return 0;
}

int ExternalCameraDeviceSession::OutputThread::cropAndScaleLocked(
        sp<AllocatedFrame>& in, const Size& outSz, YCbCrLayout* out) {
    Size inSz = {in->mWidth, in->mHeight};

    int ret;
    if (inSz == outSz) {
        ret = in->getLayout(out);
        if (ret != 0) {
            ALOGE("%s: failed to get input image layout", __FUNCTION__);
            return ret;
        }
        return ret;
    }

    // Cropping to output aspect ratio
    IMapper::Rect inputCrop;
    ret = getCropRect(mCroppingType, inSz, outSz, &inputCrop);
    if (ret != 0) {
        ALOGE("%s: failed to compute crop rect for output size %dx%d",
                __FUNCTION__, outSz.width, outSz.height);
        return ret;
    }

    YCbCrLayout croppedLayout;
    ret = in->getCroppedLayout(inputCrop, &croppedLayout);
    if (ret != 0) {
        ALOGE("%s: failed to crop input image %dx%d to output size %dx%d",
                __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
        return ret;
    }

    if ((mCroppingType == VERTICAL && inSz.width == outSz.width) ||
            (mCroppingType == HORIZONTAL && inSz.height == outSz.height)) {
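        // Note (illustrative numbers): e.g. a 640x480 source with VERTICAL
        // cropping and a 640x360 target crops to 640x360 at top 60, so the
        // cropped layout already matches the output and no scale pass runs.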
        // No scale is needed
        *out = croppedLayout;
        return 0;
    }

    auto it = mScaledYu12Frames.find(outSz);
    sp<AllocatedFrame> scaledYu12Buf;
    if (it != mScaledYu12Frames.end()) {
        scaledYu12Buf = it->second;
    } else {
        it = mIntermediateBuffers.find(outSz);
        if (it == mIntermediateBuffers.end()) {
            ALOGE("%s: failed to find intermediate buffer size %dx%d",
                    __FUNCTION__, outSz.width, outSz.height);
            return -1;
        }
        scaledYu12Buf = it->second;
    }
    // Scale
    YCbCrLayout outLayout;
    ret = scaledYu12Buf->getLayout(&outLayout);
    if (ret != 0) {
        ALOGE("%s: failed to get output buffer layout", __FUNCTION__);
        return ret;
    }

    ret = libyuv::I420Scale(
            static_cast<uint8_t*>(croppedLayout.y),
            croppedLayout.yStride,
            static_cast<uint8_t*>(croppedLayout.cb),
            croppedLayout.cStride,
            static_cast<uint8_t*>(croppedLayout.cr),
            croppedLayout.cStride,
            inputCrop.width,
            inputCrop.height,
            static_cast<uint8_t*>(outLayout.y),
            outLayout.yStride,
            static_cast<uint8_t*>(outLayout.cb),
            outLayout.cStride,
            static_cast<uint8_t*>(outLayout.cr),
            outLayout.cStride,
            outSz.width,
            outSz.height,
            // TODO: b/72261744 see if we can use better filter without losing too much perf
            libyuv::FilterMode::kFilterNone);

    if (ret != 0) {
        ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. Ret %d",
                __FUNCTION__, inputCrop.width, inputCrop.height,
                outSz.width, outSz.height, ret);
        return ret;
    }

    *out = outLayout;
    mScaledYu12Frames.insert({outSz, scaledYu12Buf});
    return 0;
}


int ExternalCameraDeviceSession::OutputThread::cropAndScaleThumbLocked(
        sp<AllocatedFrame>& in, const Size &outSz, YCbCrLayout* out) {
    Size inSz {in->mWidth, in->mHeight};

    if ((outSz.width * outSz.height) >
            (mYu12ThumbFrame->mWidth * mYu12ThumbFrame->mHeight)) {
        ALOGE("%s: Requested thumbnail size too big (%d,%d) > (%d,%d)",
                __FUNCTION__, outSz.width, outSz.height,
                mYu12ThumbFrame->mWidth, mYu12ThumbFrame->mHeight);
        return -1;
    }

    int ret;

    /* This will crop-and-zoom the input YUV frame to the thumbnail size
     * Based on the following logic:
     * 1) Square pixels come in, square pixels come out, therefore single
     * scale factor is computed to either make input bigger or smaller
     * depending on if we are upscaling or downscaling
     * 2) That single scale factor would either make height too tall or width
     * too wide so we need to crop the input either horizontally or vertically
     * but not both
     */

    /* Convert the input and output dimensions into floats for ease of math */
    float fWin = static_cast<float>(inSz.width);
    float fHin = static_cast<float>(inSz.height);
    float fWout = static_cast<float>(outSz.width);
    float fHout = static_cast<float>(outSz.height);

    /* Compute the one scale factor from (1) above, it will be the smaller of
     * the two possibilities. */
    float scaleFactor = std::min( fHin / fHout, fWin / fWout );

    /* Since we are crop-and-zooming (as opposed to letter/pillar boxing) we can
     * simply multiply the output by our scaleFactor to get the cropped input
     * size. Note that at least one of {fWcrop, fHcrop} is going to wind up
     * being {fWin, fHin} respectively because fHout or fWout cancels out the
     * scaleFactor calculation above.
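     *
     * For example (illustrative numbers): a 1280x720 input and a 160x120
     * thumbnail give scaleFactor = min(720/120, 1280/160) = 6, so the crop
     * is 160*6 x 120*6 = 960x720: full input height, sides cropped away.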
     *
     * Specifically:
     * if ( fHin / fHout ) < ( fWin / fWout ) we crop the sides off
     * input, in which case
     * scaleFactor = fHin / fHout
     * fWcrop = fHin / fHout * fWout
     * fHcrop = fHin
     *
     * Note that fWcrop <= fWin ( because ( fHin / fHout ) * fWout < fWin, which
     * is just the inequality above with both sides multiplied by fWout
     *
     * on the other hand if ( fWin / fWout ) < ( fHin / fHout ) we crop the top
     * and the bottom off of input, and
     * scaleFactor = fWin / fWout
     * fWcrop = fWin
     * fHcrop = fWin / fWout * fHout
     */
    float fWcrop = scaleFactor * fWout;
    float fHcrop = scaleFactor * fHout;

    /* Convert to integer and truncate to an even number */
    Size cropSz = { 2*static_cast<uint32_t>(fWcrop/2.0f),
                    2*static_cast<uint32_t>(fHcrop/2.0f) };

    /* Convert to a centered rectangle with even top/left */
    IMapper::Rect inputCrop {
        2*static_cast<int32_t>((inSz.width - cropSz.width)/4),
        2*static_cast<int32_t>((inSz.height - cropSz.height)/4),
        static_cast<int32_t>(cropSz.width),
        static_cast<int32_t>(cropSz.height) };

    if ((inputCrop.top < 0) ||
            (inputCrop.top >= static_cast<int32_t>(inSz.height)) ||
            (inputCrop.left < 0) ||
            (inputCrop.left >= static_cast<int32_t>(inSz.width)) ||
            (inputCrop.width <= 0) ||
            (inputCrop.width + inputCrop.left > static_cast<int32_t>(inSz.width)) ||
            (inputCrop.height <= 0) ||
            (inputCrop.height + inputCrop.top > static_cast<int32_t>(inSz.height)))
    {
        ALOGE("%s: came up with really wrong crop rectangle",__FUNCTION__);
        ALOGE("%s: input layout %dx%d for output size %dx%d",
                __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
        ALOGE("%s: computed input crop +%d,+%d %dx%d",
                __FUNCTION__, inputCrop.left, inputCrop.top,
                inputCrop.width, inputCrop.height);
        return -1;
    }

    YCbCrLayout inputLayout;
    ret = in->getCroppedLayout(inputCrop, &inputLayout);
    if (ret != 0) {
        ALOGE("%s: failed to crop input layout %dx%d for output size %dx%d",
                __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
        ALOGE("%s: computed input crop +%d,+%d %dx%d",
                __FUNCTION__, inputCrop.left, inputCrop.top,
                inputCrop.width, inputCrop.height);
        return ret;
    }
    ALOGV("%s: crop input layout %dx%d for output size %dx%d",
            __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
    ALOGV("%s: computed input crop +%d,+%d %dx%d",
            __FUNCTION__, inputCrop.left, inputCrop.top,
            inputCrop.width, inputCrop.height);


    // Scale
    YCbCrLayout outFullLayout;

    ret = mYu12ThumbFrame->getLayout(&outFullLayout);
    if (ret != 0) {
        ALOGE("%s: failed to get output buffer layout", __FUNCTION__);
        return ret;
    }


    ret = libyuv::I420Scale(
            static_cast<uint8_t*>(inputLayout.y),
            inputLayout.yStride,
            static_cast<uint8_t*>(inputLayout.cb),
            inputLayout.cStride,
            static_cast<uint8_t*>(inputLayout.cr),
            inputLayout.cStride,
            inputCrop.width,
            inputCrop.height,
            static_cast<uint8_t*>(outFullLayout.y),
            outFullLayout.yStride,
            static_cast<uint8_t*>(outFullLayout.cb),
            outFullLayout.cStride,
            static_cast<uint8_t*>(outFullLayout.cr),
            outFullLayout.cStride,
            outSz.width,
            outSz.height,
            libyuv::FilterMode::kFilterNone);

    if (ret != 0) {
Ret %d", 965 __FUNCTION__, inputCrop.width, inputCrop.height, 966 outSz.width, outSz.height, ret); 967 return ret; 968 } 969 970 *out = outFullLayout; 971 return 0; 972} 973 974int ExternalCameraDeviceSession::OutputThread::formatConvertLocked( 975 const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) { 976 int ret = 0; 977 switch (format) { 978 case V4L2_PIX_FMT_NV21: 979 ret = libyuv::I420ToNV21( 980 static_cast<uint8_t*>(in.y), 981 in.yStride, 982 static_cast<uint8_t*>(in.cb), 983 in.cStride, 984 static_cast<uint8_t*>(in.cr), 985 in.cStride, 986 static_cast<uint8_t*>(out.y), 987 out.yStride, 988 static_cast<uint8_t*>(out.cr), 989 out.cStride, 990 sz.width, 991 sz.height); 992 if (ret != 0) { 993 ALOGE("%s: convert to NV21 buffer failed! ret %d", 994 __FUNCTION__, ret); 995 return ret; 996 } 997 break; 998 case V4L2_PIX_FMT_NV12: 999 ret = libyuv::I420ToNV12( 1000 static_cast<uint8_t*>(in.y), 1001 in.yStride, 1002 static_cast<uint8_t*>(in.cb), 1003 in.cStride, 1004 static_cast<uint8_t*>(in.cr), 1005 in.cStride, 1006 static_cast<uint8_t*>(out.y), 1007 out.yStride, 1008 static_cast<uint8_t*>(out.cb), 1009 out.cStride, 1010 sz.width, 1011 sz.height); 1012 if (ret != 0) { 1013 ALOGE("%s: convert to NV12 buffer failed! ret %d", 1014 __FUNCTION__, ret); 1015 return ret; 1016 } 1017 break; 1018 case V4L2_PIX_FMT_YVU420: // YV12 1019 case V4L2_PIX_FMT_YUV420: // YU12 1020 // TODO: maybe we can speed up here by somehow save this copy? 1021 ret = libyuv::I420Copy( 1022 static_cast<uint8_t*>(in.y), 1023 in.yStride, 1024 static_cast<uint8_t*>(in.cb), 1025 in.cStride, 1026 static_cast<uint8_t*>(in.cr), 1027 in.cStride, 1028 static_cast<uint8_t*>(out.y), 1029 out.yStride, 1030 static_cast<uint8_t*>(out.cb), 1031 out.cStride, 1032 static_cast<uint8_t*>(out.cr), 1033 out.cStride, 1034 sz.width, 1035 sz.height); 1036 if (ret != 0) { 1037 ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d", 1038 __FUNCTION__, ret); 1039 return ret; 1040 } 1041 break; 1042 case FLEX_YUV_GENERIC: 1043 // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow. 1044 ALOGE("%s: unsupported flexible yuv layout" 1045 " y %p cb %p cr %p y_str %d c_str %d c_step %d", 1046 __FUNCTION__, out.y, out.cb, out.cr, 1047 out.yStride, out.cStride, out.chromaStep); 1048 return -1; 1049 default: 1050 ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format); 1051 return -1; 1052 } 1053 return 0; 1054} 1055 1056int ExternalCameraDeviceSession::OutputThread::encodeJpegYU12( 1057 const Size & inSz, const YCbCrLayout& inLayout, 1058 int jpegQuality, const void *app1Buffer, size_t app1Size, 1059 void *out, const size_t maxOutSize, size_t &actualCodeSize) 1060{ 1061 /* libjpeg is a C library so we use C-style "inheritance" by 1062 * putting libjpeg's jpeg_destination_mgr first in our custom 1063 * struct. 
     * struct. This allows us to cast jpeg_destination_mgr* to
     * CustomJpegDestMgr* when we get it passed to us in a callback */
    struct CustomJpegDestMgr {
        struct jpeg_destination_mgr mgr;
        JOCTET *mBuffer;
        size_t mBufferSize;
        size_t mEncodedSize;
        bool mSuccess;
    } dmgr;

    jpeg_compress_struct cinfo = {};
    jpeg_error_mgr jerr;

    /* Initialize error handling with standard callbacks, but
     * then override output_message (to print to ALOG) and
     * error_exit to set a flag and print a message instead
     * of killing the whole process */
    cinfo.err = jpeg_std_error(&jerr);

    cinfo.err->output_message = [](j_common_ptr cinfo) {
        char buffer[JMSG_LENGTH_MAX];

        /* Create the message */
        (*cinfo->err->format_message)(cinfo, buffer);
        ALOGE("libjpeg error: %s", buffer);
    };
    cinfo.err->error_exit = [](j_common_ptr cinfo) {
        (*cinfo->err->output_message)(cinfo);
        if (cinfo->client_data) {
            auto & dmgr =
                    *reinterpret_cast<CustomJpegDestMgr*>(cinfo->client_data);
            dmgr.mSuccess = false;
        }
    };
    /* Now that we initialized some callbacks, let's create our compressor */
    jpeg_create_compress(&cinfo);

    /* Initialize our destination manager */
    dmgr.mBuffer = static_cast<JOCTET*>(out);
    dmgr.mBufferSize = maxOutSize;
    dmgr.mEncodedSize = 0;
    dmgr.mSuccess = true;
    cinfo.client_data = static_cast<void*>(&dmgr);

    /* These lambdas become C-style function pointers and as per C++11 spec
     * may not capture anything */
    dmgr.mgr.init_destination = [](j_compress_ptr cinfo) {
        auto & dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest);
        dmgr.mgr.next_output_byte = dmgr.mBuffer;
        dmgr.mgr.free_in_buffer = dmgr.mBufferSize;
        ALOGV("%s:%d jpeg start: %p [%zu]",
                __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize);
    };

    dmgr.mgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) {
        ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__);
        return 0;
    };

    dmgr.mgr.term_destination = [](j_compress_ptr cinfo) {
        auto & dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest);
        dmgr.mEncodedSize = dmgr.mBufferSize - dmgr.mgr.free_in_buffer;
        ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize);
    };
    cinfo.dest = reinterpret_cast<struct jpeg_destination_mgr*>(&dmgr);

    /* We are going to be using JPEG in raw data mode, so we are passing
     * straight subsampled planar YCbCr and it will not touch our pixel
     * data or do any scaling or anything */
    cinfo.image_width = inSz.width;
    cinfo.image_height = inSz.height;
    cinfo.input_components = 3;
    cinfo.in_color_space = JCS_YCbCr;

    /* Initialize defaults and then override what we want */
    jpeg_set_defaults(&cinfo);

    jpeg_set_quality(&cinfo, jpegQuality, 1);
    jpeg_set_colorspace(&cinfo, JCS_YCbCr);
    cinfo.raw_data_in = 1;
    cinfo.dct_method = JDCT_IFAST;

    /* Configure sampling factors. The sampling factor is JPEG subsampling 420
     * because the source format is YUV420. Note that libjpeg sampling factors
     * are... a little weird. Sampling of Y=2,U=1,V=1 means there is 1 U and
     * 1 V value for each 2 Y values */
    cinfo.comp_info[0].h_samp_factor = 2;
    cinfo.comp_info[0].v_samp_factor = 2;
    cinfo.comp_info[1].h_samp_factor = 1;
    cinfo.comp_info[1].v_samp_factor = 1;
    cinfo.comp_info[2].h_samp_factor = 1;
    cinfo.comp_info[2].v_samp_factor = 1;

    /* Let's not hardcode YUV420 in 6 places... 5 was enough */
    int maxVSampFactor = std::max( {
        cinfo.comp_info[0].v_samp_factor,
        cinfo.comp_info[1].v_samp_factor,
        cinfo.comp_info[2].v_samp_factor
    });
    int cVSubSampling = cinfo.comp_info[0].v_samp_factor /
                        cinfo.comp_info[1].v_samp_factor;

    /* Start the compressor */
    jpeg_start_compress(&cinfo, TRUE);

    /* Compute our macroblock height, so we can pad our input to be vertically
     * macroblock aligned.
     * TODO: Does it need to be horizontally MCU aligned too? */

    size_t mcuV = DCTSIZE*maxVSampFactor;
    size_t paddedHeight = mcuV * ((inSz.height + mcuV - 1) / mcuV);

    /* libjpeg uses arrays of row pointers, which makes it really easy to pad
     * data vertically (unfortunately doesn't help horizontally) */
    std::vector<JSAMPROW> yLines (paddedHeight);
    std::vector<JSAMPROW> cbLines(paddedHeight/cVSubSampling);
    std::vector<JSAMPROW> crLines(paddedHeight/cVSubSampling);

    uint8_t *py = static_cast<uint8_t*>(inLayout.y);
    uint8_t *pcr = static_cast<uint8_t*>(inLayout.cr);
    uint8_t *pcb = static_cast<uint8_t*>(inLayout.cb);

    for (uint32_t i = 0; i < paddedHeight; i++)
    {
        /* Once we are in the padding territory we still point to the last line
         * effectively replicating it several times ~ CLAMP_TO_EDGE */
        int li = std::min(i, inSz.height - 1);
        yLines[i] = static_cast<JSAMPROW>(py + li * inLayout.yStride);
        if (i < paddedHeight / cVSubSampling)
        {
            crLines[i] = static_cast<JSAMPROW>(pcr + li * inLayout.cStride);
            cbLines[i] = static_cast<JSAMPROW>(pcb + li * inLayout.cStride);
        }
    }

    /* If APP1 data was passed in, use it */
    if (app1Buffer && app1Size)
    {
        jpeg_write_marker(&cinfo, JPEG_APP0 + 1,
                static_cast<const JOCTET*>(app1Buffer), app1Size);
    }

    /* While we still have padded height left to go, keep giving it one
     * macroblock at a time. */
    while (cinfo.next_scanline < cinfo.image_height) {
        const uint32_t batchSize = DCTSIZE * maxVSampFactor;
        const uint32_t nl = cinfo.next_scanline;
        JSAMPARRAY planes[3]{ &yLines[nl],
                              &cbLines[nl/cVSubSampling],
                              &crLines[nl/cVSubSampling] };

        uint32_t done = jpeg_write_raw_data(&cinfo, planes, batchSize);

        if (done != batchSize) {
            ALOGE("%s: compressed %u lines, expected %u (total %u/%u)",
                    __FUNCTION__, done, batchSize, cinfo.next_scanline,
                    cinfo.image_height);
            return -1;
        }
    }

    /* This will flush everything */
    jpeg_finish_compress(&cinfo);

    /* Grab the actual code size and set it */
    actualCodeSize = dmgr.mEncodedSize;

    return 0;
}

/*
 * TODO: There needs to be a mechanism to discover allocated buffer size
 * in the HAL.
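 *
 * For reference, getJpegBufferSize() below linearly interpolates between
 * kMinJpegBufferSize and ANDROID_JPEG_MAX_SIZE by pixel-count ratio; e.g.
 * (illustrative numbers) a request with 1/4 the pixels of the max JPEG
 * resolution gets roughly 1/4 of the way from the minimum toward the max.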
 *
 * This is very fragile because it is duplicated computation from:
 * frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
 *
 */

/* This assumes mSupportedFormats have all been declared as supporting
 * HAL_PIXEL_FORMAT_BLOB to the framework */
Size ExternalCameraDeviceSession::getMaxJpegResolution() const {
    Size ret { 0, 0 };
    for (auto & fmt : mSupportedFormats) {
        if (fmt.width * fmt.height > ret.width * ret.height) {
            ret = Size { fmt.width, fmt.height };
        }
    }
    return ret;
}

Size ExternalCameraDeviceSession::getMaxThumbResolution() const {
    Size thumbSize { 0, 0 };
    camera_metadata_ro_entry entry =
        mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
    for (uint32_t i = 0; i < entry.count; i += 2) {
        Size sz { static_cast<uint32_t>(entry.data.i32[i]),
                  static_cast<uint32_t>(entry.data.i32[i+1]) };
        if (sz.width * sz.height > thumbSize.width * thumbSize.height) {
            thumbSize = sz;
        }
    }

    if (thumbSize.width * thumbSize.height == 0) {
        ALOGW("%s: non-zero thumbnail size not available", __FUNCTION__);
    }

    return thumbSize;
}


ssize_t ExternalCameraDeviceSession::getJpegBufferSize(
        uint32_t width, uint32_t height) const {
    // Constant from camera3.h
    const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(CameraBlob);
    // Get max jpeg size (area-wise).
    if (mMaxJpegResolution.width == 0) {
        ALOGE("%s: Do not have a single supported JPEG stream",
                __FUNCTION__);
        return BAD_VALUE;
    }

    // Get max jpeg buffer size
    ssize_t maxJpegBufferSize = 0;
    camera_metadata_ro_entry jpegBufMaxSize =
            mCameraCharacteristics.find(ANDROID_JPEG_MAX_SIZE);
    if (jpegBufMaxSize.count == 0) {
        ALOGE("%s: Can't find maximum JPEG size in static metadata!",
                __FUNCTION__);
        return BAD_VALUE;
    }
    maxJpegBufferSize = jpegBufMaxSize.data.i32[0];

    if (maxJpegBufferSize <= kMinJpegBufferSize) {
        ALOGE("%s: ANDROID_JPEG_MAX_SIZE (%zd) <= kMinJpegBufferSize (%zd)",
                __FUNCTION__, maxJpegBufferSize, kMinJpegBufferSize);
        return BAD_VALUE;
    }

    // Calculate final jpeg buffer size for the given resolution.
    float scaleFactor = ((float) (width * height)) /
            (mMaxJpegResolution.width * mMaxJpegResolution.height);
    ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
            kMinJpegBufferSize;
    if (jpegBufferSize > maxJpegBufferSize) {
        jpegBufferSize = maxJpegBufferSize;
    }

    return jpegBufferSize;
}

int ExternalCameraDeviceSession::OutputThread::createJpegLocked(
        HalStreamBuffer &halBuf,
        HalRequest &req)
{
    int ret;
    auto lfail = [&](auto... args) {
        ALOGE(args...);

        return 1;
    };
    auto parent = mParent.promote();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return 1;
    }

    ALOGV("%s: HAL buffer sid: %d bid: %" PRIu64 " w: %u h: %u",
            __FUNCTION__, halBuf.streamId, static_cast<uint64_t>(halBuf.bufferId),
            halBuf.width, halBuf.height);
    ALOGV("%s: HAL buffer fmt: %x usage: %" PRIx64 " ptr: %p",
            __FUNCTION__, halBuf.format, static_cast<uint64_t>(halBuf.usage),
            halBuf.bufPtr);
    ALOGV("%s: YV12 buffer %d x %d",
            __FUNCTION__,
            mYu12Frame->mWidth, mYu12Frame->mHeight);

    int jpegQuality, thumbQuality;
    Size thumbSize;

    if (req.setting.exists(ANDROID_JPEG_QUALITY)) {
        camera_metadata_entry entry =
            req.setting.find(ANDROID_JPEG_QUALITY);
        jpegQuality = entry.data.u8[0];
    } else {
        return lfail("%s: ANDROID_JPEG_QUALITY not set", __FUNCTION__);
    }

    if (req.setting.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
        camera_metadata_entry entry =
            req.setting.find(ANDROID_JPEG_THUMBNAIL_QUALITY);
        thumbQuality = entry.data.u8[0];
    } else {
        return lfail(
                "%s: ANDROID_JPEG_THUMBNAIL_QUALITY not set",
                __FUNCTION__);
    }

    if (req.setting.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
        camera_metadata_entry entry =
            req.setting.find(ANDROID_JPEG_THUMBNAIL_SIZE);
        thumbSize = Size { static_cast<uint32_t>(entry.data.i32[0]),
                           static_cast<uint32_t>(entry.data.i32[1])
        };
    } else {
        return lfail(
                "%s: ANDROID_JPEG_THUMBNAIL_SIZE not set", __FUNCTION__);
    }

    /* Cropped and scaled YU12 buffer for main and thumbnail */
    YCbCrLayout yu12Main;
    Size jpegSize { halBuf.width, halBuf.height };

    /* Compute temporary buffer sizes accounting for the following:
     * thumbnail can't exceed APP1 size of 64K
     * main image needs to hold APP1, headers, and at most a poorly
     * compressed image */
    const ssize_t maxThumbCodeSize = 64 * 1024;
    const ssize_t maxJpegCodeSize = parent->getJpegBufferSize(jpegSize.width,
            jpegSize.height);

    /* Check that getJpegBufferSize did not return an error */
    if (maxJpegCodeSize < 0) {
        return lfail(
                "%s: getJpegBufferSize returned %zd", __FUNCTION__, maxJpegCodeSize);
    }


    /* Hold actual thumbnail and main image code sizes */
    size_t thumbCodeSize = 0, jpegCodeSize = 0;
    /* Temporary thumbnail code buffer */
    std::vector<uint8_t> thumbCode(maxThumbCodeSize);

    YCbCrLayout yu12Thumb;
    ret = cropAndScaleThumbLocked(mYu12Frame, thumbSize, &yu12Thumb);

    if (ret != 0) {
        return lfail(
                "%s: crop and scale thumbnail failed!", __FUNCTION__);
    }

    /* Scale and crop main jpeg */
    ret = cropAndScaleLocked(mYu12Frame, jpegSize, &yu12Main);

    if (ret != 0) {
        return lfail("%s: crop and scale main failed!", __FUNCTION__);
    }

    /* Encode the thumbnail image */
    ret = encodeJpegYU12(thumbSize, yu12Thumb,
            thumbQuality, 0, 0,
            &thumbCode[0], maxThumbCodeSize, thumbCodeSize);

    if (ret != 0) {
        return lfail("%s: encodeJpegYU12 failed with %d", __FUNCTION__, ret);
    }

    /* Combine camera characteristics with request settings to form EXIF
     * metadata */
    common::V1_0::helper::CameraMetadata meta(parent->mCameraCharacteristics);
    meta.append(req.setting);

    /* Generate EXIF object */
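    /* Note: the EXIF block (including the thumbnail encoded above) travels in
     * the JPEG APP1 marker, which is why the thumbnail is generated first and
     * then passed into the main encodeJpegYU12() call below. */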
    std::unique_ptr<ExifUtils> utils(ExifUtils::create());
    /* Make sure it's initialized */
    utils->initialize();

    utils->setFromMetadata(meta, jpegSize.width, jpegSize.height);

    /* Check if we made a non-zero-sized thumbnail. Currently not possible
     * that we got this far and the code is size 0, but if this code moves
     * around it might become relevant again */

    ret = utils->generateApp1(thumbCodeSize ? &thumbCode[0] : 0, thumbCodeSize);

    if (!ret) {
        return lfail("%s: generating APP1 failed", __FUNCTION__);
    }

    /* Get internal buffer */
    size_t exifDataSize = utils->getApp1Length();
    const uint8_t* exifData = utils->getApp1Buffer();

    /* Lock the HAL jpeg code buffer */
    void *bufPtr = sHandleImporter.lock(
            *(halBuf.bufPtr), halBuf.usage, maxJpegCodeSize);

    if (!bufPtr) {
        return lfail("%s: could not lock %zu bytes", __FUNCTION__, maxJpegCodeSize);
    }

    /* Encode the main jpeg image */
    ret = encodeJpegYU12(jpegSize, yu12Main,
            jpegQuality, exifData, exifDataSize,
            bufPtr, maxJpegCodeSize, jpegCodeSize);

    /* TODO: Not sure this belongs here, maybe better to pass jpegCodeSize out
     * and do this when returning buffer to parent */
    CameraBlob blob { CameraBlobId::JPEG, static_cast<uint32_t>(jpegCodeSize) };
    void *blobDst =
        reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(bufPtr) +
                           maxJpegCodeSize -
                           sizeof(CameraBlob));
    memcpy(blobDst, &blob, sizeof(CameraBlob));

    /* Unlock the HAL jpeg code buffer */
    int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
    if (relFence > 0) {
        halBuf.acquireFence = relFence;
    }

    /* Check if our JPEG actually succeeded */
    if (ret != 0) {
        return lfail(
                "%s: encodeJpegYU12 failed with %d", __FUNCTION__, ret);
    }

    ALOGV("%s: encoded JPEG (ret:%d) with Q:%d max size: %zu",
            __FUNCTION__, ret, jpegQuality, maxJpegCodeSize);

    return 0;
}

bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
    HalRequest req;
    auto parent = mParent.promote();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return false;
    }

    // TODO: maybe we need to setup a sensor thread to dq/enq v4l frames
    //       regularly to prevent v4l buffer queue filled with stale buffers
    //       when app doesn't program a preview request
    waitForNextRequest(&req);
    if (req.frameIn == nullptr) {
        // No new request, wait again
        return true;
    }

    auto onDeviceError = [&](auto... args) {
        ALOGE(args...);
        parent->notifyError(
                req.frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE);
        signalRequestDone();
        return false;
    };

    if (req.frameIn->mFourcc != V4L2_PIX_FMT_MJPEG) {
        return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
                req.frameIn->mFourcc & 0xFF,
                (req.frameIn->mFourcc >> 8) & 0xFF,
                (req.frameIn->mFourcc >> 16) & 0xFF,
                (req.frameIn->mFourcc >> 24) & 0xFF);
    }

    std::unique_lock<std::mutex> lk(mBufferLock);
    // Convert input V4L2 frame to YU12 of the same size
    // TODO: see if we can save some computation by converting to YV12 here
    uint8_t* inData;
    size_t inDataSize;
    req.frameIn->map(&inData, &inDataSize);
    // TODO: profile
    // TODO: in some special case maybe we can decode jpg directly to gralloc output?
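    // Note: libyuv::MJPGToI420 takes both the encoded source dimensions and
    // the destination dimensions; here they match the intermediate frame, so
    // the decode fills mYu12Frame without any extra scaling.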
    int res = libyuv::MJPGToI420(
            inData, inDataSize,
            static_cast<uint8_t*>(mYu12FrameLayout.y),
            mYu12FrameLayout.yStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cb),
            mYu12FrameLayout.cStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cr),
            mYu12FrameLayout.cStride,
            mYu12Frame->mWidth, mYu12Frame->mHeight,
            mYu12Frame->mWidth, mYu12Frame->mHeight);

    if (res != 0) {
        // For some webcam, the first few V4L2 frames might be malformed...
        ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res);
        lk.unlock();
        Status st = parent->processCaptureRequestError(req);
        if (st != Status::OK) {
            return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
        }
        signalRequestDone();
        return true;
    }

    ALOGV("%s processing new request", __FUNCTION__);
    const int kSyncWaitTimeoutMs = 500;
    for (auto& halBuf : req.buffers) {
        if (halBuf.acquireFence != -1) {
            int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs);
            if (ret) {
                halBuf.fenceTimeout = true;
            } else {
                ::close(halBuf.acquireFence);
                halBuf.acquireFence = -1;
            }
        }

        if (halBuf.fenceTimeout) {
            continue;
        }

        // Gralloc lockYCbCr the buffer
        switch (halBuf.format) {
            case PixelFormat::BLOB: {
                int ret = createJpegLocked(halBuf, req);

                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: createJpegLocked failed with %d",
                            __FUNCTION__, ret);
                }
            } break;
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: {
                IMapper::Rect outRect {0, 0,
                        static_cast<int32_t>(halBuf.width),
                        static_cast<int32_t>(halBuf.height)};
                YCbCrLayout outLayout = sHandleImporter.lockYCbCr(
                        *(halBuf.bufPtr), halBuf.usage, outRect);
                ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d",
                        __FUNCTION__, outLayout.y, outLayout.cb, outLayout.cr,
                        outLayout.yStride, outLayout.cStride, outLayout.chromaStep);

                // Convert to output buffer size/format
                uint32_t outputFourcc = getFourCcFromLayout(outLayout);
                ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__,
                        outputFourcc & 0xFF,
                        (outputFourcc >> 8) & 0xFF,
                        (outputFourcc >> 16) & 0xFF,
                        (outputFourcc >> 24) & 0xFF);

                YCbCrLayout cropAndScaled;
                int ret = cropAndScaleLocked(
                        mYu12Frame,
                        Size { halBuf.width, halBuf.height },
                        &cropAndScaled);
                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: crop and scale failed!", __FUNCTION__);
                }

                Size sz {halBuf.width, halBuf.height};
                ret = formatConvertLocked(cropAndScaled, outLayout, sz, outputFourcc);
                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: format conversion failed!", __FUNCTION__);
                }
                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
                if (relFence > 0) {
                    halBuf.acquireFence = relFence;
                }
            } break;
            default:
                lk.unlock();
                return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format);
        }
    } // for each buffer
    mScaledYu12Frames.clear();

    // Don't hold the lock while calling back to parent
    lk.unlock();
    Status st = parent->processCaptureResult(req);
    if (st != Status::OK) {
        return onDeviceError("%s: failed to process capture result!", __FUNCTION__);
    }
    signalRequestDone();
    return true;
}

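// Note: called during stream configuration. One full-size YU12 frame holds
// each decoded MJPEG input, one fixed frame holds the thumbnail, and one
// frame per distinct non-native output resolution holds crop/scale results;
// buffers for resolutions no longer configured are dropped at the end.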
Status ExternalCameraDeviceSession::OutputThread::allocateIntermediateBuffers(
        const Size& v4lSize, const Size& thumbSize,
        const hidl_vec<Stream>& streams) {
    std::lock_guard<std::mutex> lk(mBufferLock);
    if (mScaledYu12Frames.size() != 0) {
        ALOGE("%s: intermediate buffer pool has %zu inflight buffers! (expect 0)",
                __FUNCTION__, mScaledYu12Frames.size());
        return Status::INTERNAL_ERROR;
    }

    // Allocating intermediate YU12 frame
    if (mYu12Frame == nullptr || mYu12Frame->mWidth != v4lSize.width ||
            mYu12Frame->mHeight != v4lSize.height) {
        mYu12Frame.clear();
        mYu12Frame = new AllocatedFrame(v4lSize.width, v4lSize.height);
        int ret = mYu12Frame->allocate(&mYu12FrameLayout);
        if (ret != 0) {
            ALOGE("%s: allocating YU12 frame failed!", __FUNCTION__);
            return Status::INTERNAL_ERROR;
        }
    }

    // Allocating intermediate YU12 thumbnail frame
    if (mYu12ThumbFrame == nullptr ||
            mYu12ThumbFrame->mWidth != thumbSize.width ||
            mYu12ThumbFrame->mHeight != thumbSize.height) {
        mYu12ThumbFrame.clear();
        mYu12ThumbFrame = new AllocatedFrame(thumbSize.width, thumbSize.height);
        int ret = mYu12ThumbFrame->allocate(&mYu12ThumbFrameLayout);
        if (ret != 0) {
            ALOGE("%s: allocating YU12 thumb frame failed!", __FUNCTION__);
            return Status::INTERNAL_ERROR;
        }
    }

    // Allocating scaled buffers
    for (const auto& stream : streams) {
        Size sz = {stream.width, stream.height};
        if (sz == v4lSize) {
            continue; // Don't need an intermediate buffer same size as v4lBuffer
        }
        if (mIntermediateBuffers.count(sz) == 0) {
            // Create new intermediate buffer
            sp<AllocatedFrame> buf = new AllocatedFrame(stream.width, stream.height);
            int ret = buf->allocate();
            if (ret != 0) {
                ALOGE("%s: allocating intermediate YU12 frame %dx%d failed!",
                        __FUNCTION__, stream.width, stream.height);
                return Status::INTERNAL_ERROR;
            }
            mIntermediateBuffers[sz] = buf;
        }
    }

    // Remove unconfigured buffers
    auto it = mIntermediateBuffers.begin();
    while (it != mIntermediateBuffers.end()) {
        bool configured = false;
        auto sz = it->first;
        for (const auto& stream : streams) {
            if (stream.width == sz.width && stream.height == sz.height) {
                configured = true;
                break;
            }
        }
        if (configured) {
            it++;
        } else {
            it = mIntermediateBuffers.erase(it);
        }
    }
    return Status::OK;
}

Status ExternalCameraDeviceSession::OutputThread::submitRequest(const HalRequest& req) {
    std::unique_lock<std::mutex> lk(mRequestListLock);
    // TODO: reduce object copy in this path
    mRequestList.push_back(req);
    lk.unlock();
    mRequestCond.notify_one();
    return Status::OK;
}

void ExternalCameraDeviceSession::OutputThread::flush() {
    auto parent = mParent.promote();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return;
    }

    std::unique_lock<std::mutex> lk(mRequestListLock);
    std::list<HalRequest> reqs = mRequestList;
    mRequestList.clear();
    if (mProcessingRequest) {
        std::chrono::seconds timeout = std::chrono::seconds(kReqWaitTimeoutSec);
        auto st = mRequestDoneCond.wait_for(lk, timeout);
        if (st == std::cv_status::timeout) {
            ALOGE("%s: wait for inflight request finish timeout!", __FUNCTION__);
        }
    }

    lk.unlock();
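    // Pending requests are failed outside the lock; each one notifies
    // ERROR_REQUEST and returns its buffers via processCaptureRequestError().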
void ExternalCameraDeviceSession::OutputThread::flush() {
    auto parent = mParent.promote();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return;
    }

    std::unique_lock<std::mutex> lk(mRequestListLock);
    std::list<HalRequest> reqs = mRequestList;
    mRequestList.clear();
    if (mProcessingRequest) {
        std::chrono::seconds timeout = std::chrono::seconds(kReqWaitTimeoutSec);
        auto st = mRequestDoneCond.wait_for(lk, timeout);
        if (st == std::cv_status::timeout) {
            ALOGE("%s: timed out waiting for the inflight request to finish!", __FUNCTION__);
        }
    }

    lk.unlock();
    for (const auto& req : reqs) {
        parent->processCaptureRequestError(req);
    }
}

void ExternalCameraDeviceSession::OutputThread::waitForNextRequest(HalRequest* out) {
    if (out == nullptr) {
        ALOGE("%s: out is null", __FUNCTION__);
        return;
    }

    std::unique_lock<std::mutex> lk(mRequestListLock);
    while (mRequestList.empty()) {
        std::chrono::seconds timeout = std::chrono::seconds(kReqWaitTimeoutSec);
        auto st = mRequestCond.wait_for(lk, timeout);
        if (st == std::cv_status::timeout) {
            // no new request, return
            return;
        }
    }
    *out = mRequestList.front();
    mRequestList.pop_front();
    mProcessingRequest = true;
}

void ExternalCameraDeviceSession::OutputThread::signalRequestDone() {
    std::unique_lock<std::mutex> lk(mRequestListLock);
    mProcessingRequest = false;
    lk.unlock();
    mRequestDoneCond.notify_one();
}

void ExternalCameraDeviceSession::cleanupBuffersLocked(int id) {
    for (auto& pair : mCirculatingBuffers.at(id)) {
        sHandleImporter.freeBuffer(pair.second);
    }
    mCirculatingBuffers[id].clear();
    mCirculatingBuffers.erase(id);
}

void ExternalCameraDeviceSession::updateBufferCaches(const hidl_vec<BufferCache>& cachesToRemove) {
    Mutex::Autolock _l(mLock);
    for (auto& cache : cachesToRemove) {
        auto cbsIt = mCirculatingBuffers.find(cache.streamId);
        if (cbsIt == mCirculatingBuffers.end()) {
            // The stream could have been removed
            continue;
        }
        CirculatingBuffers& cbs = cbsIt->second;
        auto it = cbs.find(cache.bufferId);
        if (it != cbs.end()) {
            sHandleImporter.freeBuffer(it->second);
            cbs.erase(it);
        } else {
            ALOGE("%s: stream %d buffer %" PRIu64 " is not cached",
                    __FUNCTION__, cache.streamId, cache.bufferId);
        }
    }
}
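// A stream is supported iff it is an un-rotated OUTPUT stream, is not a
// depth stream, uses BLOB (with V0_JFIF dataSpace only),
// IMPLEMENTATION_DEFINED, YCBCR_420_888 or YV12, and its resolution exactly
// matches one of the supported V4L2 frame sizes.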
bool ExternalCameraDeviceSession::isSupported(const Stream& stream) {
    int32_t ds = static_cast<int32_t>(stream.dataSpace);
    PixelFormat fmt = stream.format;
    uint32_t width = stream.width;
    uint32_t height = stream.height;
    // TODO: check usage flags

    if (stream.streamType != StreamType::OUTPUT) {
        ALOGE("%s: does not support non-output stream type", __FUNCTION__);
        return false;
    }

    if (stream.rotation != StreamRotation::ROTATION_0) {
        ALOGE("%s: does not support stream rotation", __FUNCTION__);
        return false;
    }

    if (ds & Dataspace::DEPTH) {
        ALOGI("%s: does not support depth output", __FUNCTION__);
        return false;
    }

    switch (fmt) {
        case PixelFormat::BLOB:
            if (ds != static_cast<int32_t>(Dataspace::V0_JFIF)) {
                ALOGI("%s: BLOB format does not support dataSpace %x", __FUNCTION__, ds);
                return false;
            }
            // Fall through: BLOB is otherwise accepted like the formats below
        case PixelFormat::IMPLEMENTATION_DEFINED:
        case PixelFormat::YCBCR_420_888:
        case PixelFormat::YV12:
            // TODO: check what dataspace we can support here.
            // intentional no-ops.
            break;
        default:
            ALOGI("%s: does not support format %x", __FUNCTION__, fmt);
            return false;
    }

    // Assume for now that any V4L2 format can be converted to any supported output
    // format, i.e., ignore v4l2Fmt.fourcc. A more subtle check might be needed if
    // more V4L2 formats are supported in the future.
    for (const auto& v4l2Fmt : mSupportedFormats) {
        if (width == v4l2Fmt.width && height == v4l2Fmt.height) {
            return true;
        }
    }
    ALOGI("%s: resolution %dx%d is not supported", __FUNCTION__, width, height);
    return false;
}

int ExternalCameraDeviceSession::v4l2StreamOffLocked() {
    if (!mV4l2Streaming) {
        return OK;
    }

    {
        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
        if (mNumDequeuedV4l2Buffers != 0) {
            ALOGE("%s: there are %zu inflight V4L buffers",
                    __FUNCTION__, mNumDequeuedV4l2Buffers);
            return -1;
        }
    }
    mV4L2BufferCount = 0;

    // VIDIOC_STREAMOFF
    v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMOFF, &capture_type)) < 0) {
        ALOGE("%s: STREAMOFF failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    // VIDIOC_REQBUFS: clear buffers
    v4l2_requestbuffers req_buffers{};
    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req_buffers.memory = V4L2_MEMORY_MMAP;
    req_buffers.count = 0;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) {
        ALOGE("%s: REQBUFS failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    mV4l2Streaming = false;
    return OK;
}
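// (Re)start V4L2 capture using the standard MMAP streaming I/O sequence:
// VIDIOC_STREAMOFF for any current stream, VIDIOC_S_FMT to set
// resolution/fourcc, VIDIOC_G_PARM/VIDIOC_S_PARM to set the frame rate,
// VIDIOC_REQBUFS to allocate driver buffers, VIDIOC_QUERYBUF + VIDIOC_QBUF
// to queue them, and finally VIDIOC_STREAMON. Note that V4L2 expresses frame
// rate as a time-per-frame fraction, so e.g. 30fps is requested as
// timeperframe = 10000/300000 (numerator = kFrameRatePrecision).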
int ExternalCameraDeviceSession::configureV4l2StreamLocked(const SupportedV4L2Format& v4l2Fmt) {
    int ret = v4l2StreamOffLocked();
    if (ret != OK) {
        ALOGE("%s: stop v4l2 streaming failed: ret %d", __FUNCTION__, ret);
        return ret;
    }

    // VIDIOC_S_FMT w/h/fmt
    v4l2_format fmt{};
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = v4l2Fmt.width;
    fmt.fmt.pix.height = v4l2Fmt.height;
    fmt.fmt.pix.pixelformat = v4l2Fmt.fourcc;
    ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt));
    if (ret < 0) {
        ALOGE("%s: S_FMT ioctl failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    if (v4l2Fmt.width != fmt.fmt.pix.width || v4l2Fmt.height != fmt.fmt.pix.height ||
            v4l2Fmt.fourcc != fmt.fmt.pix.pixelformat) {
        ALOGE("%s: S_FMT expect %c%c%c%c %dx%d, got %c%c%c%c %dx%d instead!", __FUNCTION__,
                v4l2Fmt.fourcc & 0xFF,
                (v4l2Fmt.fourcc >> 8) & 0xFF,
                (v4l2Fmt.fourcc >> 16) & 0xFF,
                (v4l2Fmt.fourcc >> 24) & 0xFF,
                v4l2Fmt.width, v4l2Fmt.height,
                fmt.fmt.pix.pixelformat & 0xFF,
                (fmt.fmt.pix.pixelformat >> 8) & 0xFF,
                (fmt.fmt.pix.pixelformat >> 16) & 0xFF,
                (fmt.fmt.pix.pixelformat >> 24) & 0xFF,
                fmt.fmt.pix.width, fmt.fmt.pix.height);
        return -EINVAL;
    }
    uint32_t bufferSize = fmt.fmt.pix.sizeimage;
    ALOGI("%s: V4L2 buffer size is %d", __FUNCTION__, bufferSize);

    float maxFps = -1.f;
    float fps = 1000.f;
    const float kDefaultFps = 30.f;
    // Try to pick the slowest fps that is at least 30; fall back to the fastest
    // supported fps if none reaches 30
    for (const auto& fr : v4l2Fmt.frameRates) {
        double f = fr.getDouble();
        if (maxFps < f) {
            maxFps = f;
        }
        if (f >= kDefaultFps && f < fps) {
            fps = f;
        }
    }
    if (fps == 1000.f) {
        fps = maxFps;
    }

    // VIDIOC_G_PARM/VIDIOC_S_PARM: set fps
    v4l2_streamparm streamparm = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
    // The following line checks that the driver knows about framerate get/set.
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_G_PARM, &streamparm)) >= 0) {
        // Now check if the device is able to accept a capture framerate set.
        if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
            // |fps| is float; approximate it by a fraction.
            const int kFrameRatePrecision = 10000;
            streamparm.parm.capture.timeperframe.numerator = kFrameRatePrecision;
            streamparm.parm.capture.timeperframe.denominator =
                    static_cast<uint32_t>(fps * kFrameRatePrecision);

            if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_PARM, &streamparm)) < 0) {
                ALOGE("%s: failed to set framerate to %f", __FUNCTION__, fps);
                return UNKNOWN_ERROR;
            }
        }
    }
    // Cast to float before dividing: both fields are integers, and integer
    // division would truncate the returned frame rate.
    float retFps = streamparm.parm.capture.timeperframe.denominator /
            static_cast<float>(streamparm.parm.capture.timeperframe.numerator);
    if (std::fabs(fps - retFps) > std::numeric_limits<float>::epsilon()) {
        ALOGE("%s: expect fps %f, got %f instead", __FUNCTION__, fps, retFps);
        return BAD_VALUE;
    }

    uint32_t v4lBufferCount = (fps >= kDefaultFps) ?
            mCfg.numVideoBuffers : mCfg.numStillBuffers;
    // VIDIOC_REQBUFS: create buffers
    v4l2_requestbuffers req_buffers{};
    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req_buffers.memory = V4L2_MEMORY_MMAP;
    req_buffers.count = v4lBufferCount;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) {
        ALOGE("%s: VIDIOC_REQBUFS failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    // The driver is allowed to return more buffers than requested if it needs
    // more to operate, but never fewer
    if (req_buffers.count < v4lBufferCount) {
        ALOGE("%s: VIDIOC_REQBUFS expected %d buffers, got %d instead",
                __FUNCTION__, v4lBufferCount, req_buffers.count);
        return NO_MEMORY;
    }

    // VIDIOC_QUERYBUF: get buffer offset in the V4L2 fd
    // VIDIOC_QBUF: send buffer to driver
    mV4L2BufferCount = req_buffers.count;
    for (uint32_t i = 0; i < req_buffers.count; i++) {
        v4l2_buffer buffer = {
                .index = i,
                .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
                .memory = V4L2_MEMORY_MMAP};

        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QUERYBUF, &buffer)) < 0) {
            ALOGE("%s: QUERYBUF %d failed: %s", __FUNCTION__, i, strerror(errno));
            return -errno;
        }

        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
            ALOGE("%s: QBUF %d failed: %s", __FUNCTION__, i, strerror(errno));
            return -errno;
        }
    }

    // VIDIOC_STREAMON: start streaming
    v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type)) < 0) {
        ALOGE("%s: VIDIOC_STREAMON failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    // Swallow the first few frames after streamOn to account for bad frames from
    // some devices
    for (int i = 0; i < kBadFramesAfterStreamOn; i++) {
        v4l2_buffer buffer{};
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) {
            ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno));
            return -errno;
        }

        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
            ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, buffer.index, strerror(errno));
            return -errno;
        }
    }

    mV4l2StreamingFmt = v4l2Fmt;
    mV4l2Streaming = true;
    return OK;
}
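// Dequeue one filled capture buffer from the V4L2 driver.
// mNumDequeuedV4l2Buffers counts how many of the mV4L2BufferCount driver
// buffers are currently held by the HAL; when all of them are out, wait up
// to kBufferWaitTimeoutSec for enqueueV4l2Frame() to return one first.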
sp<V4L2Frame> ExternalCameraDeviceSession::dequeueV4l2FrameLocked() {
    sp<V4L2Frame> ret = nullptr;

    {
        std::unique_lock<std::mutex> lk(mV4l2BufferLock);
        if (mNumDequeuedV4l2Buffers == mV4L2BufferCount) {
            std::chrono::seconds timeout = std::chrono::seconds(kBufferWaitTimeoutSec);
            // Release the session lock while waiting so the output thread can
            // return buffers via enqueueV4l2Frame()
            mLock.unlock();
            auto st = mV4L2BufferReturned.wait_for(lk, timeout);
            mLock.lock();
            if (st == std::cv_status::timeout) {
                ALOGE("%s: wait for V4L2 buffer return timeout!", __FUNCTION__);
                return ret;
            }
        }
    }

    v4l2_buffer buffer{};
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) {
        ALOGE("%s: DQBUF fails: %s", __FUNCTION__, strerror(errno));
        return ret;
    }

    if (buffer.index >= mV4L2BufferCount) {
        ALOGE("%s: Invalid buffer id: %d", __FUNCTION__, buffer.index);
        return ret;
    }

    if (buffer.flags & V4L2_BUF_FLAG_ERROR) {
        ALOGE("%s: v4l2 buf error! buf flag 0x%x", __FUNCTION__, buffer.flags);
        // TODO: try to dequeue again
    }

    {
        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
        mNumDequeuedV4l2Buffers++;
    }
    return new V4L2Frame(
            mV4l2StreamingFmt.width, mV4l2StreamingFmt.height, mV4l2StreamingFmt.fourcc,
            buffer.index, mV4l2Fd.get(), buffer.bytesused, buffer.m.offset);
}

void ExternalCameraDeviceSession::enqueueV4l2Frame(const sp<V4L2Frame>& frame) {
    Mutex::Autolock _l(mLock);
    frame->unmap();
    v4l2_buffer buffer{};
    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buffer.memory = V4L2_MEMORY_MMAP;
    buffer.index = frame->mBufferIndex;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
        ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, frame->mBufferIndex, strerror(errno));
        return;
    }

    {
        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
        mNumDequeuedV4l2Buffers--;
    }
    mV4L2BufferReturned.notify_one();
}
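// Internal implementation shared by all configureStreams entry points:
// validate the configuration (NORMAL_MODE only, at least one stream, every
// stream supported, at most kMaxProcessedStream processed and
// kMaxStallStream stall/BLOB streams), then pick and start a single V4L2
// stream that can feed every requested output via crop/scale/convert.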
Status ExternalCameraDeviceSession::configureStreams(
        const V3_2::StreamConfiguration& config, V3_3::HalStreamConfiguration* out) {
    if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) {
        ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode);
        return Status::ILLEGAL_ARGUMENT;
    }

    if (config.streams.size() == 0) {
        ALOGE("%s: cannot configure zero stream", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    int numProcessedStream = 0;
    int numStallStream = 0;
    for (const auto& stream : config.streams) {
        // Check if the format/width/height combo is supported
        if (!isSupported(stream)) {
            return Status::ILLEGAL_ARGUMENT;
        }
        if (stream.format == PixelFormat::BLOB) {
            numStallStream++;
        } else {
            numProcessedStream++;
        }
    }

    if (numProcessedStream > kMaxProcessedStream) {
        ALOGE("%s: too many processed streams (expect <= %d, got %d)", __FUNCTION__,
                kMaxProcessedStream, numProcessedStream);
        return Status::ILLEGAL_ARGUMENT;
    }

    if (numStallStream > kMaxStallStream) {
        ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__,
                kMaxStallStream, numStallStream);
        return Status::ILLEGAL_ARGUMENT;
    }

    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }

    Mutex::Autolock _l(mLock);
    if (!mInflightFrames.empty()) {
        ALOGE("%s: trying to configureStreams while there are still %zu inflight frames!",
                __FUNCTION__, mInflightFrames.size());
        return Status::INTERNAL_ERROR;
    }

    // Add new streams
    for (const auto& stream : config.streams) {
        if (mStreamMap.count(stream.id) == 0) {
            mStreamMap[stream.id] = stream;
            mCirculatingBuffers.emplace(stream.id, CirculatingBuffers{});
        }
    }

    // Clean up removed streams
    for (auto it = mStreamMap.begin(); it != mStreamMap.end();) {
        int id = it->first;
        bool found = false;
        for (const auto& stream : config.streams) {
            if (id == stream.id) {
                found = true;
                break;
            }
        }
        if (!found) {
            // Unmap all buffers of deleted stream
            cleanupBuffersLocked(id);
            it = mStreamMap.erase(it);
        } else {
            ++it;
        }
    }

    // Now select a V4L2 format to produce all output streams. The chosen
    // format's uncropped dimension (width for VERTICAL cropping, height for
    // HORIZONTAL) must be large enough for every stream, and its aspect ratio
    // should match the most demanding stream. For example, with VERTICAL
    // cropping and 1280x720 + 640x480 outputs, desiredAr becomes 4:3 and
    // maxDim 1280, so the smallest suitable format would be 1280x960; the
    // 16:9 stream is then produced by cropping rows.
    float desiredAr = (mCroppingType == VERTICAL) ? kMaxAspectRatio : kMinAspectRatio;
    uint32_t maxDim = 0;
    for (const auto& stream : config.streams) {
        float aspectRatio = ASPECT_RATIO(stream);
        if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) ||
                (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) {
            desiredAr = aspectRatio;
        }

        // The dimension that's not cropped
        uint32_t dim = (mCroppingType == VERTICAL) ? stream.width : stream.height;
        if (dim > maxDim) {
            maxDim = dim;
        }
    }
    // Find the smallest format that matches the desired aspect ratio and is wide/high enough
    SupportedV4L2Format v4l2Fmt {.width = 0, .height = 0};
    for (const auto& fmt : mSupportedFormats) {
        uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
        if (dim >= maxDim) {
            float aspectRatio = ASPECT_RATIO(fmt);
            if (isAspectRatioClose(aspectRatio, desiredAr)) {
                v4l2Fmt = fmt;
                // Since mSupportedFormats is sorted by width then height, the first
                // matching fmt will be the smallest one with matching aspect ratio
                break;
            }
        }
    }
    if (v4l2Fmt.width == 0) {
        // Cannot find an exact aspect ratio match; try to find a close one
        for (const auto& fmt : mSupportedFormats) {
            uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height;
            if (dim >= maxDim) {
                float aspectRatio = ASPECT_RATIO(fmt);
                if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) ||
                        (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) {
                    v4l2Fmt = fmt;
                    break;
                }
            }
        }
    }

    if (v4l2Fmt.width == 0) {
        ALOGE("%s: unable to find a resolution matching (%s at least %d, aspect ratio %f)",
                __FUNCTION__, (mCroppingType == VERTICAL) ? "width" : "height",
                maxDim, desiredAr);
        return Status::ILLEGAL_ARGUMENT;
    }

    if (configureV4l2StreamLocked(v4l2Fmt) != 0) {
        ALOGE("V4L2 configuration failed! format: %c%c%c%c, w %d, h %d",
                v4l2Fmt.fourcc & 0xFF,
                (v4l2Fmt.fourcc >> 8) & 0xFF,
                (v4l2Fmt.fourcc >> 16) & 0xFF,
                (v4l2Fmt.fourcc >> 24) & 0xFF,
                v4l2Fmt.width, v4l2Fmt.height);
        return Status::INTERNAL_ERROR;
    }

    Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height};
    Size thumbSize { 0, 0 };
    camera_metadata_ro_entry entry =
            mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
    for (uint32_t i = 0; i < entry.count; i += 2) {
        Size sz { static_cast<uint32_t>(entry.data.i32[i]),
                  static_cast<uint32_t>(entry.data.i32[i+1]) };
        if (sz.width * sz.height > thumbSize.width * thumbSize.height) {
            thumbSize = sz;
        }
    }

    if (thumbSize.width * thumbSize.height == 0) {
        ALOGE("%s: non-zero thumbnail size not available", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }

    // Note: the thumbnail buffer is allocated at mMaxThumbResolution; the loop
    // above only verifies that a non-zero thumbnail size exists
    status = mOutputThread->allocateIntermediateBuffers(v4lSize,
            mMaxThumbResolution, config.streams);
    if (status != Status::OK) {
        ALOGE("%s: allocating intermediate buffers failed!", __FUNCTION__);
        return status;
    }
    // Fill in the HAL stream configuration to return. Every output is written
    // by the CPU on the output thread, so CPU_WRITE_OFTEN (plus CAMERA_OUTPUT)
    // is added to the producer usage, and each stream's maxBuffers is capped
    // by the V4L2 driver buffer count.
    out->streams.resize(config.streams.size());
    for (size_t i = 0; i < config.streams.size(); i++) {
        out->streams[i].overrideDataSpace = config.streams[i].dataSpace;
        out->streams[i].v3_2.id = config.streams[i].id;
        // TODO: double check whether we should add these CAMERA flags
        mStreamMap[config.streams[i].id].usage =
                out->streams[i].v3_2.producerUsage = config.streams[i].usage |
                BufferUsage::CPU_WRITE_OFTEN |
                BufferUsage::CAMERA_OUTPUT;
        out->streams[i].v3_2.consumerUsage = 0;
        out->streams[i].v3_2.maxBuffers = mV4L2BufferCount;

        switch (config.streams[i].format) {
            case PixelFormat::BLOB:
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: // Used by SurfaceTexture
                // No override
                out->streams[i].v3_2.overrideFormat = config.streams[i].format;
                break;
            case PixelFormat::IMPLEMENTATION_DEFINED:
                // Override based on VIDEO or not
                out->streams[i].v3_2.overrideFormat =
                        (config.streams[i].usage & BufferUsage::VIDEO_ENCODER) ?
                        PixelFormat::YCBCR_420_888 : PixelFormat::YV12;
                // Save the overridden format in mStreamMap
                mStreamMap[config.streams[i].id].format = out->streams[i].v3_2.overrideFormat;
                break;
            default:
                ALOGE("%s: unsupported format 0x%x", __FUNCTION__, config.streams[i].format);
                return Status::ILLEGAL_ARGUMENT;
        }
    }

    mFirstRequest = true;
    return Status::OK;
}

bool ExternalCameraDeviceSession::isClosed() {
    Mutex::Autolock _l(mLock);
    return mClosed;
}

#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
#define UPDATE(md, tag, data, size) \
do { \
    if ((md).update((tag), (data), (size))) { \
        ALOGE("Update " #tag " failed!"); \
        return BAD_VALUE; \
    } \
} while (0)

status_t ExternalCameraDeviceSession::initDefaultRequests() {
    ::android::hardware::camera::common::V1_0::helper::CameraMetadata md;

    const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
    UPDATE(md, ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);

    const int32_t exposureCompensation = 0;
    UPDATE(md, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &exposureCompensation, 1);

    const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    UPDATE(md, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &videoStabilizationMode, 1);

    const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
    UPDATE(md, ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    UPDATE(md, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);

    const uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AF_MODE, &afMode, 1);

    const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    UPDATE(md, ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

    const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    UPDATE(md, ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    UPDATE(md, ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    UPDATE(md, ANDROID_FLASH_MODE, &flashMode, 1);

    const int32_t thumbnailSize[] = {240, 180};
    UPDATE(md, ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

    const uint8_t jpegQuality = 90;
    UPDATE(md, ANDROID_JPEG_QUALITY, &jpegQuality, 1);
    UPDATE(md, ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1);

    const int32_t jpegOrientation = 0;
    UPDATE(md, ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

    const uint8_t oisMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    UPDATE(md, ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &oisMode, 1);

    const uint8_t nrMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
    UPDATE(md, ANDROID_NOISE_REDUCTION_MODE, &nrMode, 1);

    const uint8_t fdMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_FACE_DETECT_MODE, &fdMode, 1);

    const uint8_t hotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotpixelMode, 1);
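    // Choose the default AE target FPS range: a fixed 30fps range if any
    // supported format can reach 30fps, otherwise the highest frame rate any
    // format advertises.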
    bool support30Fps = false;
    int32_t maxFps = std::numeric_limits<int32_t>::min();
    for (const auto& supportedFormat : mSupportedFormats) {
        for (const auto& fr : supportedFormat.frameRates) {
            int32_t framerateInt = static_cast<int32_t>(fr.getDouble());
            if (maxFps < framerateInt) {
                maxFps = framerateInt;
            }
            if (framerateInt == 30) {
                support30Fps = true;
                break;
            }
        }
        if (support30Fps) {
            break;
        }
    }
    int32_t defaultFramerate = support30Fps ? 30 : maxFps;
    int32_t defaultFpsRange[] = {defaultFramerate, defaultFramerate};
    UPDATE(md, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, defaultFpsRange, ARRAY_SIZE(defaultFpsRange));

    uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibandingMode, 1);

    const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_MODE, &controlMode, 1);

    auto requestTemplates = hidl_enum_iterator<RequestTemplate>();
    for (RequestTemplate type : requestTemplates) {
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata mdCopy = md;
        uint8_t intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        switch (type) {
            case RequestTemplate::PREVIEW:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
                break;
            case RequestTemplate::STILL_CAPTURE:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
                break;
            case RequestTemplate::VIDEO_RECORD:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
                break;
            case RequestTemplate::VIDEO_SNAPSHOT:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
                break;
            default:
                ALOGV("%s: unsupported RequestTemplate type %d",
                        __FUNCTION__, static_cast<int>(type));
                continue;
        }
        UPDATE(mdCopy, ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1);

        camera_metadata_t* rawMd = mdCopy.release();
        CameraMetadata hidlMd;
        hidlMd.setToExternal(
                (uint8_t*) rawMd, get_camera_metadata_size(rawMd));
        mDefaultRequests[type] = hidlMd;
        free_camera_metadata(rawMd);
    }

    return OK;
}
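// Synthesize the per-frame result metadata. A UVC camera runs 3A entirely on
// the device and exposes no 3A state to the host, so AE/AWB are always
// reported as converged and the AF state is faked from the most recent AF
// trigger received in a request.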
status_t ExternalCameraDeviceSession::fillCaptureResult(
        common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp) {
    // android.control
    // For USB cameras we don't know the AE state. Set the state to converged to
    // indicate the frame should be good to use, so apps don't have to wait for
    // the AE state to change.
    const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
    UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1);

    const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
    UPDATE(md, ANDROID_CONTROL_AE_LOCK, &ae_lock, 1);

    bool afTrigger = mAfTrigger;
    if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) {
        Mutex::Autolock _l(mLock);
        camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER);
        if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) {
            mAfTrigger = afTrigger = true;
        } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) {
            mAfTrigger = afTrigger = false;
        }
    }

    // The USB camera handles AF by itself and we have no control over it.
    // Simply fake the AF metadata based on the request received here.
    uint8_t afState;
    if (afTrigger) {
        afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
    } else {
        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    }
    UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1);

    // Set AWB state to converged to indicate the frame should be good to use.
    const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED;
    UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1);

    const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    camera_metadata_ro_entry active_array_size =
            mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    if (active_array_size.count == 0) {
        ALOGE("%s: cannot find active array size!", __FUNCTION__);
        return -EINVAL;
    }

    const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
    UPDATE(md, ANDROID_FLASH_STATE, &flashState, 1);

    // android.scaler
    const int32_t crop_region[] = {
        active_array_size.data.i32[0], active_array_size.data.i32[1],
        active_array_size.data.i32[2], active_array_size.data.i32[3],
    };
    UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region));

    // android.sensor
    UPDATE(md, ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);

    // android.statistics
    const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);

    const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
    UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1);

    return OK;
}

#undef ARRAY_SIZE
#undef UPDATE

} // namespace implementation
} // namespace V3_4
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android