ExternalCameraDeviceSession.cpp revision d477e1e092ec8249f50dcd354950ad86ba7e6d0d
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#define LOG_TAG "ExtCamDevSsn@3.4"
//#define LOG_NDEBUG 0
#include <log/log.h>

#include <inttypes.h>
#include "ExternalCameraDeviceSession.h"

#include "android-base/macros.h"
#include <utils/Timers.h>
#include <linux/videodev2.h>
#include <sync/sync.h>

#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs
#include <libyuv.h>

#include <jpeglib.h>

namespace android {
namespace hardware {
namespace camera {
namespace device {
namespace V3_4 {
namespace implementation {

namespace {
// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer.
static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */;

const int kBadFramesAfterStreamOn = 1; // drop x frames after streamOn to get rid of some initial
                                       // bad frames. TODO: develop a better bad frame detection
                                       // method

} // Anonymous namespace

// Static instances
const int ExternalCameraDeviceSession::kMaxProcessedStream;
const int ExternalCameraDeviceSession::kMaxStallStream;
HandleImporter ExternalCameraDeviceSession::sHandleImporter;

ExternalCameraDeviceSession::ExternalCameraDeviceSession(
        const sp<ICameraDeviceCallback>& callback,
        const ExternalCameraConfig& cfg,
        const std::vector<SupportedV4L2Format>& sortedFormats,
        const CroppingType& croppingType,
        const common::V1_0::helper::CameraMetadata& chars,
        unique_fd v4l2Fd) :
        mCallback(callback),
        mCfg(cfg),
        mCameraCharacteristics(chars),
        mSupportedFormats(sortedFormats),
        mCroppingType(croppingType),
        mV4l2Fd(std::move(v4l2Fd)),
        mOutputThread(new OutputThread(this, mCroppingType)),
        mMaxThumbResolution(getMaxThumbResolution()),
        mMaxJpegResolution(getMaxJpegResolution()) {
    mInitFail = initialize();
}

bool ExternalCameraDeviceSession::initialize() {
    if (mV4l2Fd.get() < 0) {
        ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get());
        return true;
    }

    status_t status = initDefaultRequests();
    if (status != OK) {
        ALOGE("%s: init default requests failed!", __FUNCTION__);
        return true;
    }

    mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>(
            kMetadataMsgQueueSize, false /* non blocking */);
    if (!mRequestMetadataQueue->isValid()) {
        ALOGE("%s: invalid request fmq", __FUNCTION__);
        return true;
    }
    mResultMetadataQueue = std::make_shared<RequestMetadataQueue>(
            kMetadataMsgQueueSize, false /* non blocking */);
    if (!mResultMetadataQueue->isValid()) {
        ALOGE("%s: invalid result fmq", __FUNCTION__);
        return true;
    }

    // TODO: check is PRIORITY_DISPLAY enough?
    mOutputThread->run("ExtCamOut", PRIORITY_DISPLAY);
    return false;
}
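/* Illustrative sketch (not part of the HAL): how a client is expected to hand
 * request settings to this session via the request FMQ created above. The
 * client writes the packed metadata into the queue first, then sets
 * CaptureRequest::fmqSettingsSize to the number of bytes written; a size of 0
 * means the settings travel in CaptureRequest::settings over hwbinder
 * instead. The names `clientQueue` and `packedSettings` below are
 * hypothetical.
 *
 *   RequestMetadataQueue clientQueue(descFromGetCaptureRequestMetadataQueue);
 *   clientQueue.write(packedSettings.data(), packedSettings.size());
 *   request.fmqSettingsSize = packedSettings.size();
 *   // ... then call processCaptureRequest(requests, cachesToRemove, cb)
 */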
Status ExternalCameraDeviceSession::initStatus() const {
    Mutex::Autolock _l(mLock);
    Status status = Status::OK;
    if (mInitFail || mClosed) {
        ALOGI("%s: session initFailed %d closed %d", __FUNCTION__, mInitFail, mClosed);
        status = Status::INTERNAL_ERROR;
    }
    return status;
}

ExternalCameraDeviceSession::~ExternalCameraDeviceSession() {
    if (!isClosed()) {
        ALOGE("ExternalCameraDeviceSession deleted before close!");
        close();
    }
}

void ExternalCameraDeviceSession::dumpState(const native_handle_t*) {
    // TODO: b/72261676 dump more runtime information
}

Return<void> ExternalCameraDeviceSession::constructDefaultRequestSettings(
        V3_2::RequestTemplate type,
        V3_2::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) {
    V3_2::CameraMetadata outMetadata;
    Status status = constructDefaultRequestSettingsRaw(
            static_cast<RequestTemplate>(type), &outMetadata);
    _hidl_cb(status, outMetadata);
    return Void();
}

Status ExternalCameraDeviceSession::constructDefaultRequestSettingsRaw(RequestTemplate type,
        V3_2::CameraMetadata *outMetadata) {
    CameraMetadata emptyMd;
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }

    switch (type) {
        case RequestTemplate::PREVIEW:
        case RequestTemplate::STILL_CAPTURE:
        case RequestTemplate::VIDEO_RECORD:
        case RequestTemplate::VIDEO_SNAPSHOT: {
            *outMetadata = mDefaultRequests[type];
            break;
        }
        case RequestTemplate::MANUAL:
        case RequestTemplate::ZERO_SHUTTER_LAG:
            // Don't support MANUAL, ZSL templates
            status = Status::ILLEGAL_ARGUMENT;
            break;
        default:
            ALOGE("%s: unknown request template type %d", __FUNCTION__, static_cast<int>(type));
            status = Status::ILLEGAL_ARGUMENT;
            break;
    }
    return status;
}

Return<void> ExternalCameraDeviceSession::configureStreams(
        const V3_2::StreamConfiguration& streams,
        ICameraDeviceSession::configureStreams_cb _hidl_cb) {
    V3_2::HalStreamConfiguration outStreams;
    V3_3::HalStreamConfiguration outStreams_v33;
    Mutex::Autolock _il(mInterfaceLock);

    Status status = configureStreams(streams, &outStreams_v33);
    size_t size = outStreams_v33.streams.size();
    outStreams.streams.resize(size);
    for (size_t i = 0; i < size; i++) {
        outStreams.streams[i] = outStreams_v33.streams[i].v3_2;
    }
    _hidl_cb(status, outStreams);
    return Void();
}

Return<void> ExternalCameraDeviceSession::configureStreams_3_3(
        const V3_2::StreamConfiguration& streams,
        ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb) {
    V3_3::HalStreamConfiguration outStreams;
    Mutex::Autolock _il(mInterfaceLock);

    Status status = configureStreams(streams, &outStreams);
    _hidl_cb(status, outStreams);
    return Void();
}
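/* All three configureStreams entry points funnel into the same V3.2-based
 * core (configureStreams(V3_2::StreamConfiguration, V3_3::HalStreamConfiguration*)):
 * the 3.4 variant below first strips the request down to a 3.2 config
 * (dropping fields the external camera doesn't use, such as sessionParams),
 * and each variant re-wraps the 3.3 result into its own HAL stream
 * configuration version on the way out. */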
Return<void> ExternalCameraDeviceSession::configureStreams_3_4(
        const V3_4::StreamConfiguration& requestedConfiguration,
        ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb) {
    V3_2::StreamConfiguration config_v32;
    V3_3::HalStreamConfiguration outStreams_v33;
    Mutex::Autolock _il(mInterfaceLock);

    config_v32.operationMode = requestedConfiguration.operationMode;
    config_v32.streams.resize(requestedConfiguration.streams.size());
    for (size_t i = 0; i < config_v32.streams.size(); i++) {
        config_v32.streams[i] = requestedConfiguration.streams[i].v3_2;
    }

    // Ignore requestedConfiguration.sessionParams. External camera does not support it
    Status status = configureStreams(config_v32, &outStreams_v33);

    V3_4::HalStreamConfiguration outStreams;
    outStreams.streams.resize(outStreams_v33.streams.size());
    for (size_t i = 0; i < outStreams.streams.size(); i++) {
        outStreams.streams[i].v3_3 = outStreams_v33.streams[i];
    }
    _hidl_cb(status, outStreams);
    return Void();
}

Return<void> ExternalCameraDeviceSession::getCaptureRequestMetadataQueue(
        ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    _hidl_cb(*mRequestMetadataQueue->getDesc());
    return Void();
}

Return<void> ExternalCameraDeviceSession::getCaptureResultMetadataQueue(
        ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    _hidl_cb(*mResultMetadataQueue->getDesc());
    return Void();
}

Return<void> ExternalCameraDeviceSession::processCaptureRequest(
        const hidl_vec<CaptureRequest>& requests,
        const hidl_vec<BufferCache>& cachesToRemove,
        ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    updateBufferCaches(cachesToRemove);

    uint32_t numRequestProcessed = 0;
    Status s = Status::OK;
    for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) {
        s = processOneCaptureRequest(requests[i]);
        if (s != Status::OK) {
            break;
        }
    }

    _hidl_cb(s, numRequestProcessed);
    return Void();
}

Return<void> ExternalCameraDeviceSession::processCaptureRequest_3_4(
        const hidl_vec<V3_4::CaptureRequest>& requests,
        const hidl_vec<V3_2::BufferCache>& cachesToRemove,
        ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) {
    Mutex::Autolock _il(mInterfaceLock);
    updateBufferCaches(cachesToRemove);

    uint32_t numRequestProcessed = 0;
    Status s = Status::OK;
    for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) {
        s = processOneCaptureRequest(requests[i].v3_2);
        if (s != Status::OK) {
            break;
        }
    }

    _hidl_cb(s, numRequestProcessed);
    return Void();
}

Return<Status> ExternalCameraDeviceSession::flush() {
    Mutex::Autolock _il(mInterfaceLock);
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }
    mOutputThread->flush();
    return Status::OK;
}

Return<void> ExternalCameraDeviceSession::close() {
    Mutex::Autolock _il(mInterfaceLock);
    Mutex::Autolock _l(mLock);
    if (!mClosed) {
        // TODO: b/72261676 Cleanup inflight buffers/V4L2 buffer queue
        ALOGV("%s: closing V4L2 camera FD %d", __FUNCTION__, mV4l2Fd.get());
        mV4l2Fd.reset();
        mOutputThread->requestExit(); // TODO: join?

        // free all imported buffers
        for(auto& pair : mCirculatingBuffers) {
            CirculatingBuffers& buffers = pair.second;
            for (auto& p2 : buffers) {
                sHandleImporter.freeBuffer(p2.second);
            }
        }

        mClosed = true;
    }
    return Void();
}
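/* mCirculatingBuffers is a per-stream cache of gralloc buffers imported into
 * the HAL process, keyed by streamId and then by the framework-assigned
 * bufferId. importRequest() below populates it lazily the first time a
 * bufferId is seen; entries are released either when the framework sends a
 * BufferCache removal (updateBufferCaches), when a stream is torn down
 * (cleanupBuffersLocked), or all at once in close() above. */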
Status ExternalCameraDeviceSession::importRequest(
        const CaptureRequest& request,
        hidl_vec<buffer_handle_t*>& allBufPtrs,
        hidl_vec<int>& allFences) {
    size_t numOutputBufs = request.outputBuffers.size();
    size_t numBufs = numOutputBufs;
    // Validate all I/O buffers
    hidl_vec<buffer_handle_t> allBufs;
    hidl_vec<uint64_t> allBufIds;
    allBufs.resize(numBufs);
    allBufIds.resize(numBufs);
    allBufPtrs.resize(numBufs);
    allFences.resize(numBufs);
    std::vector<int32_t> streamIds(numBufs);

    for (size_t i = 0; i < numOutputBufs; i++) {
        allBufs[i] = request.outputBuffers[i].buffer.getNativeHandle();
        allBufIds[i] = request.outputBuffers[i].bufferId;
        allBufPtrs[i] = &allBufs[i];
        streamIds[i] = request.outputBuffers[i].streamId;
    }

    for (size_t i = 0; i < numBufs; i++) {
        buffer_handle_t buf = allBufs[i];
        uint64_t bufId = allBufIds[i];
        CirculatingBuffers& cbs = mCirculatingBuffers[streamIds[i]];
        if (cbs.count(bufId) == 0) {
            if (buf == nullptr) {
                ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId);
                return Status::ILLEGAL_ARGUMENT;
            }
            // Register a newly seen buffer
            buffer_handle_t importedBuf = buf;
            sHandleImporter.importBuffer(importedBuf);
            if (importedBuf == nullptr) {
                ALOGE("%s: output buffer %zu is invalid!", __FUNCTION__, i);
                return Status::INTERNAL_ERROR;
            } else {
                cbs[bufId] = importedBuf;
            }
        }
        allBufPtrs[i] = &cbs[bufId];
    }

    // All buffers are imported. Now validate output buffer acquire fences
    for (size_t i = 0; i < numOutputBufs; i++) {
        if (!sHandleImporter.importFence(
                request.outputBuffers[i].acquireFence, allFences[i])) {
            ALOGE("%s: output buffer %zu acquire fence is invalid", __FUNCTION__, i);
            cleanupInflightFences(allFences, i);
            return Status::INTERNAL_ERROR;
        }
    }
    return Status::OK;
}

void ExternalCameraDeviceSession::cleanupInflightFences(
        hidl_vec<int>& allFences, size_t numFences) {
    for (size_t j = 0; j < numFences; j++) {
        sHandleImporter.closeFence(allFences[j]);
    }
}
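/* Per-request flow (see processOneCaptureRequest below): validate session
 * state and reject reprocess requests, pull settings either from the request
 * FMQ or from the hwbinder payload, import/validate output buffers and
 * acquire fences, dequeue one V4L2 frame, then package everything into a
 * HalRequest and hand it to the OutputThread. All decode/scale/encode work
 * happens on that thread so the binder thread returns quickly. */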
Status ExternalCameraDeviceSession::processOneCaptureRequest(const CaptureRequest& request) {
    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }

    if (request.inputBuffer.streamId != -1) {
        ALOGE("%s: external camera does not support reprocessing!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    Mutex::Autolock _l(mLock);
    if (!mV4l2Streaming) {
        ALOGE("%s: cannot process request in streamOff state!", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }

    const camera_metadata_t *rawSettings = nullptr;
    bool converted = true;
    CameraMetadata settingsFmq;  // settings from FMQ
    if (request.fmqSettingsSize > 0) {
        // non-blocking read; client must write metadata before calling
        // processOneCaptureRequest
        settingsFmq.resize(request.fmqSettingsSize);
        bool read = mRequestMetadataQueue->read(settingsFmq.data(), request.fmqSettingsSize);
        if (read) {
            converted = V3_2::implementation::convertFromHidl(settingsFmq, &rawSettings);
        } else {
            ALOGE("%s: capture request settings metadata couldn't be read from fmq!", __FUNCTION__);
            converted = false;
        }
    } else {
        converted = V3_2::implementation::convertFromHidl(request.settings, &rawSettings);
    }

    if (converted && rawSettings != nullptr) {
        mLatestReqSetting = rawSettings;
    }

    if (!converted) {
        ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    if (mFirstRequest && rawSettings == nullptr) {
        ALOGE("%s: capture request settings must not be null for first request!",
                __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    hidl_vec<buffer_handle_t*> allBufPtrs;
    hidl_vec<int> allFences;
    size_t numOutputBufs = request.outputBuffers.size();

    if (numOutputBufs == 0) {
        ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    status = importRequest(request, allBufPtrs, allFences);
    if (status != Status::OK) {
        return status;
    }

    // TODO: program fps range per capture request here
    //       or limit the set of availableFpsRange

    sp<V4L2Frame> frameIn = dequeueV4l2FrameLocked();
    if (frameIn == nullptr) {
        ALOGE("%s: V4L2 dequeue frame failed!", __FUNCTION__);
        return Status::INTERNAL_ERROR;
    }
    // TODO: this can probably be replaced by the v4l buffer timestamp
    //       if the device supports it
    nsecs_t shutterTs = systemTime(SYSTEM_TIME_MONOTONIC);

    // TODO: reduce object copy in this path
    HalRequest halReq = {
            .frameNumber = request.frameNumber,
            .setting = mLatestReqSetting,
            .frameIn = frameIn,
            .shutterTs = shutterTs};
    halReq.buffers.resize(numOutputBufs);
    for (size_t i = 0; i < numOutputBufs; i++) {
        HalStreamBuffer& halBuf = halReq.buffers[i];
        int streamId = halBuf.streamId = request.outputBuffers[i].streamId;
        halBuf.bufferId = request.outputBuffers[i].bufferId;
        const Stream& stream = mStreamMap[streamId];
        halBuf.width = stream.width;
        halBuf.height = stream.height;
        halBuf.format = stream.format;
        halBuf.usage = stream.usage;
        halBuf.bufPtr = allBufPtrs[i];
        halBuf.acquireFence = allFences[i];
        halBuf.fenceTimeout = false;
    }
    mInflightFrames.insert(halReq.frameNumber);
    // Send request to OutputThread for the rest of processing
    mOutputThread->submitRequest(halReq);
    mFirstRequest = false;
    return Status::OK;
}
void ExternalCameraDeviceSession::notifyShutter(uint32_t frameNumber, nsecs_t shutterTs) {
    NotifyMsg msg;
    msg.type = MsgType::SHUTTER;
    msg.msg.shutter.frameNumber = frameNumber;
    msg.msg.shutter.timestamp = shutterTs;
    mCallback->notify({msg});
}

void ExternalCameraDeviceSession::notifyError(
        uint32_t frameNumber, int32_t streamId, ErrorCode ec) {
    NotifyMsg msg;
    msg.type = MsgType::ERROR;
    msg.msg.error.frameNumber = frameNumber;
    msg.msg.error.errorStreamId = streamId;
    msg.msg.error.errorCode = ec;
    mCallback->notify({msg});
}

//TODO: refactor with processCaptureResult
Status ExternalCameraDeviceSession::processCaptureRequestError(const HalRequest& req) {
    // Return V4L2 buffer to V4L2 buffer queue
    enqueueV4l2Frame(req.frameIn);

    // NotifyShutter
    notifyShutter(req.frameNumber, req.shutterTs);

    notifyError(/*frameNum*/req.frameNumber, /*stream*/-1, ErrorCode::ERROR_REQUEST);

    // Fill output buffers
    hidl_vec<CaptureResult> results;
    results.resize(1);
    CaptureResult& result = results[0];
    result.frameNumber = req.frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req.buffers.size());
    for (size_t i = 0; i < req.buffers.size(); i++) {
        result.outputBuffers[i].streamId = req.buffers[i].streamId;
        result.outputBuffers[i].bufferId = req.buffers[i].bufferId;
        result.outputBuffers[i].status = BufferStatus::ERROR;
        if (req.buffers[i].acquireFence >= 0) {
            native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
            handle->data[0] = req.buffers[i].acquireFence;
            result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
        }
    }

    // update inflight records
    {
        Mutex::Autolock _l(mLock);
        mInflightFrames.erase(req.frameNumber);
    }

    // Callback into framework
    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true);
    freeReleaseFences(results);
    return Status::OK;
}
Status ExternalCameraDeviceSession::processCaptureResult(HalRequest& req) {
    // Return V4L2 buffer to V4L2 buffer queue
    enqueueV4l2Frame(req.frameIn);

    // NotifyShutter
    notifyShutter(req.frameNumber, req.shutterTs);

    // Fill output buffers
    hidl_vec<CaptureResult> results;
    results.resize(1);
    CaptureResult& result = results[0];
    result.frameNumber = req.frameNumber;
    result.partialResult = 1;
    result.inputBuffer.streamId = -1;
    result.outputBuffers.resize(req.buffers.size());
    for (size_t i = 0; i < req.buffers.size(); i++) {
        result.outputBuffers[i].streamId = req.buffers[i].streamId;
        result.outputBuffers[i].bufferId = req.buffers[i].bufferId;
        if (req.buffers[i].fenceTimeout) {
            result.outputBuffers[i].status = BufferStatus::ERROR;
            native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
            handle->data[0] = req.buffers[i].acquireFence;
            result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
            notifyError(req.frameNumber, req.buffers[i].streamId, ErrorCode::ERROR_BUFFER);
        } else {
            result.outputBuffers[i].status = BufferStatus::OK;
            // TODO: refactor
            if (req.buffers[i].acquireFence > 0) {
                native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0);
                handle->data[0] = req.buffers[i].acquireFence;
                result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false);
            }
        }
    }

    // Fill capture result metadata
    fillCaptureResult(req.setting, req.shutterTs);
    const camera_metadata_t *rawResult = req.setting.getAndLock();
    V3_2::implementation::convertToHidl(rawResult, &result.result);
    req.setting.unlock(rawResult);

    // update inflight records
    {
        Mutex::Autolock _l(mLock);
        mInflightFrames.erase(req.frameNumber);
    }

    // Callback into framework
    invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true);
    freeReleaseFences(results);
    return Status::OK;
}

void ExternalCameraDeviceSession::invokeProcessCaptureResultCallback(
        hidl_vec<CaptureResult> &results, bool tryWriteFmq) {
    if (mProcessCaptureResultLock.tryLock() != OK) {
        const nsecs_t NS_TO_SECOND = 1000000000;
        ALOGV("%s: previous call is not finished! waiting 1s...", __FUNCTION__);
        if (mProcessCaptureResultLock.timedLock(/* 1s */NS_TO_SECOND) != OK) {
            ALOGE("%s: cannot acquire lock in 1s, cannot proceed",
                    __FUNCTION__);
            return;
        }
    }
    if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) {
        for (CaptureResult &result : results) {
            if (result.result.size() > 0) {
                if (mResultMetadataQueue->write(result.result.data(), result.result.size())) {
                    result.fmqResultSize = result.result.size();
                    result.result.resize(0);
                } else {
                    ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__);
                    result.fmqResultSize = 0;
                }
            } else {
                result.fmqResultSize = 0;
            }
        }
    }
    auto status = mCallback->processCaptureResult(results);
    if (!status.isOk()) {
        ALOGE("%s: processCaptureResult ERROR : %s", __FUNCTION__,
                status.description().c_str());
    }

    mProcessCaptureResultLock.unlock();
}

void ExternalCameraDeviceSession::freeReleaseFences(hidl_vec<CaptureResult>& results) {
    for (auto& result : results) {
        if (result.inputBuffer.releaseFence.getNativeHandle() != nullptr) {
            native_handle_t* handle = const_cast<native_handle_t*>(
                    result.inputBuffer.releaseFence.getNativeHandle());
            native_handle_close(handle);
            native_handle_delete(handle);
        }
        for (auto& buf : result.outputBuffers) {
            if (buf.releaseFence.getNativeHandle() != nullptr) {
                native_handle_t* handle = const_cast<native_handle_t*>(
                        buf.releaseFence.getNativeHandle());
                native_handle_close(handle);
                native_handle_delete(handle);
            }
        }
    }
    return;
}
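/* OutputThread does the heavy lifting for each capture request off the binder
 * thread: MJPEG decode into an intermediate YU12 frame, crop/scale to each
 * configured output size, format conversion into the gralloc buffers, and
 * JPEG (BLOB) encoding, before calling back into the parent session to send
 * the capture result. */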
ExternalCameraDeviceSession::OutputThread::OutputThread(
        wp<ExternalCameraDeviceSession> parent,
        CroppingType ct) : mParent(parent), mCroppingType(ct) {}

ExternalCameraDeviceSession::OutputThread::~OutputThread() {}

uint32_t ExternalCameraDeviceSession::OutputThread::getFourCcFromLayout(
        const YCbCrLayout& layout) {
    intptr_t cb = reinterpret_cast<intptr_t>(layout.cb);
    intptr_t cr = reinterpret_cast<intptr_t>(layout.cr);
    if (std::abs(cb - cr) == 1 && layout.chromaStep == 2) {
        // Interleaved format
        if (layout.cb > layout.cr) {
            return V4L2_PIX_FMT_NV21;
        } else {
            return V4L2_PIX_FMT_NV12;
        }
    } else if (layout.chromaStep == 1) {
        // Planar format
        if (layout.cb > layout.cr) {
            return V4L2_PIX_FMT_YVU420; // YV12
        } else {
            return V4L2_PIX_FMT_YUV420; // YU12
        }
    } else {
        return FLEX_YUV_GENERIC;
    }
}

int ExternalCameraDeviceSession::OutputThread::getCropRect(
        CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) {
    if (out == nullptr) {
        ALOGE("%s: out is null", __FUNCTION__);
        return -1;
    }

    uint32_t inW = inSize.width;
    uint32_t inH = inSize.height;
    uint32_t outW = outSize.width;
    uint32_t outH = outSize.height;

    // Handle special case where aspect ratio is close to input but scaled
    // dimension is slightly larger than input
    float arIn = ASPECT_RATIO(inSize);
    float arOut = ASPECT_RATIO(outSize);
    if (isAspectRatioClose(arIn, arOut)) {
        out->left = 0;
        out->top = 0;
        out->width = inW;
        out->height = inH;
        return 0;
    }

    if (ct == VERTICAL) {
        uint64_t scaledOutH = static_cast<uint64_t>(outH) * inW / outW;
        if (scaledOutH > inH) {
            ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d",
                    __FUNCTION__, outW, outH, inW, inH);
            return -1;
        }
        scaledOutH = scaledOutH & ~0x1; // make it multiple of 2

        out->left = 0;
        out->top = ((inH - scaledOutH) / 2) & ~0x1;
        out->width = inW;
        out->height = static_cast<int32_t>(scaledOutH);
        ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d",
                __FUNCTION__, inW, inH, outW, outH, out->top, static_cast<int32_t>(scaledOutH));
    } else {
        uint64_t scaledOutW = static_cast<uint64_t>(outW) * inH / outH;
        if (scaledOutW > inW) {
            ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d",
                    __FUNCTION__, outW, outH, inW, inH);
            return -1;
        }
        scaledOutW = scaledOutW & ~0x1; // make it multiple of 2

        out->left = ((inW - scaledOutW) / 2) & ~0x1;
        out->top = 0;
        out->width = static_cast<int32_t>(scaledOutW);
        out->height = inH;
        ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledW %d",
                __FUNCTION__, inW, inH, outW, outH, out->top, static_cast<int32_t>(scaledOutW));
    }

    return 0;
}
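/* Worked example for getCropRect above (illustrative numbers): with VERTICAL
 * cropping, a 640x480 input and a 640x360 output, the scaled output height is
 * 360 * 640 / 640 = 360, so the crop rect becomes left=0, top=(480-360)/2=60,
 * 640x360: a 16:9 band cut from the middle of the 4:3 frame, which the scaler
 * can then resize without distortion. */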
int ExternalCameraDeviceSession::OutputThread::cropAndScaleLocked(
        sp<AllocatedFrame>& in, const Size& outSz, YCbCrLayout* out) {
    Size inSz = {in->mWidth, in->mHeight};

    int ret;
    if (inSz == outSz) {
        ret = in->getLayout(out);
        if (ret != 0) {
            ALOGE("%s: failed to get input image layout", __FUNCTION__);
            return ret;
        }
        return ret;
    }

    // Cropping to output aspect ratio
    IMapper::Rect inputCrop;
    ret = getCropRect(mCroppingType, inSz, outSz, &inputCrop);
    if (ret != 0) {
        ALOGE("%s: failed to compute crop rect for output size %dx%d",
                __FUNCTION__, outSz.width, outSz.height);
        return ret;
    }

    YCbCrLayout croppedLayout;
    ret = in->getCroppedLayout(inputCrop, &croppedLayout);
    if (ret != 0) {
        ALOGE("%s: failed to crop input image %dx%d to output size %dx%d",
                __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
        return ret;
    }

    if ((mCroppingType == VERTICAL && inSz.width == outSz.width) ||
            (mCroppingType == HORIZONTAL && inSz.height == outSz.height)) {
        // No scale is needed
        *out = croppedLayout;
        return 0;
    }

    auto it = mScaledYu12Frames.find(outSz);
    sp<AllocatedFrame> scaledYu12Buf;
    if (it != mScaledYu12Frames.end()) {
        scaledYu12Buf = it->second;
    } else {
        it = mIntermediateBuffers.find(outSz);
        if (it == mIntermediateBuffers.end()) {
            ALOGE("%s: failed to find intermediate buffer size %dx%d",
                    __FUNCTION__, outSz.width, outSz.height);
            return -1;
        }
        scaledYu12Buf = it->second;
    }
    // Scale
    YCbCrLayout outLayout;
    ret = scaledYu12Buf->getLayout(&outLayout);
    if (ret != 0) {
        ALOGE("%s: failed to get output buffer layout", __FUNCTION__);
        return ret;
    }

    ret = libyuv::I420Scale(
            static_cast<uint8_t*>(croppedLayout.y),
            croppedLayout.yStride,
            static_cast<uint8_t*>(croppedLayout.cb),
            croppedLayout.cStride,
            static_cast<uint8_t*>(croppedLayout.cr),
            croppedLayout.cStride,
            inputCrop.width,
            inputCrop.height,
            static_cast<uint8_t*>(outLayout.y),
            outLayout.yStride,
            static_cast<uint8_t*>(outLayout.cb),
            outLayout.cStride,
            static_cast<uint8_t*>(outLayout.cr),
            outLayout.cStride,
            outSz.width,
            outSz.height,
            // TODO: b/72261744 see if we can use better filter without losing too much perf
            libyuv::FilterMode::kFilterNone);

    if (ret != 0) {
        ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. Ret %d",
                __FUNCTION__, inputCrop.width, inputCrop.height,
                outSz.width, outSz.height, ret);
        return ret;
    }

    *out = outLayout;
    mScaledYu12Frames.insert({outSz, scaledYu12Buf});
    return 0;
}
int ExternalCameraDeviceSession::OutputThread::cropAndScaleThumbLocked(
        sp<AllocatedFrame>& in, const Size &outSz, YCbCrLayout* out) {
    Size inSz {in->mWidth, in->mHeight};

    if ((outSz.width * outSz.height) >
            (mYu12ThumbFrame->mWidth * mYu12ThumbFrame->mHeight)) {
        ALOGE("%s: Requested thumbnail size too big (%d,%d) > (%d,%d)",
                __FUNCTION__, outSz.width, outSz.height,
                mYu12ThumbFrame->mWidth, mYu12ThumbFrame->mHeight);
        return -1;
    }

    int ret;

    /* This will crop-and-zoom the input YUV frame to the thumbnail size
     * Based on the following logic:
     * 1) Square pixels come in, square pixels come out, therefore single
     * scale factor is computed to either make input bigger or smaller
     * depending on if we are upscaling or downscaling
     * 2) That single scale factor would either make height too tall or width
     * too wide so we need to crop the input either horizontally or vertically
     * but not both
     */

    /* Convert the input and output dimensions into floats for ease of math */
    float fWin = static_cast<float>(inSz.width);
    float fHin = static_cast<float>(inSz.height);
    float fWout = static_cast<float>(outSz.width);
    float fHout = static_cast<float>(outSz.height);

    /* Compute the one scale factor from (1) above, it will be the smaller of
     * the two possibilities. */
    float scaleFactor = std::min( fHin / fHout, fWin / fWout );

    /* Since we are crop-and-zooming (as opposed to letter/pillar boxing) we can
     * simply multiply the output by our scaleFactor to get the cropped input
     * size. Note that at least one of {fWcrop, fHcrop} is going to wind up
     * being {fWin, fHin} respectively because fHout or fWout cancels out the
     * scaleFactor calculation above.
     *
     * Specifically:
     * if ( fHin / fHout ) < ( fWin / fWout ) we crop the sides off
     * input, in which case
     *   scaleFactor = fHin / fHout
     *   fWcrop = fHin / fHout * fWout
     *   fHcrop = fHin
     *
     * Note that fWcrop <= fWin ( because ( fHin / fHout ) * fWout < fWin, which
     * is just the inequality above with both sides multiplied by fWout
     *
     * on the other hand if ( fWin / fWout ) < ( fHin / fHout) we crop the top
     * and the bottom off of input, and
     *   scaleFactor = fWin / fWout
     *   fWcrop = fWin
     *   fHCrop = fWin / fWout * fHout
     */
    float fWcrop = scaleFactor * fWout;
    float fHcrop = scaleFactor * fHout;

    /* Convert to integer and truncate to an even number */
    Size cropSz = { 2*static_cast<uint32_t>(fWcrop/2.0f),
                    2*static_cast<uint32_t>(fHcrop/2.0f) };

    /* Convert to a centered rectangle with even top/left */
    IMapper::Rect inputCrop {
        2*static_cast<int32_t>((inSz.width - cropSz.width)/4),
        2*static_cast<int32_t>((inSz.height - cropSz.height)/4),
        static_cast<int32_t>(cropSz.width),
        static_cast<int32_t>(cropSz.height) };

    if ((inputCrop.top < 0) ||
            (inputCrop.top >= static_cast<int32_t>(inSz.height)) ||
            (inputCrop.left < 0) ||
            (inputCrop.left >= static_cast<int32_t>(inSz.width)) ||
            (inputCrop.width <= 0) ||
            (inputCrop.width + inputCrop.left > static_cast<int32_t>(inSz.width)) ||
            (inputCrop.height <= 0) ||
            (inputCrop.height + inputCrop.top > static_cast<int32_t>(inSz.height)))
    {
        ALOGE("%s: came up with really wrong crop rectangle",__FUNCTION__);
        ALOGE("%s: input layout %dx%d for output size %dx%d",
                __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
        ALOGE("%s: computed input crop +%d,+%d %dx%d",
                __FUNCTION__, inputCrop.left, inputCrop.top,
                inputCrop.width, inputCrop.height);
        return -1;
    }

    YCbCrLayout inputLayout;
    ret = in->getCroppedLayout(inputCrop, &inputLayout);
    if (ret != 0) {
        ALOGE("%s: failed to crop input layout %dx%d for output size %dx%d",
                __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
        ALOGE("%s: computed input crop +%d,+%d %dx%d",
                __FUNCTION__, inputCrop.left, inputCrop.top,
                inputCrop.width, inputCrop.height);
        return ret;
    }
    ALOGV("%s: crop input layout %dx%d for output size %dx%d",
            __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height);
    ALOGV("%s: computed input crop +%d,+%d %dx%d",
            __FUNCTION__, inputCrop.left, inputCrop.top,
            inputCrop.width, inputCrop.height);

    // Scale
    YCbCrLayout outFullLayout;

    ret = mYu12ThumbFrame->getLayout(&outFullLayout);
    if (ret != 0) {
        ALOGE("%s: failed to get output buffer layout", __FUNCTION__);
        return ret;
    }

    ret = libyuv::I420Scale(
            static_cast<uint8_t*>(inputLayout.y),
            inputLayout.yStride,
            static_cast<uint8_t*>(inputLayout.cb),
            inputLayout.cStride,
            static_cast<uint8_t*>(inputLayout.cr),
            inputLayout.cStride,
            inputCrop.width,
            inputCrop.height,
            static_cast<uint8_t*>(outFullLayout.y),
            outFullLayout.yStride,
            static_cast<uint8_t*>(outFullLayout.cb),
            outFullLayout.cStride,
            static_cast<uint8_t*>(outFullLayout.cr),
            outFullLayout.cStride,
            outSz.width,
            outSz.height,
            libyuv::FilterMode::kFilterNone);

    if (ret != 0) {
        ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. Ret %d",
                __FUNCTION__, inputCrop.width, inputCrop.height,
                outSz.width, outSz.height, ret);
        return ret;
    }

    *out = outFullLayout;
    return 0;
}
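/* Worked example of the thumbnail math above (illustrative numbers): for a
 * 1280x720 input and a 320x240 thumbnail, fHin/fHout = 3.0 and
 * fWin/fWout = 4.0, so scaleFactor = 3.0. That gives fWcrop = 960 and
 * fHcrop = 720: a 960x720 (4:3) region centered at +160,+0 is cropped from
 * the 16:9 input, then scaled down to 320x240. */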
int ExternalCameraDeviceSession::OutputThread::formatConvertLocked(
        const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) {
    int ret = 0;
    switch (format) {
        case V4L2_PIX_FMT_NV21:
            ret = libyuv::I420ToNV21(
                    static_cast<uint8_t*>(in.y),
                    in.yStride,
                    static_cast<uint8_t*>(in.cb),
                    in.cStride,
                    static_cast<uint8_t*>(in.cr),
                    in.cStride,
                    static_cast<uint8_t*>(out.y),
                    out.yStride,
                    static_cast<uint8_t*>(out.cr),
                    out.cStride,
                    sz.width,
                    sz.height);
            if (ret != 0) {
                ALOGE("%s: convert to NV21 buffer failed! ret %d",
                        __FUNCTION__, ret);
                return ret;
            }
            break;
        case V4L2_PIX_FMT_NV12:
            ret = libyuv::I420ToNV12(
                    static_cast<uint8_t*>(in.y),
                    in.yStride,
                    static_cast<uint8_t*>(in.cb),
                    in.cStride,
                    static_cast<uint8_t*>(in.cr),
                    in.cStride,
                    static_cast<uint8_t*>(out.y),
                    out.yStride,
                    static_cast<uint8_t*>(out.cb),
                    out.cStride,
                    sz.width,
                    sz.height);
            if (ret != 0) {
                ALOGE("%s: convert to NV12 buffer failed! ret %d",
                        __FUNCTION__, ret);
                return ret;
            }
            break;
        case V4L2_PIX_FMT_YVU420: // YV12
        case V4L2_PIX_FMT_YUV420: // YU12
            // TODO: maybe we can speed up here by somehow saving this copy?
            ret = libyuv::I420Copy(
                    static_cast<uint8_t*>(in.y),
                    in.yStride,
                    static_cast<uint8_t*>(in.cb),
                    in.cStride,
                    static_cast<uint8_t*>(in.cr),
                    in.cStride,
                    static_cast<uint8_t*>(out.y),
                    out.yStride,
                    static_cast<uint8_t*>(out.cb),
                    out.cStride,
                    static_cast<uint8_t*>(out.cr),
                    out.cStride,
                    sz.width,
                    sz.height);
            if (ret != 0) {
                ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d",
                        __FUNCTION__, ret);
                return ret;
            }
            break;
        case FLEX_YUV_GENERIC:
            // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow.
            ALOGE("%s: unsupported flexible yuv layout"
                    " y %p cb %p cr %p y_str %d c_str %d c_step %d",
                    __FUNCTION__, out.y, out.cb, out.cr,
                    out.yStride, out.cStride, out.chromaStep);
            return -1;
        default:
            ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format);
            return -1;
    }
    return 0;
}
int ExternalCameraDeviceSession::OutputThread::encodeJpegYU12(
        const Size & inSz, const YCbCrLayout& inLayout,
        int jpegQuality, const void *app1Buffer, size_t app1Size,
        void *out, const size_t maxOutSize, size_t &actualCodeSize)
{
    /* libjpeg is a C library so we use C-style "inheritance" by
     * putting libjpeg's jpeg_destination_mgr first in our custom
     * struct. This allows us to cast jpeg_destination_mgr* to
     * CustomJpegDestMgr* when we get it passed to us in a callback */
    struct CustomJpegDestMgr {
        struct jpeg_destination_mgr mgr;
        JOCTET *mBuffer;
        size_t mBufferSize;
        size_t mEncodedSize;
        bool mSuccess;
    } dmgr;

    jpeg_compress_struct cinfo = {};
    jpeg_error_mgr jerr;

    /* Initialize error handling with standard callbacks, but
     * then override output_message (to print to ALOG) and
     * error_exit to set a flag and print a message instead
     * of killing the whole process */
    cinfo.err = jpeg_std_error(&jerr);

    cinfo.err->output_message = [](j_common_ptr cinfo) {
        char buffer[JMSG_LENGTH_MAX];

        /* Create the message */
        (*cinfo->err->format_message)(cinfo, buffer);
        ALOGE("libjpeg error: %s", buffer);
    };
    cinfo.err->error_exit = [](j_common_ptr cinfo) {
        (*cinfo->err->output_message)(cinfo);
        if(cinfo->client_data) {
            auto & dmgr =
                *reinterpret_cast<CustomJpegDestMgr*>(cinfo->client_data);
            dmgr.mSuccess = false;
        }
    };
    /* Now that we initialized some callbacks, let's create our compressor */
    jpeg_create_compress(&cinfo);

    /* Initialize our destination manager */
    dmgr.mBuffer = static_cast<JOCTET*>(out);
    dmgr.mBufferSize = maxOutSize;
    dmgr.mEncodedSize = 0;
    dmgr.mSuccess = true;
    cinfo.client_data = static_cast<void*>(&dmgr);

    /* These lambdas become C-style function pointers and as per C++11 spec
     * may not capture anything */
    dmgr.mgr.init_destination = [](j_compress_ptr cinfo) {
        auto & dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest);
        dmgr.mgr.next_output_byte = dmgr.mBuffer;
        dmgr.mgr.free_in_buffer = dmgr.mBufferSize;
        ALOGV("%s:%d jpeg start: %p [%zu]",
                __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize);
    };

    dmgr.mgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) {
        ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__);
        return 0;
    };

    dmgr.mgr.term_destination = [](j_compress_ptr cinfo) {
        auto & dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest);
        dmgr.mEncodedSize = dmgr.mBufferSize - dmgr.mgr.free_in_buffer;
        ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize);
    };
    cinfo.dest = reinterpret_cast<struct jpeg_destination_mgr*>(&dmgr);

    /* We are going to be using JPEG in raw data mode, so we are passing
     * straight subsampled planar YCbCr and it will not touch our pixel
     * data or do any scaling or anything */
    cinfo.image_width = inSz.width;
    cinfo.image_height = inSz.height;
    cinfo.input_components = 3;
    cinfo.in_color_space = JCS_YCbCr;

    /* Initialize defaults and then override what we want */
    jpeg_set_defaults(&cinfo);

    jpeg_set_quality(&cinfo, jpegQuality, 1);
    jpeg_set_colorspace(&cinfo, JCS_YCbCr);
    cinfo.raw_data_in = 1;
    cinfo.dct_method = JDCT_IFAST;

    /* Configure sampling factors. The sampling factor is JPEG subsampling 420
     * because the source format is YUV420. Note that libjpeg sampling factors
     * are... a little weird. Sampling of Y=2,U=1,V=1 means there is 1 U and
     * 1 V value for each 2 Y values */
    cinfo.comp_info[0].h_samp_factor = 2;
    cinfo.comp_info[0].v_samp_factor = 2;
    cinfo.comp_info[1].h_samp_factor = 1;
    cinfo.comp_info[1].v_samp_factor = 1;
    cinfo.comp_info[2].h_samp_factor = 1;
    cinfo.comp_info[2].v_samp_factor = 1;
    /* Let's not hardcode YUV420 in 6 places... 5 was enough */
    int maxVSampFactor = std::max( {
        cinfo.comp_info[0].v_samp_factor,
        cinfo.comp_info[1].v_samp_factor,
        cinfo.comp_info[2].v_samp_factor
    });
    int cVSubSampling = cinfo.comp_info[0].v_samp_factor /
                        cinfo.comp_info[1].v_samp_factor;

    /* Start the compressor */
    jpeg_start_compress(&cinfo, TRUE);

    /* Compute our macroblock height, so we can pad our input to be vertically
     * macroblock aligned.
     * TODO: Does it need to be horizontally MCU aligned too? */

    size_t mcuV = DCTSIZE*maxVSampFactor;
    size_t paddedHeight = mcuV * ((inSz.height + mcuV - 1) / mcuV);

    /* libjpeg uses arrays of row pointers, which makes it really easy to pad
     * data vertically (unfortunately doesn't help horizontally) */
    std::vector<JSAMPROW> yLines (paddedHeight);
    std::vector<JSAMPROW> cbLines(paddedHeight/cVSubSampling);
    std::vector<JSAMPROW> crLines(paddedHeight/cVSubSampling);

    uint8_t *py = static_cast<uint8_t*>(inLayout.y);
    uint8_t *pcr = static_cast<uint8_t*>(inLayout.cr);
    uint8_t *pcb = static_cast<uint8_t*>(inLayout.cb);

    for(uint32_t i = 0; i < paddedHeight; i++)
    {
        /* Once we are in the padding territory we still point to the last line
         * effectively replicating it several times ~ CLAMP_TO_EDGE */
        int li = std::min(i, inSz.height - 1);
        yLines[i] = static_cast<JSAMPROW>(py + li * inLayout.yStride);
        if(i < paddedHeight / cVSubSampling)
        {
            crLines[i] = static_cast<JSAMPROW>(pcr + li * inLayout.cStride);
            cbLines[i] = static_cast<JSAMPROW>(pcb + li * inLayout.cStride);
        }
    }

    /* If APP1 data was passed in, use it */
    if(app1Buffer && app1Size)
    {
        jpeg_write_marker(&cinfo, JPEG_APP0 + 1,
                static_cast<const JOCTET*>(app1Buffer), app1Size);
    }

    /* While we still have padded height left to go, keep giving it one
     * macroblock at a time. */
    while (cinfo.next_scanline < cinfo.image_height) {
        const uint32_t batchSize = DCTSIZE * maxVSampFactor;
        const uint32_t nl = cinfo.next_scanline;
        JSAMPARRAY planes[3]{ &yLines[nl],
                              &cbLines[nl/cVSubSampling],
                              &crLines[nl/cVSubSampling] };

        uint32_t done = jpeg_write_raw_data(&cinfo, planes, batchSize);

        if (done != batchSize) {
            ALOGE("%s: compressed %u lines, expected %u (total %u/%u)",
                    __FUNCTION__, done, batchSize, cinfo.next_scanline,
                    cinfo.image_height);
            return -1;
        }
    }

    /* This will flush everything */
    jpeg_finish_compress(&cinfo);

    /* Grab the actual code size and set it */
    actualCodeSize = dmgr.mEncodedSize;

    return 0;
}
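// Illustrative call (not from this file), assuming a 640x480 YU12 layout
// `layout` and a destination buffer `dst` with capacity `cap` bytes:
//
//     size_t encodedSize = 0;
//     int err = encodeJpegYU12(Size{640, 480}, layout, /*jpegQuality*/ 90,
//                              /*app1Buffer*/ nullptr, /*app1Size*/ 0,
//                              dst, cap, encodedSize);
//     // On success (err == 0), dst[0..encodedSize) holds the JPEG stream.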
/*
 * TODO: There needs to be a mechanism to discover allocated buffer size
 * in the HAL.
 *
 * This is very fragile because it is duplicated computation from:
 * frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp
 *
 */

/* This assumes mSupportedFormats have all been declared as supporting
 * HAL_PIXEL_FORMAT_BLOB to the framework */
Size ExternalCameraDeviceSession::getMaxJpegResolution() const {
    Size ret { 0, 0 };
    for(auto & fmt : mSupportedFormats) {
        if(fmt.width * fmt.height > ret.width * ret.height) {
            ret = Size { fmt.width, fmt.height };
        }
    }
    return ret;
}

Size ExternalCameraDeviceSession::getMaxThumbResolution() const {
    Size thumbSize { 0, 0 };
    camera_metadata_ro_entry entry =
        mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES);
    for(uint32_t i = 0; i < entry.count; i += 2) {
        Size sz { static_cast<uint32_t>(entry.data.i32[i]),
                  static_cast<uint32_t>(entry.data.i32[i+1]) };
        if(sz.width * sz.height > thumbSize.width * thumbSize.height) {
            thumbSize = sz;
        }
    }

    if (thumbSize.width * thumbSize.height == 0) {
        ALOGW("%s: non-zero thumbnail size not available", __FUNCTION__);
    }

    return thumbSize;
}

ssize_t ExternalCameraDeviceSession::getJpegBufferSize(
        uint32_t width, uint32_t height) const {
    // Constant from camera3.h
    const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(CameraBlob);
    // Get max jpeg size (area-wise).
    if (mMaxJpegResolution.width == 0) {
        ALOGE("%s: Do not have a single supported JPEG stream",
                __FUNCTION__);
        return BAD_VALUE;
    }

    // Get max jpeg buffer size
    ssize_t maxJpegBufferSize = 0;
    camera_metadata_ro_entry jpegBufMaxSize =
            mCameraCharacteristics.find(ANDROID_JPEG_MAX_SIZE);
    if (jpegBufMaxSize.count == 0) {
        ALOGE("%s: Can't find maximum JPEG size in static metadata!",
                __FUNCTION__);
        return BAD_VALUE;
    }
    maxJpegBufferSize = jpegBufMaxSize.data.i32[0];

    if (maxJpegBufferSize <= kMinJpegBufferSize) {
        ALOGE("%s: ANDROID_JPEG_MAX_SIZE (%zd) <= kMinJpegBufferSize (%zd)",
                __FUNCTION__, maxJpegBufferSize, kMinJpegBufferSize);
        return BAD_VALUE;
    }

    // Calculate final jpeg buffer size for the given resolution.
    float scaleFactor = ((float) (width * height)) /
            (mMaxJpegResolution.width * mMaxJpegResolution.height);
    ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) +
            kMinJpegBufferSize;
    if (jpegBufferSize > maxJpegBufferSize) {
        jpegBufferSize = maxJpegBufferSize;
    }

    return jpegBufferSize;
}
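// Worked example of the sizing formula above (illustrative numbers): if the
// static metadata reports ANDROID_JPEG_MAX_SIZE = 3,000,000 bytes and
// kMinJpegBufferSize is roughly 256KiB + sizeof(CameraBlob), a request whose
// area is half of mMaxJpegResolution gets scaleFactor = 0.5 and therefore
// about 0.5 * (3,000,000 - kMin) + kMin bytes: the buffer shrinks linearly
// with area but never drops below the fixed JPEG overhead floor.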
int ExternalCameraDeviceSession::OutputThread::createJpegLocked(
        HalStreamBuffer &halBuf,
        HalRequest &req)
{
    int ret;
    auto lfail = [&](auto... args) {
        ALOGE(args...);

        return 1;
    };
    auto parent = mParent.promote();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return 1;
    }

    ALOGV("%s: HAL buffer sid: %d bid: %" PRIu64 " w: %u h: %u",
            __FUNCTION__, halBuf.streamId, static_cast<uint64_t>(halBuf.bufferId),
            halBuf.width, halBuf.height);
    ALOGV("%s: HAL buffer fmt: %x usage: %" PRIx64 " ptr: %p",
            __FUNCTION__, halBuf.format, static_cast<uint64_t>(halBuf.usage),
            halBuf.bufPtr);
    ALOGV("%s: YV12 buffer %d x %d",
            __FUNCTION__,
            mYu12Frame->mWidth, mYu12Frame->mHeight);

    int jpegQuality, thumbQuality;
    Size thumbSize;

    if (req.setting.exists(ANDROID_JPEG_QUALITY)) {
        camera_metadata_entry entry =
            req.setting.find(ANDROID_JPEG_QUALITY);
        jpegQuality = entry.data.u8[0];
    } else {
        return lfail("%s: ANDROID_JPEG_QUALITY not set",__FUNCTION__);
    }

    if (req.setting.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
        camera_metadata_entry entry =
            req.setting.find(ANDROID_JPEG_THUMBNAIL_QUALITY);
        thumbQuality = entry.data.u8[0];
    } else {
        return lfail(
            "%s: ANDROID_JPEG_THUMBNAIL_QUALITY not set",
            __FUNCTION__);
    }

    if (req.setting.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
        camera_metadata_entry entry =
            req.setting.find(ANDROID_JPEG_THUMBNAIL_SIZE);
        thumbSize = Size { static_cast<uint32_t>(entry.data.i32[0]),
                           static_cast<uint32_t>(entry.data.i32[1])
        };
    } else {
        return lfail(
            "%s: ANDROID_JPEG_THUMBNAIL_SIZE not set", __FUNCTION__);
    }

    /* Cropped and scaled YU12 buffer for main and thumbnail */
    YCbCrLayout yu12Main;
    Size jpegSize { halBuf.width, halBuf.height };

    /* Compute temporary buffer sizes accounting for the following:
     * thumbnail can't exceed APP1 size of 64K
     * main image needs to hold APP1, headers, and at most a poorly
     * compressed image */
    const ssize_t maxThumbCodeSize = 64 * 1024;
    const ssize_t maxJpegCodeSize = parent->getJpegBufferSize(jpegSize.width,
            jpegSize.height);

    /* Check that getJpegBufferSize did not return an error */
    if (maxJpegCodeSize < 0) {
        return lfail(
            "%s: getJpegBufferSize returned %zd",__FUNCTION__,maxJpegCodeSize);
    }

    /* Hold actual thumbnail and main image code sizes */
    size_t thumbCodeSize = 0, jpegCodeSize = 0;
    /* Temporary thumbnail code buffer */
    std::vector<uint8_t> thumbCode(maxThumbCodeSize);

    YCbCrLayout yu12Thumb;
    ret = cropAndScaleThumbLocked(mYu12Frame, thumbSize, &yu12Thumb);

    if (ret != 0) {
        return lfail(
            "%s: crop and scale thumbnail failed!", __FUNCTION__);
    }

    /* Scale and crop main jpeg */
    ret = cropAndScaleLocked(mYu12Frame, jpegSize, &yu12Main);

    if (ret != 0) {
        return lfail("%s: crop and scale main failed!", __FUNCTION__);
    }

    /* Encode the thumbnail image */
    ret = encodeJpegYU12(thumbSize, yu12Thumb,
            thumbQuality, 0, 0,
            &thumbCode[0], maxThumbCodeSize, thumbCodeSize);

    if (ret != 0) {
        return lfail("%s: encodeJpegYU12 failed with %d",__FUNCTION__, ret);
    }

    /* Combine camera characteristics with request settings to form EXIF
     * metadata */
    common::V1_0::helper::CameraMetadata meta(parent->mCameraCharacteristics);
    meta.append(req.setting);

    /* Generate EXIF object */
    std::unique_ptr<ExifUtils> utils(ExifUtils::create());
    /* Make sure it's initialized */
    utils->initialize();

    utils->setFromMetadata(meta, jpegSize.width, jpegSize.height);

    /* Check if we made a non-zero-sized thumbnail. Currently not possible
     * that we got this far and the code is size 0, but if this code moves
     * around it might become relevant again */

    ret = utils->generateApp1(thumbCodeSize ? &thumbCode[0] : 0, thumbCodeSize);

    if (!ret) {
        return lfail("%s: generating APP1 failed", __FUNCTION__);
    }

    /* Get internal buffer */
    size_t exifDataSize = utils->getApp1Length();
    const uint8_t* exifData = utils->getApp1Buffer();

    /* Lock the HAL jpeg code buffer */
    void *bufPtr = sHandleImporter.lock(
            *(halBuf.bufPtr), halBuf.usage, maxJpegCodeSize);

    if (!bufPtr) {
        return lfail("%s: could not lock %zu bytes", __FUNCTION__, maxJpegCodeSize);
    }

    /* Encode the main jpeg image */
    ret = encodeJpegYU12(jpegSize, yu12Main,
            jpegQuality, exifData, exifDataSize,
            bufPtr, maxJpegCodeSize, jpegCodeSize);

    /* TODO: Not sure this belongs here, maybe better to pass jpegCodeSize out
     * and do this when returning buffer to parent */
    CameraBlob blob { CameraBlobId::JPEG, static_cast<uint32_t>(jpegCodeSize) };
    void *blobDst =
        reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(bufPtr) +
                           maxJpegCodeSize -
                           sizeof(CameraBlob));
    memcpy(blobDst, &blob, sizeof(CameraBlob));

    /* Unlock the HAL jpeg code buffer */
    int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
    if (relFence > 0) {
        halBuf.acquireFence = relFence;
    }

    /* Check if our JPEG actually succeeded */
    if (ret != 0) {
        return lfail(
            "%s: encodeJpegYU12 failed with %d",__FUNCTION__, ret);
    }

    ALOGV("%s: encoded JPEG (ret:%d) with Q:%d max size: %zu",
            __FUNCTION__, ret, jpegQuality, maxJpegCodeSize);

    return 0;
}
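// Note on the BLOB layout above: per the camera HAL JPEG BLOB convention, the
// JPEG bitstream starts at offset 0 of the locked buffer, and a CameraBlob
// transfer header recording the actual encoded size is written at the very
// end of the buffer (bufPtr + maxJpegCodeSize - sizeof(CameraBlob)), so the
// consumer can recover jpegCodeSize without parsing the JPEG itself.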
bool ExternalCameraDeviceSession::OutputThread::threadLoop() {
    HalRequest req;
    auto parent = mParent.promote();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return false;
    }

    // TODO: maybe we need to setup a sensor thread to dq/enq v4l frames
    //       regularly to prevent v4l buffer queue filled with stale buffers
    //       when app doesn't program a preview request
    waitForNextRequest(&req);
    if (req.frameIn == nullptr) {
        // No new request, wait again
        return true;
    }

    auto onDeviceError = [&](auto... args) {
        ALOGE(args...);
        parent->notifyError(
                req.frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE);
        signalRequestDone();
        return false;
    };

    if (req.frameIn->mFourcc != V4L2_PIX_FMT_MJPEG) {
        return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__,
                req.frameIn->mFourcc & 0xFF,
                (req.frameIn->mFourcc >> 8) & 0xFF,
                (req.frameIn->mFourcc >> 16) & 0xFF,
                (req.frameIn->mFourcc >> 24) & 0xFF);
    }

    std::unique_lock<std::mutex> lk(mBufferLock);
    // Convert input V4L2 frame to YU12 of the same size
    // TODO: see if we can save some computation by converting to YV12 here
    uint8_t* inData;
    size_t inDataSize;
    req.frameIn->map(&inData, &inDataSize);
    // TODO: profile
    // TODO: in some special case maybe we can decode jpg directly to gralloc output?
    int res = libyuv::MJPGToI420(
            inData, inDataSize,
            static_cast<uint8_t*>(mYu12FrameLayout.y),
            mYu12FrameLayout.yStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cb),
            mYu12FrameLayout.cStride,
            static_cast<uint8_t*>(mYu12FrameLayout.cr),
            mYu12FrameLayout.cStride,
            mYu12Frame->mWidth, mYu12Frame->mHeight,
            mYu12Frame->mWidth, mYu12Frame->mHeight);

    if (res != 0) {
        // For some webcam, the first few V4L2 frames might be malformed...
        ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res);
        lk.unlock();
        Status st = parent->processCaptureRequestError(req);
        if (st != Status::OK) {
            return onDeviceError("%s: failed to process capture request error!", __FUNCTION__);
        }
        signalRequestDone();
        return true;
    }

    ALOGV("%s processing new request", __FUNCTION__);
    const int kSyncWaitTimeoutMs = 500;
    for (auto& halBuf : req.buffers) {
        if (halBuf.acquireFence != -1) {
            int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs);
            if (ret) {
                halBuf.fenceTimeout = true;
            } else {
                ::close(halBuf.acquireFence);
                halBuf.acquireFence = -1;
            }
        }

        if (halBuf.fenceTimeout) {
            continue;
        }

        // Gralloc lockYCbCr the buffer
        switch (halBuf.format) {
            case PixelFormat::BLOB: {
                int ret = createJpegLocked(halBuf, req);

                if(ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: createJpegLocked failed with %d",
                            __FUNCTION__, ret);
                }
            } break;
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: {
                IMapper::Rect outRect {0, 0,
                        static_cast<int32_t>(halBuf.width),
                        static_cast<int32_t>(halBuf.height)};
                YCbCrLayout outLayout = sHandleImporter.lockYCbCr(
                        *(halBuf.bufPtr), halBuf.usage, outRect);
                ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d",
                        __FUNCTION__, outLayout.y, outLayout.cb, outLayout.cr,
                        outLayout.yStride, outLayout.cStride, outLayout.chromaStep);

                // Convert to output buffer size/format
                uint32_t outputFourcc = getFourCcFromLayout(outLayout);
                ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__,
                        outputFourcc & 0xFF,
                        (outputFourcc >> 8) & 0xFF,
                        (outputFourcc >> 16) & 0xFF,
                        (outputFourcc >> 24) & 0xFF);

                YCbCrLayout cropAndScaled;
                int ret = cropAndScaleLocked(
                        mYu12Frame,
                        Size { halBuf.width, halBuf.height },
                        &cropAndScaled);
                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: crop and scale failed!", __FUNCTION__);
                }

                Size sz {halBuf.width, halBuf.height};
                ret = formatConvertLocked(cropAndScaled, outLayout, sz, outputFourcc);
                if (ret != 0) {
                    lk.unlock();
                    return onDeviceError("%s: format conversion failed!", __FUNCTION__);
                }
                int relFence = sHandleImporter.unlock(*(halBuf.bufPtr));
                if (relFence > 0) {
                    halBuf.acquireFence = relFence;
                }
            } break;
            default:
                lk.unlock();
                return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format);
        }
    } // for each buffer
    mScaledYu12Frames.clear();

    // Don't hold the lock while calling back to parent
    lk.unlock();
    Status st = parent->processCaptureResult(req);
    if (st != Status::OK) {
        return onDeviceError("%s: failed to process capture result!", __FUNCTION__);
    }
    signalRequestDone();
    return true;
}
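/* allocateIntermediateBuffers is called at stream configuration time: it
 * (re)allocates the full-size YU12 decode target, the YU12 thumbnail frame,
 * and one intermediate YU12 frame per configured output size that differs
 * from the V4L2 capture size, then drops buffers for sizes that are no
 * longer configured. threadLoop above borrows these buffers via
 * mScaledYu12Frames while a request is being processed. */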
Status ExternalCameraDeviceSession::OutputThread::allocateIntermediateBuffers(
        const Size& v4lSize, const Size& thumbSize,
        const hidl_vec<Stream>& streams) {
    std::lock_guard<std::mutex> lk(mBufferLock);
    if (mScaledYu12Frames.size() != 0) {
        ALOGE("%s: intermediate buffer pool has %zu inflight buffers! (expect 0)",
                __FUNCTION__, mScaledYu12Frames.size());
        return Status::INTERNAL_ERROR;
    }

    // Allocating intermediate YU12 frame
    if (mYu12Frame == nullptr || mYu12Frame->mWidth != v4lSize.width ||
            mYu12Frame->mHeight != v4lSize.height) {
        mYu12Frame.clear();
        mYu12Frame = new AllocatedFrame(v4lSize.width, v4lSize.height);
        int ret = mYu12Frame->allocate(&mYu12FrameLayout);
        if (ret != 0) {
            ALOGE("%s: allocating YU12 frame failed!", __FUNCTION__);
            return Status::INTERNAL_ERROR;
        }
    }

    // Allocating intermediate YU12 thumbnail frame
    if (mYu12ThumbFrame == nullptr ||
            mYu12ThumbFrame->mWidth != thumbSize.width ||
            mYu12ThumbFrame->mHeight != thumbSize.height) {
        mYu12ThumbFrame.clear();
        mYu12ThumbFrame = new AllocatedFrame(thumbSize.width, thumbSize.height);
        int ret = mYu12ThumbFrame->allocate(&mYu12ThumbFrameLayout);
        if (ret != 0) {
            ALOGE("%s: allocating YU12 thumb frame failed!", __FUNCTION__);
            return Status::INTERNAL_ERROR;
        }
    }

    // Allocating scaled buffers
    for (const auto& stream : streams) {
        Size sz = {stream.width, stream.height};
        if (sz == v4lSize) {
            continue; // Don't need an intermediate buffer same size as v4lBuffer
        }
        if (mIntermediateBuffers.count(sz) == 0) {
            // Create new intermediate buffer
            sp<AllocatedFrame> buf = new AllocatedFrame(stream.width, stream.height);
            int ret = buf->allocate();
            if (ret != 0) {
                ALOGE("%s: allocating intermediate YU12 frame %dx%d failed!",
                        __FUNCTION__, stream.width, stream.height);
                return Status::INTERNAL_ERROR;
            }
            mIntermediateBuffers[sz] = buf;
        }
    }

    // Remove unconfigured buffers
    auto it = mIntermediateBuffers.begin();
    while (it != mIntermediateBuffers.end()) {
        bool configured = false;
        auto sz = it->first;
        for (const auto& stream : streams) {
            if (stream.width == sz.width && stream.height == sz.height) {
                configured = true;
                break;
            }
        }
        if (configured) {
            it++;
        } else {
            it = mIntermediateBuffers.erase(it);
        }
    }
    return Status::OK;
}

Status ExternalCameraDeviceSession::OutputThread::submitRequest(const HalRequest& req) {
    std::unique_lock<std::mutex> lk(mRequestListLock);
    // TODO: reduce object copy in this path
    mRequestList.push_back(req);
    lk.unlock();
    mRequestCond.notify_one();
    return Status::OK;
}
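/* flush() drains every queued request and, if one request is currently being
 * processed by threadLoop, waits up to kReqWaitTimeoutSec for it to finish.
 * Each drained request is then completed through processCaptureRequestError(),
 * which returns its buffers with BufferStatus::ERROR and emits ERROR_REQUEST,
 * so that flushed requests still produce results for the framework. */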
reqs) { 1727 parent->processCaptureRequestError(req); 1728 } 1729} 1730 1731void ExternalCameraDeviceSession::OutputThread::waitForNextRequest(HalRequest* out) { 1732 if (out == nullptr) { 1733 ALOGE("%s: out is null", __FUNCTION__); 1734 return; 1735 } 1736 1737 std::unique_lock<std::mutex> lk(mRequestListLock); 1738 while (mRequestList.empty()) { 1739 std::chrono::seconds timeout = std::chrono::seconds(kReqWaitTimeoutSec); 1740 auto st = mRequestCond.wait_for(lk, timeout); 1741 if (st == std::cv_status::timeout) { 1742 // no new request, return 1743 return; 1744 } 1745 } 1746 *out = mRequestList.front(); 1747 mRequestList.pop_front(); 1748 mProcessingRequest = true; 1749} 1750 1751void ExternalCameraDeviceSession::OutputThread::signalRequestDone() { 1752 std::unique_lock<std::mutex> lk(mRequestListLock); 1753 mProcessingRequest = false; 1754 lk.unlock(); 1755 mRequestDoneCond.notify_one(); 1756} 1757 1758void ExternalCameraDeviceSession::cleanupBuffersLocked(int id) { 1759 for (auto& pair : mCirculatingBuffers.at(id)) { 1760 sHandleImporter.freeBuffer(pair.second); 1761 } 1762 mCirculatingBuffers[id].clear(); 1763 mCirculatingBuffers.erase(id); 1764} 1765 1766void ExternalCameraDeviceSession::updateBufferCaches(const hidl_vec<BufferCache>& cachesToRemove) { 1767 Mutex::Autolock _l(mLock); 1768 for (auto& cache : cachesToRemove) { 1769 auto cbsIt = mCirculatingBuffers.find(cache.streamId); 1770 if (cbsIt == mCirculatingBuffers.end()) { 1771 // The stream could have been removed 1772 continue; 1773 } 1774 CirculatingBuffers& cbs = cbsIt->second; 1775 auto it = cbs.find(cache.bufferId); 1776 if (it != cbs.end()) { 1777 sHandleImporter.freeBuffer(it->second); 1778 cbs.erase(it); 1779 } else { 1780 ALOGE("%s: stream %d buffer %" PRIu64 " is not cached", 1781 __FUNCTION__, cache.streamId, cache.bufferId); 1782 } 1783 } 1784} 1785 1786bool ExternalCameraDeviceSession::isSupported(const Stream& stream) { 1787 int32_t ds = static_cast<int32_t>(stream.dataSpace); 1788 PixelFormat fmt = stream.format; 1789 uint32_t width = stream.width; 1790 uint32_t height = stream.height; 1791 // TODO: check usage flags 1792 1793 if (stream.streamType != StreamType::OUTPUT) { 1794 ALOGE("%s: does not support non-output stream type", __FUNCTION__); 1795 return false; 1796 } 1797 1798 if (stream.rotation != StreamRotation::ROTATION_0) { 1799 ALOGE("%s: does not support stream rotation", __FUNCTION__); 1800 return false; 1801 } 1802 1803 if (ds & Dataspace::DEPTH) { 1804 ALOGI("%s: does not support depth output", __FUNCTION__); 1805 return false; 1806 } 1807 1808 switch (fmt) { 1809 case PixelFormat::BLOB: 1810 if (ds != static_cast<int32_t>(Dataspace::V0_JFIF)) { 1811 ALOGI("%s: BLOB format does not support dataSpace %x", __FUNCTION__, ds); 1812 return false; 1813 } // Intentional fallthrough: BLOB with V0_JFIF dataspace is supported 1814 case PixelFormat::IMPLEMENTATION_DEFINED: 1815 case PixelFormat::YCBCR_420_888: 1816 case PixelFormat::YV12: 1817 // TODO: check what dataspace we can support here. 1818 // intentional no-ops. 1819 break; 1820 default: 1821 ALOGI("%s: does not support format %x", __FUNCTION__, fmt); 1822 return false; 1823 } 1824 1825 // Assume we can convert any V4L2 format to any of the supported output formats for now, 1826 // i.e., ignore v4l2Fmt.fourcc. Might need a more subtle check if we support more V4L2 1827 // formats in the future. 
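// For example, a 640x480 YCBCR_420_888 stream is accepted as long as some entry in mSupportedFormats reports 640x480, whatever that entry's fourcc happens to be (say, MJPEG); the output thread takes care of decode and conversion.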
1828 for (const auto& v4l2Fmt : mSupportedFormats) { 1829 if (width == v4l2Fmt.width && height == v4l2Fmt.height) { 1830 return true; 1831 } 1832 } 1833 ALOGI("%s: resolution %dx%d is not supported", __FUNCTION__, width, height); 1834 return false; 1835} 1836 1837int ExternalCameraDeviceSession::v4l2StreamOffLocked() { 1838 if (!mV4l2Streaming) { 1839 return OK; 1840 } 1841 1842 { 1843 std::lock_guard<std::mutex> lk(mV4l2BufferLock); 1844 if (mNumDequeuedV4l2Buffers != 0) { 1845 ALOGE("%s: there are %zu inflight V4L buffers", 1846 __FUNCTION__, mNumDequeuedV4l2Buffers); 1847 return -1; 1848 } 1849 } 1850 mV4L2BufferCount = 0; 1851 1852 // VIDIOC_STREAMOFF 1853 v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 1854 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMOFF, &capture_type)) < 0) { 1855 ALOGE("%s: STREAMOFF failed: %s", __FUNCTION__, strerror(errno)); 1856 return -errno; 1857 } 1858 1859 // VIDIOC_REQBUFS: clear buffers 1860 v4l2_requestbuffers req_buffers{}; 1861 req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 1862 req_buffers.memory = V4L2_MEMORY_MMAP; 1863 req_buffers.count = 0; 1864 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) { 1865 ALOGE("%s: REQBUFS failed: %s", __FUNCTION__, strerror(errno)); 1866 return -errno; 1867 } 1868 1869 mV4l2Streaming = false; 1870 return OK; 1871} 1872 1873int ExternalCameraDeviceSession::configureV4l2StreamLocked(const SupportedV4L2Format& v4l2Fmt) { 1874 int ret = v4l2StreamOffLocked(); 1875 if (ret != OK) { 1876 ALOGE("%s: stop v4l2 streaming failed: ret %d", __FUNCTION__, ret); 1877 return ret; 1878 } 1879 1880 // VIDIOC_S_FMT w/h/fmt 1881 v4l2_format fmt; 1882 fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 1883 fmt.fmt.pix.width = v4l2Fmt.width; 1884 fmt.fmt.pix.height = v4l2Fmt.height; 1885 fmt.fmt.pix.pixelformat = v4l2Fmt.fourcc; 1886 ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt)); 1887 if (ret < 0) { 1888 ALOGE("%s: S_FMT ioctl failed: %s", __FUNCTION__, strerror(errno)); 1889 return -errno; 1890 } 1891 1892 if (v4l2Fmt.width != fmt.fmt.pix.width || v4l2Fmt.height != fmt.fmt.pix.height || 1893 v4l2Fmt.fourcc != fmt.fmt.pix.pixelformat) { 1894 ALOGE("%s: S_FMT expect %c%c%c%c %dx%d, got %c%c%c%c %dx%d instead!", __FUNCTION__, 1895 v4l2Fmt.fourcc & 0xFF, 1896 (v4l2Fmt.fourcc >> 8) & 0xFF, 1897 (v4l2Fmt.fourcc >> 16) & 0xFF, 1898 (v4l2Fmt.fourcc >> 24) & 0xFF, 1899 v4l2Fmt.width, v4l2Fmt.height, 1900 fmt.fmt.pix.pixelformat & 0xFF, 1901 (fmt.fmt.pix.pixelformat >> 8) & 0xFF, 1902 (fmt.fmt.pix.pixelformat >> 16) & 0xFF, 1903 (fmt.fmt.pix.pixelformat >> 24) & 0xFF, 1904 fmt.fmt.pix.width, fmt.fmt.pix.height); 1905 return -EINVAL; 1906 } 1907 uint32_t bufferSize = fmt.fmt.pix.sizeimage; 1908 ALOGI("%s: V4L2 buffer size is %d", __FUNCTION__, bufferSize); 1909 1910 float maxFps = -1.f; 1911 float fps = 1000.f; 1912 const float kDefaultFps = 30.f; 1913 // Try to pick the slowest fps that is at least 30 1914 for (const auto& fr : v4l2Fmt.frameRates) { 1915 double f = fr.getDouble(); 1916 if (maxFps < f) { 1917 maxFps = f; 1918 } 1919 if (f >= kDefaultFps && f < fps) { 1920 fps = f; 1921 } 1922 } 1923 if (fps == 1000.f) { 1924 fps = maxFps; 1925 } 1926 1927 // VIDIOC_G_PARM/VIDIOC_S_PARM: set fps 1928 v4l2_streamparm streamparm = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE }; 1929 // The following line checks that the driver knows about framerate get/set. 
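// A worked example (assuming fps = 30 and that the driver accepts the fraction unchanged): timeperframe is numerator/denominator seconds, so the code below requests 10000/300000 = 1/30 s per frame, and retFps recovers 300000/10000 = 30.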
1930 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_G_PARM, &streamparm)) >= 0) { 1931 // Now check if the device is able to accept a capture framerate set. 1932 if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) { 1933 // |fps| is a float; approximate it with a fraction. 1934 const int kFrameRatePrecision = 10000; 1935 streamparm.parm.capture.timeperframe.numerator = kFrameRatePrecision; 1936 streamparm.parm.capture.timeperframe.denominator = 1937 (fps * kFrameRatePrecision); 1938 1939 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_PARM, &streamparm)) < 0) { 1940 ALOGE("%s: failed to set framerate to %f", __FUNCTION__, fps); 1941 return UNKNOWN_ERROR; 1942 } 1943 } 1944 } 1945 float retFps = streamparm.parm.capture.timeperframe.denominator / 1946 static_cast<float>(streamparm.parm.capture.timeperframe.numerator); // cast avoids truncating integer division 1947 if (std::fabs(fps - retFps) > std::numeric_limits<float>::epsilon()) { 1948 ALOGE("%s: expect fps %f, got %f instead", __FUNCTION__, fps, retFps); 1949 return BAD_VALUE; 1950 } 1951 1952 uint32_t v4lBufferCount = (fps >= kDefaultFps) ? 1953 mCfg.numVideoBuffers : mCfg.numStillBuffers; 1954 // VIDIOC_REQBUFS: create buffers 1955 v4l2_requestbuffers req_buffers{}; 1956 req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 1957 req_buffers.memory = V4L2_MEMORY_MMAP; 1958 req_buffers.count = v4lBufferCount; 1959 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) { 1960 ALOGE("%s: VIDIOC_REQBUFS failed: %s", __FUNCTION__, strerror(errno)); 1961 return -errno; 1962 } 1963 1964 // The driver may return more buffers than requested if it needs more to operate; fewer is an error 1965 if (req_buffers.count < v4lBufferCount) { 1966 ALOGE("%s: VIDIOC_REQBUFS expected %d buffers, got %d instead", 1967 __FUNCTION__, v4lBufferCount, req_buffers.count); 1968 return NO_MEMORY; 1969 } 1970 1971 // VIDIOC_QUERYBUF: get buffer offset in the V4L2 fd 1972 // VIDIOC_QBUF: send buffer to driver 1973 mV4L2BufferCount = req_buffers.count; 1974 for (uint32_t i = 0; i < req_buffers.count; i++) { 1975 v4l2_buffer buffer = { 1976 .type = V4L2_BUF_TYPE_VIDEO_CAPTURE, 1977 .index = i, 1978 .memory = V4L2_MEMORY_MMAP}; 1979 1980 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QUERYBUF, &buffer)) < 0) { 1981 ALOGE("%s: QUERYBUF %d failed: %s", __FUNCTION__, i, strerror(errno)); 1982 return -errno; 1983 } 1984 1985 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { 1986 ALOGE("%s: QBUF %d failed: %s", __FUNCTION__, i, strerror(errno)); 1987 return -errno; 1988 } 1989 } 1990 1991 // VIDIOC_STREAMON: start streaming 1992 v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 1993 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type)) < 0) { 1994 ALOGE("%s: VIDIOC_STREAMON failed: %s", __FUNCTION__, strerror(errno)); 1995 return -errno; 1996 } 1997 1998 // Swallow first few frames after streamOn to account for bad frames from some devices 1999 for (int i = 0; i < kBadFramesAfterStreamOn; i++) { 2000 v4l2_buffer buffer{}; 2001 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 2002 buffer.memory = V4L2_MEMORY_MMAP; 2003 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) { 2004 ALOGE("%s: DQBUF failed: %s", __FUNCTION__, strerror(errno)); 2005 return -errno; 2006 } 2007 2008 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { 2009 ALOGE("%s: QBUF index %d failed: %s", __FUNCTION__, buffer.index, strerror(errno)); 2010 return -errno; 2011 } 2012 } 2013 2014 mV4l2StreamingFmt = v4l2Fmt; 2015 mV4l2Streaming = true; 2016 return OK; 2017} 
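// The two methods below implement the V4L2 MMAP streaming I/O cycle: dequeueV4l2FrameLocked() pulls a filled buffer from the driver (VIDIOC_DQBUF) and wraps it in a V4L2Frame, and enqueueV4l2Frame() unmaps the frame and hands the buffer back (VIDIOC_QBUF). mNumDequeuedV4l2Buffers and mV4L2BufferReturned throttle dequeueing so no more than mV4L2BufferCount buffers are ever outstanding at once.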
2018 2019sp<V4L2Frame> ExternalCameraDeviceSession::dequeueV4l2FrameLocked() { 2020 sp<V4L2Frame> ret = nullptr; 2021 2022 { 2023 std::unique_lock<std::mutex> lk(mV4l2BufferLock); 2024 if (mNumDequeuedV4l2Buffers == mV4L2BufferCount) { 2025 std::chrono::seconds timeout = std::chrono::seconds(kBufferWaitTimeoutSec); 2026 mLock.unlock(); // release the session lock so enqueueV4l2Frame() can return a buffer while we wait 2027 auto st = mV4L2BufferReturned.wait_for(lk, timeout); 2028 mLock.lock(); 2029 if (st == std::cv_status::timeout) { 2030 ALOGE("%s: wait for V4L2 buffer return timeout!", __FUNCTION__); 2031 return ret; 2032 } 2033 } 2034 } 2035 2036 v4l2_buffer buffer{}; 2037 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 2038 buffer.memory = V4L2_MEMORY_MMAP; 2039 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) { 2040 ALOGE("%s: DQBUF failed: %s", __FUNCTION__, strerror(errno)); 2041 return ret; 2042 } 2043 2044 if (buffer.index >= mV4L2BufferCount) { 2045 ALOGE("%s: Invalid buffer id: %d", __FUNCTION__, buffer.index); 2046 return ret; 2047 } 2048 2049 if (buffer.flags & V4L2_BUF_FLAG_ERROR) { 2050 ALOGE("%s: v4l2 buf error! buf flag 0x%x", __FUNCTION__, buffer.flags); 2051 // TODO: try to dequeue again 2052 } 2053 2054 { 2055 std::lock_guard<std::mutex> lk(mV4l2BufferLock); 2056 mNumDequeuedV4l2Buffers++; 2057 } 2058 return new V4L2Frame( 2059 mV4l2StreamingFmt.width, mV4l2StreamingFmt.height, mV4l2StreamingFmt.fourcc, 2060 buffer.index, mV4l2Fd.get(), buffer.bytesused, buffer.m.offset); 2061} 2062 2063void ExternalCameraDeviceSession::enqueueV4l2Frame(const sp<V4L2Frame>& frame) { 2064 Mutex::Autolock _l(mLock); 2065 frame->unmap(); 2066 v4l2_buffer buffer{}; 2067 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 2068 buffer.memory = V4L2_MEMORY_MMAP; 2069 buffer.index = frame->mBufferIndex; 2070 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { 2071 ALOGE("%s: QBUF index %d failed: %s", __FUNCTION__, frame->mBufferIndex, strerror(errno)); 2072 return; 2073 } 2074 2075 { 2076 std::lock_guard<std::mutex> lk(mV4l2BufferLock); 2077 mNumDequeuedV4l2Buffers--; 2078 } 2079 mV4L2BufferReturned.notify_one(); 2080} 2081 2082Status ExternalCameraDeviceSession::configureStreams( 2083 const V3_2::StreamConfiguration& config, V3_3::HalStreamConfiguration* out) { 2084 if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) { 2085 ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode); 2086 return Status::ILLEGAL_ARGUMENT; 2087 } 2088 2089 if (config.streams.size() == 0) { 2090 ALOGE("%s: cannot configure zero streams", __FUNCTION__); 2091 return Status::ILLEGAL_ARGUMENT; 2092 } 2093 2094 int numProcessedStream = 0; 2095 int numStallStream = 0; 2096 for (const auto& stream : config.streams) { 2097 // Check if the format/width/height combo is supported 2098 if (!isSupported(stream)) { 2099 return Status::ILLEGAL_ARGUMENT; 2100 } 2101 if (stream.format == PixelFormat::BLOB) { 2102 numStallStream++; 2103 } else { 2104 numProcessedStream++; 2105 } 2106 } 2107 2108 if (numProcessedStream > kMaxProcessedStream) { 2109 ALOGE("%s: too many processed streams (expect <= %d, got %d)", __FUNCTION__, 2110 kMaxProcessedStream, numProcessedStream); 2111 return Status::ILLEGAL_ARGUMENT; 2112 } 2113 2114 if (numStallStream > kMaxStallStream) { 2115 ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__, 2116 kMaxStallStream, numStallStream); 2117 return Status::ILLEGAL_ARGUMENT; 2118 } 2119 2120 Status status = initStatus(); 2121 if (status != Status::OK) { 2122 return status; 2123 } 2124 2125 Mutex::Autolock 
_l(mLock); 2126 if (!mInflightFrames.empty()) { 2127 ALOGE("%s: trying to configureStreams while there are still %zu inflight frames!", 2128 __FUNCTION__, mInflightFrames.size()); 2129 return Status::INTERNAL_ERROR; 2130 } 2131 2132 // Add new streams 2133 for (const auto& stream : config.streams) { 2134 if (mStreamMap.count(stream.id) == 0) { 2135 mStreamMap[stream.id] = stream; 2136 mCirculatingBuffers.emplace(stream.id, CirculatingBuffers{}); 2137 } 2138 } 2139 2140 // Clean up removed streams 2141 for (auto it = mStreamMap.begin(); it != mStreamMap.end();) { 2142 int id = it->first; 2143 bool found = false; 2144 for (const auto& stream : config.streams) { 2145 if (id == stream.id) { 2146 found = true; 2147 break; 2148 } 2149 } 2150 if (!found) { 2151 // Unmap all buffers of the deleted stream 2152 cleanupBuffersLocked(id); 2153 it = mStreamMap.erase(it); 2154 } else { 2155 ++it; 2156 } 2157 } 2158 2159 // Now select a V4L2 format to produce all output streams 2160 float desiredAr = (mCroppingType == VERTICAL) ? kMaxAspectRatio : kMinAspectRatio; 2161 uint32_t maxDim = 0; 2162 for (const auto& stream : config.streams) { 2163 float aspectRatio = ASPECT_RATIO(stream); 2164 if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) || 2165 (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) { 2166 desiredAr = aspectRatio; 2167 } 2168 2169 // The dimension that's not cropped 2170 uint32_t dim = (mCroppingType == VERTICAL) ? stream.width : stream.height; 2171 if (dim > maxDim) { 2172 maxDim = dim; 2173 } 2174 } 2175 // Find the smallest format that matches the desired aspect ratio and is wide/high enough 2176 SupportedV4L2Format v4l2Fmt {.width = 0, .height = 0}; 2177 for (const auto& fmt : mSupportedFormats) { 2178 uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height; 2179 if (dim >= maxDim) { 2180 float aspectRatio = ASPECT_RATIO(fmt); 2181 if (isAspectRatioClose(aspectRatio, desiredAr)) { 2182 v4l2Fmt = fmt; 2183 // Since mSupportedFormats is sorted by width then height, the first matching fmt 2184 // will be the smallest one with matching aspect ratio 2185 break; 2186 } 2187 } 2188 } 2189 if (v4l2Fmt.width == 0) { 2190 // Couldn't find a close aspect-ratio candidate; fall back to one that can still be cropped to the desired ratio 2191 for (const auto& fmt : mSupportedFormats) { 2192 uint32_t dim = (mCroppingType == VERTICAL) ? fmt.width : fmt.height; 2193 if (dim >= maxDim) { 2194 float aspectRatio = ASPECT_RATIO(fmt); 2195 if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) || 2196 (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) { 2197 v4l2Fmt = fmt; 2198 break; 2199 } 2200 } 2201 } 2202 } 2203 2204 if (v4l2Fmt.width == 0) { 2205 ALOGE("%s: unable to find a resolution matching (%s at least %d, aspect ratio %f)" 2206 , __FUNCTION__, (mCroppingType == VERTICAL) ? 
"width" : "height", 2207 maxDim, desiredAr); 2208 return Status::ILLEGAL_ARGUMENT; 2209 } 2210 2211 if (configureV4l2StreamLocked(v4l2Fmt) != 0) { 2212 ALOGE("V4L configuration failed!, format:%c%c%c%c, w %d, h %d", 2213 v4l2Fmt.fourcc & 0xFF, 2214 (v4l2Fmt.fourcc >> 8) & 0xFF, 2215 (v4l2Fmt.fourcc >> 16) & 0xFF, 2216 (v4l2Fmt.fourcc >> 24) & 0xFF, 2217 v4l2Fmt.width, v4l2Fmt.height); 2218 return Status::INTERNAL_ERROR; 2219 } 2220 2221 Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height}; 2222 Size thumbSize { 0, 0 }; 2223 camera_metadata_ro_entry entry = 2224 mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); 2225 for(uint32_t i = 0; i < entry.count; i += 2) { 2226 Size sz { static_cast<uint32_t>(entry.data.i32[i]), 2227 static_cast<uint32_t>(entry.data.i32[i+1]) }; 2228 if(sz.width * sz.height > thumbSize.width * thumbSize.height) { 2229 thumbSize = sz; 2230 } 2231 } 2232 2233 if (thumbSize.width * thumbSize.height == 0) { 2234 ALOGE("%s: non-zero thumbnail size not available", __FUNCTION__); 2235 return Status::INTERNAL_ERROR; 2236 } 2237 2238 status = mOutputThread->allocateIntermediateBuffers(v4lSize, 2239 mMaxThumbResolution, config.streams); 2240 if (status != Status::OK) { 2241 ALOGE("%s: allocating intermediate buffers failed!", __FUNCTION__); 2242 return status; 2243 } 2244 2245 out->streams.resize(config.streams.size()); 2246 for (size_t i = 0; i < config.streams.size(); i++) { 2247 out->streams[i].overrideDataSpace = config.streams[i].dataSpace; 2248 out->streams[i].v3_2.id = config.streams[i].id; 2249 // TODO: double check should we add those CAMERA flags 2250 mStreamMap[config.streams[i].id].usage = 2251 out->streams[i].v3_2.producerUsage = config.streams[i].usage | 2252 BufferUsage::CPU_WRITE_OFTEN | 2253 BufferUsage::CAMERA_OUTPUT; 2254 out->streams[i].v3_2.consumerUsage = 0; 2255 out->streams[i].v3_2.maxBuffers = mV4L2BufferCount; 2256 2257 switch (config.streams[i].format) { 2258 case PixelFormat::BLOB: 2259 case PixelFormat::YCBCR_420_888: 2260 case PixelFormat::YV12: // Used by SurfaceTexture 2261 // No override 2262 out->streams[i].v3_2.overrideFormat = config.streams[i].format; 2263 break; 2264 case PixelFormat::IMPLEMENTATION_DEFINED: 2265 // Override based on VIDEO or not 2266 out->streams[i].v3_2.overrideFormat = 2267 (config.streams[i].usage & BufferUsage::VIDEO_ENCODER) ? 
2268 PixelFormat::YCBCR_420_888 : PixelFormat::YV12; 2269 // Save the overridden format in mStreamMap 2270 mStreamMap[config.streams[i].id].format = out->streams[i].v3_2.overrideFormat; 2271 break; 2272 default: 2273 ALOGE("%s: unsupported format 0x%x", __FUNCTION__, config.streams[i].format); 2274 return Status::ILLEGAL_ARGUMENT; 2275 } 2276 } 2277 2278 mFirstRequest = true; 2279 return Status::OK; 2280} 2281 2282bool ExternalCameraDeviceSession::isClosed() { 2283 Mutex::Autolock _l(mLock); 2284 return mClosed; 2285} 2286 2287#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0])) 2288#define UPDATE(md, tag, data, size) \ 2289do { \ 2290 if ((md).update((tag), (data), (size))) { \ 2291 ALOGE("Update " #tag " failed!"); \ 2292 return BAD_VALUE; \ 2293 } \ 2294} while (0) 2295 2296status_t ExternalCameraDeviceSession::initDefaultRequests() { 2297 ::android::hardware::camera::common::V1_0::helper::CameraMetadata md; 2298 2299 const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF; 2300 UPDATE(md, ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1); 2301 2302 const int32_t exposureCompensation = 0; 2303 UPDATE(md, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &exposureCompensation, 1); 2304 2305 const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF; 2306 UPDATE(md, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &videoStabilizationMode, 1); 2307 2308 const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO; 2309 UPDATE(md, ANDROID_CONTROL_AWB_MODE, &awbMode, 1); 2310 2311 const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON; 2312 UPDATE(md, ANDROID_CONTROL_AE_MODE, &aeMode, 1); 2313 2314 const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE; 2315 UPDATE(md, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1); 2316 2317 const uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO; 2318 UPDATE(md, ANDROID_CONTROL_AF_MODE, &afMode, 1); 2319 2320 const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE; 2321 UPDATE(md, ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1); 2322 2323 const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED; 2324 UPDATE(md, ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1); 2325 2326 const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF; 2327 UPDATE(md, ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1); 2328 2329 const uint8_t flashMode = ANDROID_FLASH_MODE_OFF; 2330 UPDATE(md, ANDROID_FLASH_MODE, &flashMode, 1); 2331 2332 const int32_t thumbnailSize[] = {240, 180}; 2333 UPDATE(md, ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2); 2334 2335 const uint8_t jpegQuality = 90; 2336 UPDATE(md, ANDROID_JPEG_QUALITY, &jpegQuality, 1); 2337 UPDATE(md, ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1); 2338 2339 const int32_t jpegOrientation = 0; 2340 UPDATE(md, ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1); 2341 2342 const uint8_t oisMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF; 2343 UPDATE(md, ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &oisMode, 1); 2344 2345 const uint8_t nrMode = ANDROID_NOISE_REDUCTION_MODE_OFF; 2346 UPDATE(md, ANDROID_NOISE_REDUCTION_MODE, &nrMode, 1); 2347 2348 const uint8_t fdMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF; 2349 UPDATE(md, ANDROID_STATISTICS_FACE_DETECT_MODE, &fdMode, 1); 2350 2351 const uint8_t hotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF; 2352 UPDATE(md, ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotpixelMode, 1); 2353 2354 bool support30Fps = false; // prefer 30fps for the default AE target range if available 2355 int32_t maxFps = std::numeric_limits<int32_t>::min(); // otherwise fall back to the fastest rate seen 2356 for (const auto& supportedFormat : mSupportedFormats) { 2357 
for (const auto& fr : supportedFormat.frameRates) { 2358 int32_t framerateInt = static_cast<int32_t>(fr.getDouble()); 2359 if (maxFps < framerateInt) { 2360 maxFps = framerateInt; 2361 } 2362 if (framerateInt == 30) { 2363 support30Fps = true; 2364 break; 2365 } 2366 } 2367 if (support30Fps) { 2368 break; 2369 } 2370 } 2371 int32_t defaultFramerate = support30Fps ? 30 : maxFps; 2372 int32_t defaultFpsRange[] = {defaultFramerate, defaultFramerate}; 2373 UPDATE(md, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, defaultFpsRange, ARRAY_SIZE(defaultFpsRange)); 2374 2375 uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO; 2376 UPDATE(md, ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibandingMode, 1); 2377 2378 const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO; 2379 UPDATE(md, ANDROID_CONTROL_MODE, &controlMode, 1); 2380 2381 auto requestTemplates = hidl_enum_iterator<RequestTemplate>(); 2382 for (RequestTemplate type : requestTemplates) { 2383 ::android::hardware::camera::common::V1_0::helper::CameraMetadata mdCopy = md; 2384 uint8_t intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2385 switch (type) { 2386 case RequestTemplate::PREVIEW: 2387 intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW; 2388 break; 2389 case RequestTemplate::STILL_CAPTURE: 2390 intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE; 2391 break; 2392 case RequestTemplate::VIDEO_RECORD: 2393 intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD; 2394 break; 2395 case RequestTemplate::VIDEO_SNAPSHOT: 2396 intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT; 2397 break; 2398 default: 2399 ALOGV("%s: unsupported RequestTemplate type %d", __FUNCTION__, static_cast<int>(type)); 2400 continue; 2401 } 2402 UPDATE(mdCopy, ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1); 2403 2404 camera_metadata_t* rawMd = mdCopy.release(); 2405 CameraMetadata hidlMd; 2406 hidlMd.setToExternal( 2407 (uint8_t*) rawMd, get_camera_metadata_size(rawMd)); 2408 mDefaultRequests[type] = hidlMd; 2409 free_camera_metadata(rawMd); 2410 } 2411 2412 return OK; 2413} 2414 2415status_t ExternalCameraDeviceSession::fillCaptureResult( 2416 common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp) { 2417 // android.control 2418 // For USB cameras, we don't know the AE state. Set the state to converged to 2419 // indicate the frame should be good to use. Then apps don't have to wait for 2420 // the AE state. 2421 const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED; 2422 UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1); 2423 2424 const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF; 2425 UPDATE(md, ANDROID_CONTROL_AE_LOCK, &ae_lock, 1); 2426 2427 bool afTrigger = mAfTrigger; 2428 if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) { 2429 Mutex::Autolock _l(mLock); 2430 camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER); 2431 if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) { 2432 mAfTrigger = afTrigger = true; 2433 } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) { 2434 mAfTrigger = afTrigger = false; 2435 } 2436 } 2437 2438 // For USB cameras, the camera itself handles everything and we don't have control 2439 // over AF. We simply fake the AF metadata based on the request 2440 // received here. 2441 uint8_t afState; 2442 if (afTrigger) { 2443 afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED; 2444 } else { 2445 afState = ANDROID_CONTROL_AF_STATE_INACTIVE; 2446 } 2447 UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1); 2448 2449 // Set AWB state to converged to indicate the frame should be good to use. 
2450 const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED; 2451 UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1); 2452 2453 const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF; 2454 UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1); 2455 2456 camera_metadata_ro_entry active_array_size = 2457 mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE); 2458 2459 if (active_array_size.count == 0) { 2460 ALOGE("%s: cannot find active array size!", __FUNCTION__); 2461 return -EINVAL; 2462 } 2463 2464 const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE; 2465 UPDATE(md, ANDROID_FLASH_STATE, &flashState, 1); 2466 2467 // android.scaler 2468 const int32_t crop_region[] = { 2469 active_array_size.data.i32[0], active_array_size.data.i32[1], 2470 active_array_size.data.i32[2], active_array_size.data.i32[3], 2471 }; 2472 UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region)); 2473 2474 // android.sensor 2475 UPDATE(md, ANDROID_SENSOR_TIMESTAMP, &timestamp, 1); 2476 2477 // android.statistics 2478 const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF; 2479 UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1); 2480 2481 const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE; 2482 UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1); 2483 2484 return OK; 2485} 2486 2487#undef ARRAY_SIZE 2488#undef UPDATE 2489 2490} // namespace implementation 2491} // namespace V3_4 2492} // namespace device 2493} // namespace camera 2494} // namespace hardware 2495} // namespace android 2496