ExternalCameraDeviceSession.cpp revision fa5aafbec3ac38d174579b1ffc23ec333a868163
1/* 2 * Copyright (C) 2018 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16#define LOG_TAG "ExtCamDevSsn@3.4" 17//#define LOG_NDEBUG 0 18#include <log/log.h> 19 20#include <inttypes.h> 21#include "ExternalCameraDeviceSession.h" 22 23#include "android-base/macros.h" 24#include <utils/Timers.h> 25#include <linux/videodev2.h> 26#include <sync/sync.h> 27 28#define HAVE_JPEG // required for libyuv.h to export MJPEG decode APIs 29#include <libyuv.h> 30 31#include <jpeglib.h> 32 33 34namespace android { 35namespace hardware { 36namespace camera { 37namespace device { 38namespace V3_4 { 39namespace implementation { 40 41namespace { 42// Size of request/result metadata fast message queue. Change to 0 to always use hwbinder buffer. 43static constexpr size_t kMetadataMsgQueueSize = 1 << 18 /* 256kB */; 44 45const int kBadFramesAfterStreamOn = 1; // drop x frames after streamOn to get rid of some initial 46 // bad frames. TODO: develop a better bad frame detection 47 // method 48 49bool tryLock(Mutex& mutex) 50{ 51 static const int kDumpLockRetries = 50; 52 static const int kDumpLockSleep = 60000; 53 bool locked = false; 54 for (int i = 0; i < kDumpLockRetries; ++i) { 55 if (mutex.tryLock() == NO_ERROR) { 56 locked = true; 57 break; 58 } 59 usleep(kDumpLockSleep); 60 } 61 return locked; 62} 63 64} // Anonymous namespace 65 66// Static instances 67const int ExternalCameraDeviceSession::kMaxProcessedStream; 68const int ExternalCameraDeviceSession::kMaxStallStream; 69HandleImporter ExternalCameraDeviceSession::sHandleImporter; 70 71ExternalCameraDeviceSession::ExternalCameraDeviceSession( 72 const sp<ICameraDeviceCallback>& callback, 73 const ExternalCameraConfig& cfg, 74 const std::vector<SupportedV4L2Format>& sortedFormats, 75 const CroppingType& croppingType, 76 const common::V1_0::helper::CameraMetadata& chars, 77 const std::string& cameraId, 78 unique_fd v4l2Fd) : 79 mCallback(callback), 80 mCfg(cfg), 81 mCameraCharacteristics(chars), 82 mSupportedFormats(sortedFormats), 83 mCroppingType(croppingType), 84 mCameraId(cameraId), 85 mV4l2Fd(std::move(v4l2Fd)), 86 mOutputThread(new OutputThread(this, mCroppingType)), 87 mMaxThumbResolution(getMaxThumbResolution()), 88 mMaxJpegResolution(getMaxJpegResolution()) { 89 mInitFail = initialize(); 90} 91 92bool ExternalCameraDeviceSession::initialize() { 93 if (mV4l2Fd.get() < 0) { 94 ALOGE("%s: invalid v4l2 device fd %d!", __FUNCTION__, mV4l2Fd.get()); 95 return true; 96 } 97 98 status_t status = initDefaultRequests(); 99 if (status != OK) { 100 ALOGE("%s: init default requests failed!", __FUNCTION__); 101 return true; 102 } 103 104 mRequestMetadataQueue = std::make_unique<RequestMetadataQueue>( 105 kMetadataMsgQueueSize, false /* non blocking */); 106 if (!mRequestMetadataQueue->isValid()) { 107 ALOGE("%s: invalid request fmq", __FUNCTION__); 108 return true; 109 } 110 mResultMetadataQueue = std::make_shared<RequestMetadataQueue>( 111 kMetadataMsgQueueSize, false /* non blocking */); 112 if 
(!mResultMetadataQueue->isValid()) { 113 ALOGE("%s: invalid result fmq", __FUNCTION__); 114 return true; 115 } 116 117 // TODO: check if PRIORITY_DISPLAY is enough 118 mOutputThread->run("ExtCamOut", PRIORITY_DISPLAY); 119 return false; 120 } 121 122 Status ExternalCameraDeviceSession::initStatus() const { 123 Mutex::Autolock _l(mLock); 124 Status status = Status::OK; 125 if (mInitFail || mClosed) { 126 ALOGI("%s: session initFailed %d closed %d", __FUNCTION__, mInitFail, mClosed); 127 status = Status::INTERNAL_ERROR; 128 } 129 return status; 130 } 131 132 ExternalCameraDeviceSession::~ExternalCameraDeviceSession() { 133 if (!isClosed()) { 134 ALOGE("ExternalCameraDeviceSession deleted before close!"); 135 close(); 136 } 137 } 138 139 140 void ExternalCameraDeviceSession::dumpState(const native_handle_t* handle) { 141 if (handle->numFds != 1 || handle->numInts != 0) { 142 ALOGE("%s: handle must contain 1 FD and 0 integers! Got %d FDs and %d ints", 143 __FUNCTION__, handle->numFds, handle->numInts); 144 return; 145 } 146 int fd = handle->data[0]; 147 148 bool intfLocked = tryLock(mInterfaceLock); 149 if (!intfLocked) { 150 dprintf(fd, "!! ExternalCameraDeviceSession interface may be deadlocked !!\n"); 151 } 152 153 if (isClosed()) { 154 dprintf(fd, "External camera %s is closed\n", mCameraId.c_str()); 155 return; 156 } 157 158 bool streaming = false; 159 size_t v4L2BufferCount = 0; 160 SupportedV4L2Format streamingFmt; 161 std::unordered_set<uint32_t> inflightFrames; 162 { 163 // Note: do not hold mLock here; it is not recursive, so the tryLock below must be able to probe it 164 bool sessionLocked = tryLock(mLock); 165 if (!sessionLocked) { 166 dprintf(fd, "!! ExternalCameraDeviceSession mLock may be deadlocked !!\n"); 167 } 168 streaming = mV4l2Streaming; 169 streamingFmt = mV4l2StreamingFmt; 170 v4L2BufferCount = mV4L2BufferCount; 171 inflightFrames = mInflightFrames; 172 if (sessionLocked) { 173 mLock.unlock(); 174 } 175 } 176 177 dprintf(fd, "External camera %s V4L2 FD %d, cropping type %s, %s\n", 178 mCameraId.c_str(), mV4l2Fd.get(), 179 (mCroppingType == VERTICAL) ? "vertical" : "horizontal", 180 streaming ? 
"streaming" : "not streaming"); 181 if (streaming) { 182 // TODO: dump fps later 183 dprintf(fd, "Current V4L2 format %c%c%c%c %dx%d\n", 184 streamingFmt.fourcc & 0xFF, 185 (streamingFmt.fourcc >> 8) & 0xFF, 186 (streamingFmt.fourcc >> 16) & 0xFF, 187 (streamingFmt.fourcc >> 24) & 0xFF, 188 streamingFmt.width, streamingFmt.height); 189 190 size_t numDequeuedV4l2Buffers = 0; 191 { 192 std::lock_guard<std::mutex> lk(mV4l2BufferLock); 193 numDequeuedV4l2Buffers = mNumDequeuedV4l2Buffers; 194 } 195 dprintf(fd, "V4L2 buffer queue size %zu, dequeued %zu\n", 196 v4L2BufferCount, numDequeuedV4l2Buffers); 197 } 198 199 dprintf(fd, "In-flight frames (not sorted):"); 200 for (const auto& frameNumber : inflightFrames) { 201 dprintf(fd, "%d, ", frameNumber); 202 } 203 dprintf(fd, "\n"); 204 mOutputThread->dump(fd); 205 dprintf(fd, "\n"); 206 207 if (intfLocked) { 208 mInterfaceLock.unlock(); 209 } 210 211 return; 212} 213 214Return<void> ExternalCameraDeviceSession::constructDefaultRequestSettings( 215 V3_2::RequestTemplate type, 216 V3_2::ICameraDeviceSession::constructDefaultRequestSettings_cb _hidl_cb) { 217 V3_2::CameraMetadata outMetadata; 218 Status status = constructDefaultRequestSettingsRaw( 219 static_cast<RequestTemplate>(type), &outMetadata); 220 _hidl_cb(status, outMetadata); 221 return Void(); 222} 223 224Status ExternalCameraDeviceSession::constructDefaultRequestSettingsRaw(RequestTemplate type, 225 V3_2::CameraMetadata *outMetadata) { 226 CameraMetadata emptyMd; 227 Status status = initStatus(); 228 if (status != Status::OK) { 229 return status; 230 } 231 232 switch (type) { 233 case RequestTemplate::PREVIEW: 234 case RequestTemplate::STILL_CAPTURE: 235 case RequestTemplate::VIDEO_RECORD: 236 case RequestTemplate::VIDEO_SNAPSHOT: { 237 *outMetadata = mDefaultRequests[type]; 238 break; 239 } 240 case RequestTemplate::MANUAL: 241 case RequestTemplate::ZERO_SHUTTER_LAG: 242 // Don't support MANUAL, ZSL templates 243 status = Status::ILLEGAL_ARGUMENT; 244 break; 245 default: 246 ALOGE("%s: unknown request template type %d", __FUNCTION__, static_cast<int>(type)); 247 status = Status::ILLEGAL_ARGUMENT; 248 break; 249 } 250 return status; 251} 252 253Return<void> ExternalCameraDeviceSession::configureStreams( 254 const V3_2::StreamConfiguration& streams, 255 ICameraDeviceSession::configureStreams_cb _hidl_cb) { 256 V3_2::HalStreamConfiguration outStreams; 257 V3_3::HalStreamConfiguration outStreams_v33; 258 Mutex::Autolock _il(mInterfaceLock); 259 260 Status status = configureStreams(streams, &outStreams_v33); 261 size_t size = outStreams_v33.streams.size(); 262 outStreams.streams.resize(size); 263 for (size_t i = 0; i < size; i++) { 264 outStreams.streams[i] = outStreams_v33.streams[i].v3_2; 265 } 266 _hidl_cb(status, outStreams); 267 return Void(); 268} 269 270Return<void> ExternalCameraDeviceSession::configureStreams_3_3( 271 const V3_2::StreamConfiguration& streams, 272 ICameraDeviceSession::configureStreams_3_3_cb _hidl_cb) { 273 V3_3::HalStreamConfiguration outStreams; 274 Mutex::Autolock _il(mInterfaceLock); 275 276 Status status = configureStreams(streams, &outStreams); 277 _hidl_cb(status, outStreams); 278 return Void(); 279} 280 281Return<void> ExternalCameraDeviceSession::configureStreams_3_4( 282 const V3_4::StreamConfiguration& requestedConfiguration, 283 ICameraDeviceSession::configureStreams_3_4_cb _hidl_cb) { 284 V3_2::StreamConfiguration config_v32; 285 V3_3::HalStreamConfiguration outStreams_v33; 286 Mutex::Autolock _il(mInterfaceLock); 287 288 config_v32.operationMode = 
requestedConfiguration.operationMode; 289 config_v32.streams.resize(requestedConfiguration.streams.size()); 290 for (size_t i = 0; i < config_v32.streams.size(); i++) { 291 config_v32.streams[i] = requestedConfiguration.streams[i].v3_2; 292 } 293 294 // Ignore requestedConfiguration.sessionParams. External camera does not support it 295 Status status = configureStreams(config_v32, &outStreams_v33); 296 297 V3_4::HalStreamConfiguration outStreams; 298 outStreams.streams.resize(outStreams_v33.streams.size()); 299 for (size_t i = 0; i < outStreams.streams.size(); i++) { 300 outStreams.streams[i].v3_3 = outStreams_v33.streams[i]; 301 } 302 _hidl_cb(status, outStreams); 303 return Void(); 304} 305 306Return<void> ExternalCameraDeviceSession::getCaptureRequestMetadataQueue( 307 ICameraDeviceSession::getCaptureRequestMetadataQueue_cb _hidl_cb) { 308 Mutex::Autolock _il(mInterfaceLock); 309 _hidl_cb(*mRequestMetadataQueue->getDesc()); 310 return Void(); 311} 312 313Return<void> ExternalCameraDeviceSession::getCaptureResultMetadataQueue( 314 ICameraDeviceSession::getCaptureResultMetadataQueue_cb _hidl_cb) { 315 Mutex::Autolock _il(mInterfaceLock); 316 _hidl_cb(*mResultMetadataQueue->getDesc()); 317 return Void(); 318} 319 320Return<void> ExternalCameraDeviceSession::processCaptureRequest( 321 const hidl_vec<CaptureRequest>& requests, 322 const hidl_vec<BufferCache>& cachesToRemove, 323 ICameraDeviceSession::processCaptureRequest_cb _hidl_cb) { 324 Mutex::Autolock _il(mInterfaceLock); 325 updateBufferCaches(cachesToRemove); 326 327 uint32_t numRequestProcessed = 0; 328 Status s = Status::OK; 329 for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { 330 s = processOneCaptureRequest(requests[i]); 331 if (s != Status::OK) { 332 break; 333 } 334 } 335 336 _hidl_cb(s, numRequestProcessed); 337 return Void(); 338} 339 340Return<void> ExternalCameraDeviceSession::processCaptureRequest_3_4( 341 const hidl_vec<V3_4::CaptureRequest>& requests, 342 const hidl_vec<V3_2::BufferCache>& cachesToRemove, 343 ICameraDeviceSession::processCaptureRequest_3_4_cb _hidl_cb) { 344 Mutex::Autolock _il(mInterfaceLock); 345 updateBufferCaches(cachesToRemove); 346 347 uint32_t numRequestProcessed = 0; 348 Status s = Status::OK; 349 for (size_t i = 0; i < requests.size(); i++, numRequestProcessed++) { 350 s = processOneCaptureRequest(requests[i].v3_2); 351 if (s != Status::OK) { 352 break; 353 } 354 } 355 356 _hidl_cb(s, numRequestProcessed); 357 return Void(); 358} 359 360Return<Status> ExternalCameraDeviceSession::flush() { 361 Mutex::Autolock _il(mInterfaceLock); 362 Status status = initStatus(); 363 if (status != Status::OK) { 364 return status; 365 } 366 mOutputThread->flush(); 367 return Status::OK; 368} 369 370Return<void> ExternalCameraDeviceSession::close() { 371 Mutex::Autolock _il(mInterfaceLock); 372 bool closed = isClosed(); 373 if (!closed) { 374 mOutputThread->flush(); 375 mOutputThread->requestExit(); 376 mOutputThread->join(); 377 378 Mutex::Autolock _l(mLock); 379 // free all buffers 380 for(auto pair : mStreamMap) { 381 cleanupBuffersLocked(/*Stream ID*/pair.first); 382 } 383 v4l2StreamOffLocked(); 384 ALOGV("%s: closing V4L2 camera FD %d", __FUNCTION__, mV4l2Fd.get()); 385 mV4l2Fd.reset(); 386 mClosed = true; 387 } 388 return Void(); 389} 390 391Status ExternalCameraDeviceSession::importRequest( 392 const CaptureRequest& request, 393 hidl_vec<buffer_handle_t*>& allBufPtrs, 394 hidl_vec<int>& allFences) { 395 size_t numOutputBufs = request.outputBuffers.size(); 396 size_t numBufs = 
numOutputBufs; 397 // Validate all I/O buffers 398 hidl_vec<buffer_handle_t> allBufs; 399 hidl_vec<uint64_t> allBufIds; 400 allBufs.resize(numBufs); 401 allBufIds.resize(numBufs); 402 allBufPtrs.resize(numBufs); 403 allFences.resize(numBufs); 404 std::vector<int32_t> streamIds(numBufs); 405 406 for (size_t i = 0; i < numOutputBufs; i++) { 407 allBufs[i] = request.outputBuffers[i].buffer.getNativeHandle(); 408 allBufIds[i] = request.outputBuffers[i].bufferId; 409 allBufPtrs[i] = &allBufs[i]; 410 streamIds[i] = request.outputBuffers[i].streamId; 411 } 412 413 for (size_t i = 0; i < numBufs; i++) { 414 buffer_handle_t buf = allBufs[i]; 415 uint64_t bufId = allBufIds[i]; 416 CirculatingBuffers& cbs = mCirculatingBuffers[streamIds[i]]; 417 if (cbs.count(bufId) == 0) { 418 if (buf == nullptr) { 419 ALOGE("%s: bufferId %" PRIu64 " has null buffer handle!", __FUNCTION__, bufId); 420 return Status::ILLEGAL_ARGUMENT; 421 } 422 // Register a newly seen buffer 423 buffer_handle_t importedBuf = buf; 424 sHandleImporter.importBuffer(importedBuf); 425 if (importedBuf == nullptr) { 426 ALOGE("%s: output buffer %zu is invalid!", __FUNCTION__, i); 427 return Status::INTERNAL_ERROR; 428 } else { 429 cbs[bufId] = importedBuf; 430 } 431 } 432 allBufPtrs[i] = &cbs[bufId]; 433 } 434 435 // All buffers are imported. Now validate output buffer acquire fences 436 for (size_t i = 0; i < numOutputBufs; i++) { 437 if (!sHandleImporter.importFence( 438 request.outputBuffers[i].acquireFence, allFences[i])) { 439 ALOGE("%s: output buffer %zu acquire fence is invalid", __FUNCTION__, i); 440 cleanupInflightFences(allFences, i); 441 return Status::INTERNAL_ERROR; 442 } 443 } 444 return Status::OK; 445} 446 447void ExternalCameraDeviceSession::cleanupInflightFences( 448 hidl_vec<int>& allFences, size_t numFences) { 449 for (size_t j = 0; j < numFences; j++) { 450 sHandleImporter.closeFence(allFences[j]); 451 } 452} 453 454Status ExternalCameraDeviceSession::processOneCaptureRequest(const CaptureRequest& request) { 455 Status status = initStatus(); 456 if (status != Status::OK) { 457 return status; 458 } 459 460 if (request.inputBuffer.streamId != -1) { 461 ALOGE("%s: external camera does not support reprocessing!", __FUNCTION__); 462 return Status::ILLEGAL_ARGUMENT; 463 } 464 465 Mutex::Autolock _l(mLock); 466 if (!mV4l2Streaming) { 467 ALOGE("%s: cannot process request in streamOff state!", __FUNCTION__); 468 return Status::INTERNAL_ERROR; 469 } 470 471 const camera_metadata_t *rawSettings = nullptr; 472 bool converted = true; 473 CameraMetadata settingsFmq; // settings from FMQ 474 if (request.fmqSettingsSize > 0) { 475 // non-blocking read; client must write metadata before calling 476 // processOneCaptureRequest 477 settingsFmq.resize(request.fmqSettingsSize); 478 bool read = mRequestMetadataQueue->read(settingsFmq.data(), request.fmqSettingsSize); 479 if (read) { 480 converted = V3_2::implementation::convertFromHidl(settingsFmq, &rawSettings); 481 } else { 482 ALOGE("%s: capture request settings metadata couldn't be read from fmq!", __FUNCTION__); 483 converted = false; 484 } 485 } else { 486 converted = V3_2::implementation::convertFromHidl(request.settings, &rawSettings); 487 } 488 489 if (converted && rawSettings != nullptr) { 490 mLatestReqSetting = rawSettings; 491 } 492 493 if (!converted) { 494 ALOGE("%s: capture request settings metadata is corrupt!", __FUNCTION__); 495 return Status::ILLEGAL_ARGUMENT; 496 } 497 498 if (mFirstRequest && rawSettings == nullptr) { 499 ALOGE("%s: capture request settings must not 
be null for first request!", 500 __FUNCTION__); 501 return Status::ILLEGAL_ARGUMENT; 502 } 503 504 hidl_vec<buffer_handle_t*> allBufPtrs; 505 hidl_vec<int> allFences; 506 size_t numOutputBufs = request.outputBuffers.size(); 507 508 if (numOutputBufs == 0) { 509 ALOGE("%s: capture request must have at least one output buffer!", __FUNCTION__); 510 return Status::ILLEGAL_ARGUMENT; 511 } 512 513 status = importRequest(request, allBufPtrs, allFences); 514 if (status != Status::OK) { 515 return status; 516 } 517 518 // TODO: program fps range per capture request here 519 // or limit the set of availableFpsRange 520 521 522 nsecs_t shutterTs = 0; 523 sp<V4L2Frame> frameIn = dequeueV4l2FrameLocked(&shutterTs); 524 if (frameIn == nullptr) { 525 ALOGE("%s: V4L2 dequeue frame failed!", __FUNCTION__); 526 return Status::INTERNAL_ERROR; 527 } 528 529 // TODO: reduce object copy in this path 530 HalRequest halReq = { 531 .frameNumber = request.frameNumber, 532 .setting = mLatestReqSetting, 533 .frameIn = frameIn, 534 .shutterTs = shutterTs}; 535 halReq.buffers.resize(numOutputBufs); 536 for (size_t i = 0; i < numOutputBufs; i++) { 537 HalStreamBuffer& halBuf = halReq.buffers[i]; 538 int streamId = halBuf.streamId = request.outputBuffers[i].streamId; 539 halBuf.bufferId = request.outputBuffers[i].bufferId; 540 const Stream& stream = mStreamMap[streamId]; 541 halBuf.width = stream.width; 542 halBuf.height = stream.height; 543 halBuf.format = stream.format; 544 halBuf.usage = stream.usage; 545 halBuf.bufPtr = allBufPtrs[i]; 546 halBuf.acquireFence = allFences[i]; 547 halBuf.fenceTimeout = false; 548 } 549 mInflightFrames.insert(halReq.frameNumber); 550 // Send request to OutputThread for the rest of processing 551 mOutputThread->submitRequest(halReq); 552 mFirstRequest = false; 553 return Status::OK; 554 } 555 556 void ExternalCameraDeviceSession::notifyShutter(uint32_t frameNumber, nsecs_t shutterTs) { 557 NotifyMsg msg; 558 msg.type = MsgType::SHUTTER; 559 msg.msg.shutter.frameNumber = frameNumber; 560 msg.msg.shutter.timestamp = shutterTs; 561 mCallback->notify({msg}); 562 } 563 564 void ExternalCameraDeviceSession::notifyError( 565 uint32_t frameNumber, int32_t streamId, ErrorCode ec) { 566 NotifyMsg msg; 567 msg.type = MsgType::ERROR; 568 msg.msg.error.frameNumber = frameNumber; 569 msg.msg.error.errorStreamId = streamId; 570 msg.msg.error.errorCode = ec; 571 mCallback->notify({msg}); 572 } 573 574 //TODO: refactor with processCaptureResult 575 Status ExternalCameraDeviceSession::processCaptureRequestError(const HalRequest& req) { 576 // Return V4L2 buffer to V4L2 buffer queue 577 enqueueV4l2Frame(req.frameIn); 578 579 // NotifyShutter 580 notifyShutter(req.frameNumber, req.shutterTs); 581 582 notifyError(/*frameNum*/req.frameNumber, /*stream*/-1, ErrorCode::ERROR_REQUEST); 583 584 // Fill output buffers 585 hidl_vec<CaptureResult> results; 586 results.resize(1); 587 CaptureResult& result = results[0]; 588 result.frameNumber = req.frameNumber; 589 result.partialResult = 1; 590 result.inputBuffer.streamId = -1; 591 result.outputBuffers.resize(req.buffers.size()); 592 for (size_t i = 0; i < req.buffers.size(); i++) { 593 result.outputBuffers[i].streamId = req.buffers[i].streamId; 594 result.outputBuffers[i].bufferId = req.buffers[i].bufferId; 595 result.outputBuffers[i].status = BufferStatus::ERROR; 596 if (req.buffers[i].acquireFence >= 0) { 597 native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); 598 handle->data[0] = req.buffers[i].acquireFence; 599 
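// The buffer was never filled, so the unconsumed acquire fence is handed back to the framework as the release fence. shouldOwn is false: the hidl_handle does not take ownership, and both the handle and its fence FD are released in freeReleaseFences() below.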
result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); 600 } 601 } 602 603 // update inflight records 604 { 605 Mutex::Autolock _l(mLock); 606 mInflightFrames.erase(req.frameNumber); 607 } 608 609 // Callback into framework 610 invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); 611 freeReleaseFences(results); 612 return Status::OK; 613} 614 615Status ExternalCameraDeviceSession::processCaptureResult(HalRequest& req) { 616 // Return V4L2 buffer to V4L2 buffer queue 617 enqueueV4l2Frame(req.frameIn); 618 619 // NotifyShutter 620 notifyShutter(req.frameNumber, req.shutterTs); 621 622 // Fill output buffers 623 hidl_vec<CaptureResult> results; 624 results.resize(1); 625 CaptureResult& result = results[0]; 626 result.frameNumber = req.frameNumber; 627 result.partialResult = 1; 628 result.inputBuffer.streamId = -1; 629 result.outputBuffers.resize(req.buffers.size()); 630 for (size_t i = 0; i < req.buffers.size(); i++) { 631 result.outputBuffers[i].streamId = req.buffers[i].streamId; 632 result.outputBuffers[i].bufferId = req.buffers[i].bufferId; 633 if (req.buffers[i].fenceTimeout) { 634 result.outputBuffers[i].status = BufferStatus::ERROR; 635 native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); 636 handle->data[0] = req.buffers[i].acquireFence; 637 result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); 638 notifyError(req.frameNumber, req.buffers[i].streamId, ErrorCode::ERROR_BUFFER); 639 } else { 640 result.outputBuffers[i].status = BufferStatus::OK; 641 // TODO: refactor 642 if (req.buffers[i].acquireFence > 0) { 643 native_handle_t* handle = native_handle_create(/*numFds*/1, /*numInts*/0); 644 handle->data[0] = req.buffers[i].acquireFence; 645 result.outputBuffers[i].releaseFence.setTo(handle, /*shouldOwn*/false); 646 } 647 } 648 } 649 650 // Fill capture result metadata 651 fillCaptureResult(req.setting, req.shutterTs); 652 const camera_metadata_t *rawResult = req.setting.getAndLock(); 653 V3_2::implementation::convertToHidl(rawResult, &result.result); 654 req.setting.unlock(rawResult); 655 656 // update inflight records 657 { 658 Mutex::Autolock _l(mLock); 659 mInflightFrames.erase(req.frameNumber); 660 } 661 662 // Callback into framework 663 invokeProcessCaptureResultCallback(results, /* tryWriteFmq */true); 664 freeReleaseFences(results); 665 return Status::OK; 666} 667 668void ExternalCameraDeviceSession::invokeProcessCaptureResultCallback( 669 hidl_vec<CaptureResult> &results, bool tryWriteFmq) { 670 if (mProcessCaptureResultLock.tryLock() != OK) { 671 const nsecs_t NS_TO_SECOND = 1000000000; 672 ALOGV("%s: previous call is not finished! 
waiting 1s...", __FUNCTION__); 673 if (mProcessCaptureResultLock.timedLock(/* 1s */NS_TO_SECOND) != OK) { 674 ALOGE("%s: cannot acquire lock in 1s, cannot proceed", 675 __FUNCTION__); 676 return; 677 } 678 } 679 if (tryWriteFmq && mResultMetadataQueue->availableToWrite() > 0) { 680 for (CaptureResult &result : results) { 681 if (result.result.size() > 0) { 682 if (mResultMetadataQueue->write(result.result.data(), result.result.size())) { 683 result.fmqResultSize = result.result.size(); 684 result.result.resize(0); 685 } else { 686 ALOGW("%s: couldn't utilize fmq, fall back to hwbinder", __FUNCTION__); 687 result.fmqResultSize = 0; 688 } 689 } else { 690 result.fmqResultSize = 0; 691 } 692 } 693 } 694 auto status = mCallback->processCaptureResult(results); 695 if (!status.isOk()) { 696 ALOGE("%s: processCaptureResult ERROR : %s", __FUNCTION__, 697 status.description().c_str()); 698 } 699 700 mProcessCaptureResultLock.unlock(); 701} 702 703void ExternalCameraDeviceSession::freeReleaseFences(hidl_vec<CaptureResult>& results) { 704 for (auto& result : results) { 705 if (result.inputBuffer.releaseFence.getNativeHandle() != nullptr) { 706 native_handle_t* handle = const_cast<native_handle_t*>( 707 result.inputBuffer.releaseFence.getNativeHandle()); 708 native_handle_close(handle); 709 native_handle_delete(handle); 710 } 711 for (auto& buf : result.outputBuffers) { 712 if (buf.releaseFence.getNativeHandle() != nullptr) { 713 native_handle_t* handle = const_cast<native_handle_t*>( 714 buf.releaseFence.getNativeHandle()); 715 native_handle_close(handle); 716 native_handle_delete(handle); 717 } 718 } 719 } 720 return; 721} 722 723ExternalCameraDeviceSession::OutputThread::OutputThread( 724 wp<ExternalCameraDeviceSession> parent, 725 CroppingType ct) : mParent(parent), mCroppingType(ct) {} 726 727ExternalCameraDeviceSession::OutputThread::~OutputThread() {} 728 729uint32_t ExternalCameraDeviceSession::OutputThread::getFourCcFromLayout( 730 const YCbCrLayout& layout) { 731 intptr_t cb = reinterpret_cast<intptr_t>(layout.cb); 732 intptr_t cr = reinterpret_cast<intptr_t>(layout.cr); 733 if (std::abs(cb - cr) == 1 && layout.chromaStep == 2) { 734 // Interleaved format 735 if (layout.cb > layout.cr) { 736 return V4L2_PIX_FMT_NV21; 737 } else { 738 return V4L2_PIX_FMT_NV12; 739 } 740 } else if (layout.chromaStep == 1) { 741 // Planar format 742 if (layout.cb > layout.cr) { 743 return V4L2_PIX_FMT_YVU420; // YV12 744 } else { 745 return V4L2_PIX_FMT_YUV420; // YU12 746 } 747 } else { 748 return FLEX_YUV_GENERIC; 749 } 750} 751 752int ExternalCameraDeviceSession::OutputThread::getCropRect( 753 CroppingType ct, const Size& inSize, const Size& outSize, IMapper::Rect* out) { 754 if (out == nullptr) { 755 ALOGE("%s: out is null", __FUNCTION__); 756 return -1; 757 } 758 759 uint32_t inW = inSize.width; 760 uint32_t inH = inSize.height; 761 uint32_t outW = outSize.width; 762 uint32_t outH = outSize.height; 763 764 // Handle special case where aspect ratio is close to input but scaled 765 // dimension is slightly larger than input 766 float arIn = ASPECT_RATIO(inSize); 767 float arOut = ASPECT_RATIO(outSize); 768 if (isAspectRatioClose(arIn, arOut)) { 769 out->left = 0; 770 out->top = 0; 771 out->width = inW; 772 out->height = inH; 773 return 0; 774 } 775 776 if (ct == VERTICAL) { 777 uint64_t scaledOutH = static_cast<uint64_t>(outH) * inW / outW; 778 if (scaledOutH > inH) { 779 ALOGE("%s: Output size %dx%d cannot be vertically cropped from input size %dx%d", 780 __FUNCTION__, outW, outH, inW, inH); 781 return -1; 
782 } 783 scaledOutH = scaledOutH & ~0x1; // make it multiple of 2 784 785 out->left = 0; 786 out->top = ((inH - scaledOutH) / 2) & ~0x1; 787 out->width = inW; 788 out->height = static_cast<int32_t>(scaledOutH); 789 ALOGV("%s: crop %dx%d to %dx%d: top %d, scaledH %d", 790 __FUNCTION__, inW, inH, outW, outH, out->top, static_cast<int32_t>(scaledOutH)); 791 } else { 792 uint64_t scaledOutW = static_cast<uint64_t>(outW) * inH / outH; 793 if (scaledOutW > inW) { 794 ALOGE("%s: Output size %dx%d cannot be horizontally cropped from input size %dx%d", 795 __FUNCTION__, outW, outH, inW, inH); 796 return -1; 797 } 798 scaledOutW = scaledOutW & ~0x1; // make it multiple of 2 799 800 out->left = ((inW - scaledOutW) / 2) & ~0x1; 801 out->top = 0; 802 out->width = static_cast<int32_t>(scaledOutW); 803 out->height = inH; 804 ALOGV("%s: crop %dx%d to %dx%d: left %d, scaledW %d", 805 __FUNCTION__, inW, inH, outW, outH, out->left, static_cast<int32_t>(scaledOutW)); 806 } 807 808 return 0; 809 } 810 811 int ExternalCameraDeviceSession::OutputThread::cropAndScaleLocked( 812 sp<AllocatedFrame>& in, const Size& outSz, YCbCrLayout* out) { 813 Size inSz = {in->mWidth, in->mHeight}; 814 815 int ret; 816 if (inSz == outSz) { 817 ret = in->getLayout(out); 818 if (ret != 0) { 819 ALOGE("%s: failed to get input image layout", __FUNCTION__); 820 return ret; 821 } 822 return ret; 823 } 824 825 // Cropping to output aspect ratio 826 IMapper::Rect inputCrop; 827 ret = getCropRect(mCroppingType, inSz, outSz, &inputCrop); 828 if (ret != 0) { 829 ALOGE("%s: failed to compute crop rect for output size %dx%d", 830 __FUNCTION__, outSz.width, outSz.height); 831 return ret; 832 } 833 834 YCbCrLayout croppedLayout; 835 ret = in->getCroppedLayout(inputCrop, &croppedLayout); 836 if (ret != 0) { 837 ALOGE("%s: failed to crop input image %dx%d to output size %dx%d", 838 __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); 839 return ret; 840 } 841 842 if ((mCroppingType == VERTICAL && inSz.width == outSz.width) || 843 (mCroppingType == HORIZONTAL && inSz.height == outSz.height)) { 844 // No scale is needed 845 *out = croppedLayout; 846 return 0; 847 } 848 849 auto it = mScaledYu12Frames.find(outSz); 850 sp<AllocatedFrame> scaledYu12Buf; 851 if (it != mScaledYu12Frames.end()) { 852 scaledYu12Buf = it->second; 853 } else { 854 it = mIntermediateBuffers.find(outSz); 855 if (it == mIntermediateBuffers.end()) { 856 ALOGE("%s: failed to find intermediate buffer size %dx%d", 857 __FUNCTION__, outSz.width, outSz.height); 858 return -1; 859 } 860 scaledYu12Buf = it->second; 861 } 862 // Scale 863 YCbCrLayout outLayout; 864 ret = scaledYu12Buf->getLayout(&outLayout); 865 if (ret != 0) { 866 ALOGE("%s: failed to get output buffer layout", __FUNCTION__); 867 return ret; 868 } 869 870 ret = libyuv::I420Scale( 871 static_cast<uint8_t*>(croppedLayout.y), 872 croppedLayout.yStride, 873 static_cast<uint8_t*>(croppedLayout.cb), 874 croppedLayout.cStride, 875 static_cast<uint8_t*>(croppedLayout.cr), 876 croppedLayout.cStride, 877 inputCrop.width, 878 inputCrop.height, 879 static_cast<uint8_t*>(outLayout.y), 880 outLayout.yStride, 881 static_cast<uint8_t*>(outLayout.cb), 882 outLayout.cStride, 883 static_cast<uint8_t*>(outLayout.cr), 884 outLayout.cStride, 885 outSz.width, 886 outSz.height, 887 // TODO: b/72261744 see if we can use better filter without losing too much perf 888 libyuv::FilterMode::kFilterNone); 889 890 if (ret != 0) { 891 ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. 
Ret %d", 892 __FUNCTION__, inputCrop.width, inputCrop.height, 893 outSz.width, outSz.height, ret); 894 return ret; 895 } 896 897 *out = outLayout; 898 mScaledYu12Frames.insert({outSz, scaledYu12Buf}); 899 return 0; 900} 901 902 903int ExternalCameraDeviceSession::OutputThread::cropAndScaleThumbLocked( 904 sp<AllocatedFrame>& in, const Size &outSz, YCbCrLayout* out) { 905 Size inSz {in->mWidth, in->mHeight}; 906 907 if ((outSz.width * outSz.height) > 908 (mYu12ThumbFrame->mWidth * mYu12ThumbFrame->mHeight)) { 909 ALOGE("%s: Requested thumbnail size too big (%d,%d) > (%d,%d)", 910 __FUNCTION__, outSz.width, outSz.height, 911 mYu12ThumbFrame->mWidth, mYu12ThumbFrame->mHeight); 912 return -1; 913 } 914 915 int ret; 916 917 /* This will crop-and-zoom the input YUV frame to the thumbnail size 918 * Based on the following logic: 919 * 1) Square pixels come in, square pixels come out, therefore single 920 * scale factor is computed to either make input bigger or smaller 921 * depending on if we are upscaling or downscaling 922 * 2) That single scale factor would either make height too tall or width 923 * too wide so we need to crop the input either horizontally or vertically 924 * but not both 925 */ 926 927 /* Convert the input and output dimensions into floats for ease of math */ 928 float fWin = static_cast<float>(inSz.width); 929 float fHin = static_cast<float>(inSz.height); 930 float fWout = static_cast<float>(outSz.width); 931 float fHout = static_cast<float>(outSz.height); 932 933 /* Compute the one scale factor from (1) above, it will be the smaller of 934 * the two possibilities. */ 935 float scaleFactor = std::min( fHin / fHout, fWin / fWout ); 936 937 /* Since we are crop-and-zooming (as opposed to letter/pillar boxing) we can 938 * simply multiply the output by our scaleFactor to get the cropped input 939 * size. Note that at least one of {fWcrop, fHcrop} is going to wind up 940 * being {fWin, fHin} respectively because fHout or fWout cancels out the 941 * scaleFactor calculation above. 
942 * 943 * Specifically: 944 * if ( fHin / fHout ) < ( fWin / fWout ) we crop the sides off 945 * input, in which case 946 * scaleFactor = fHin / fHout 947 * fWcrop = fHin / fHout * fWout 948 * fHcrop = fHin 949 * 950 * Note that fWcrop <= fWin ( because ( fHin / fHout ) * fWout < fWin, which 951 * is just the inequality above with both sides multiplied by fWout ) 952 * 953 * on the other hand if ( fWin / fWout ) < ( fHin / fHout) we crop the top 954 * and the bottom off of input, and 955 * scaleFactor = fWin / fWout 956 * fWcrop = fWin 957 * fHcrop = fWin / fWout * fHout 958 */ 959 float fWcrop = scaleFactor * fWout; 960 float fHcrop = scaleFactor * fHout; 961 962 /* Convert to integer and truncate to an even number */ 963 Size cropSz = { 2*static_cast<uint32_t>(fWcrop/2.0f), 964 2*static_cast<uint32_t>(fHcrop/2.0f) }; 965 966 /* Convert to a centered rectangle with even top/left */ 967 IMapper::Rect inputCrop { 968 2*static_cast<int32_t>((inSz.width - cropSz.width)/4), 969 2*static_cast<int32_t>((inSz.height - cropSz.height)/4), 970 static_cast<int32_t>(cropSz.width), 971 static_cast<int32_t>(cropSz.height) }; 972 973 if ((inputCrop.top < 0) || 974 (inputCrop.top >= static_cast<int32_t>(inSz.height)) || 975 (inputCrop.left < 0) || 976 (inputCrop.left >= static_cast<int32_t>(inSz.width)) || 977 (inputCrop.width <= 0) || 978 (inputCrop.width + inputCrop.left > static_cast<int32_t>(inSz.width)) || 979 (inputCrop.height <= 0) || 980 (inputCrop.height + inputCrop.top > static_cast<int32_t>(inSz.height))) 981 { 982 ALOGE("%s: came up with really wrong crop rectangle",__FUNCTION__); 983 ALOGE("%s: input layout %dx%d for output size %dx%d", 984 __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); 985 ALOGE("%s: computed input crop +%d,+%d %dx%d", 986 __FUNCTION__, inputCrop.left, inputCrop.top, 987 inputCrop.width, inputCrop.height); 988 return -1; 989 } 990 991 YCbCrLayout inputLayout; 992 ret = in->getCroppedLayout(inputCrop, &inputLayout); 993 if (ret != 0) { 994 ALOGE("%s: failed to crop input layout %dx%d for output size %dx%d", 995 __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); 996 ALOGE("%s: computed input crop +%d,+%d %dx%d", 997 __FUNCTION__, inputCrop.left, inputCrop.top, 998 inputCrop.width, inputCrop.height); 999 return ret; 1000 } 1001 ALOGV("%s: crop input layout %dx%d for output size %dx%d", 1002 __FUNCTION__, inSz.width, inSz.height, outSz.width, outSz.height); 1003 ALOGV("%s: computed input crop +%d,+%d %dx%d", 1004 __FUNCTION__, inputCrop.left, inputCrop.top, 1005 inputCrop.width, inputCrop.height); 1006 1007 1008 // Scale 1009 YCbCrLayout outFullLayout; 1010 1011 ret = mYu12ThumbFrame->getLayout(&outFullLayout); 1012 if (ret != 0) { 1013 ALOGE("%s: failed to get output buffer layout", __FUNCTION__); 1014 return ret; 1015 } 1016 1017 1018 ret = libyuv::I420Scale( 1019 static_cast<uint8_t*>(inputLayout.y), 1020 inputLayout.yStride, 1021 static_cast<uint8_t*>(inputLayout.cb), 1022 inputLayout.cStride, 1023 static_cast<uint8_t*>(inputLayout.cr), 1024 inputLayout.cStride, 1025 inputCrop.width, 1026 inputCrop.height, 1027 static_cast<uint8_t*>(outFullLayout.y), 1028 outFullLayout.yStride, 1029 static_cast<uint8_t*>(outFullLayout.cb), 1030 outFullLayout.cStride, 1031 static_cast<uint8_t*>(outFullLayout.cr), 1032 outFullLayout.cStride, 1033 outSz.width, 1034 outSz.height, 1035 libyuv::FilterMode::kFilterNone); 1036 1037 if (ret != 0) { 1038 ALOGE("%s: failed to scale buffer from %dx%d to %dx%d. 
Ret %d", 1039 __FUNCTION__, inputCrop.width, inputCrop.height, 1040 outSz.width, outSz.height, ret); 1041 return ret; 1042 } 1043 1044 *out = outFullLayout; 1045 return 0; 1046} 1047 1048int ExternalCameraDeviceSession::OutputThread::formatConvertLocked( 1049 const YCbCrLayout& in, const YCbCrLayout& out, Size sz, uint32_t format) { 1050 int ret = 0; 1051 switch (format) { 1052 case V4L2_PIX_FMT_NV21: 1053 ret = libyuv::I420ToNV21( 1054 static_cast<uint8_t*>(in.y), 1055 in.yStride, 1056 static_cast<uint8_t*>(in.cb), 1057 in.cStride, 1058 static_cast<uint8_t*>(in.cr), 1059 in.cStride, 1060 static_cast<uint8_t*>(out.y), 1061 out.yStride, 1062 static_cast<uint8_t*>(out.cr), 1063 out.cStride, 1064 sz.width, 1065 sz.height); 1066 if (ret != 0) { 1067 ALOGE("%s: convert to NV21 buffer failed! ret %d", 1068 __FUNCTION__, ret); 1069 return ret; 1070 } 1071 break; 1072 case V4L2_PIX_FMT_NV12: 1073 ret = libyuv::I420ToNV12( 1074 static_cast<uint8_t*>(in.y), 1075 in.yStride, 1076 static_cast<uint8_t*>(in.cb), 1077 in.cStride, 1078 static_cast<uint8_t*>(in.cr), 1079 in.cStride, 1080 static_cast<uint8_t*>(out.y), 1081 out.yStride, 1082 static_cast<uint8_t*>(out.cb), 1083 out.cStride, 1084 sz.width, 1085 sz.height); 1086 if (ret != 0) { 1087 ALOGE("%s: convert to NV12 buffer failed! ret %d", 1088 __FUNCTION__, ret); 1089 return ret; 1090 } 1091 break; 1092 case V4L2_PIX_FMT_YVU420: // YV12 1093 case V4L2_PIX_FMT_YUV420: // YU12 1094 // TODO: maybe we can speed up here by somehow save this copy? 1095 ret = libyuv::I420Copy( 1096 static_cast<uint8_t*>(in.y), 1097 in.yStride, 1098 static_cast<uint8_t*>(in.cb), 1099 in.cStride, 1100 static_cast<uint8_t*>(in.cr), 1101 in.cStride, 1102 static_cast<uint8_t*>(out.y), 1103 out.yStride, 1104 static_cast<uint8_t*>(out.cb), 1105 out.cStride, 1106 static_cast<uint8_t*>(out.cr), 1107 out.cStride, 1108 sz.width, 1109 sz.height); 1110 if (ret != 0) { 1111 ALOGE("%s: copy to YV12 or YU12 buffer failed! ret %d", 1112 __FUNCTION__, ret); 1113 return ret; 1114 } 1115 break; 1116 case FLEX_YUV_GENERIC: 1117 // TODO: b/72261744 write to arbitrary flexible YUV layout. Slow. 1118 ALOGE("%s: unsupported flexible yuv layout" 1119 " y %p cb %p cr %p y_str %d c_str %d c_step %d", 1120 __FUNCTION__, out.y, out.cb, out.cr, 1121 out.yStride, out.cStride, out.chromaStep); 1122 return -1; 1123 default: 1124 ALOGE("%s: unknown YUV format 0x%x!", __FUNCTION__, format); 1125 return -1; 1126 } 1127 return 0; 1128} 1129 1130int ExternalCameraDeviceSession::OutputThread::encodeJpegYU12( 1131 const Size & inSz, const YCbCrLayout& inLayout, 1132 int jpegQuality, const void *app1Buffer, size_t app1Size, 1133 void *out, const size_t maxOutSize, size_t &actualCodeSize) 1134{ 1135 /* libjpeg is a C library so we use C-style "inheritance" by 1136 * putting libjpeg's jpeg_destination_mgr first in our custom 1137 * struct. 
This allows us to cast jpeg_destination_mgr* to 1138 * CustomJpegDestMgr* when we get it passed to us in a callback */ 1139 struct CustomJpegDestMgr { 1140 struct jpeg_destination_mgr mgr; 1141 JOCTET *mBuffer; 1142 size_t mBufferSize; 1143 size_t mEncodedSize; 1144 bool mSuccess; 1145 } dmgr; 1146 1147 jpeg_compress_struct cinfo = {}; 1148 jpeg_error_mgr jerr; 1149 1150 /* Initialize error handling with standard callbacks, but 1151 * then override output_message (to print to ALOG) and 1152 * error_exit to set a flag and print a message instead 1153 * of killing the whole process */ 1154 cinfo.err = jpeg_std_error(&jerr); 1155 1156 cinfo.err->output_message = [](j_common_ptr cinfo) { 1157 char buffer[JMSG_LENGTH_MAX]; 1158 1159 /* Create the message */ 1160 (*cinfo->err->format_message)(cinfo, buffer); 1161 ALOGE("libjpeg error: %s", buffer); 1162 }; 1163 cinfo.err->error_exit = [](j_common_ptr cinfo) { 1164 (*cinfo->err->output_message)(cinfo); 1165 if(cinfo->client_data) { 1166 auto & dmgr = 1167 *reinterpret_cast<CustomJpegDestMgr*>(cinfo->client_data); 1168 dmgr.mSuccess = false; 1169 } 1170 }; 1171 /* Now that we initialized some callbacks, let's create our compressor */ 1172 jpeg_create_compress(&cinfo); 1173 1174 /* Initialize our destination manager */ 1175 dmgr.mBuffer = static_cast<JOCTET*>(out); 1176 dmgr.mBufferSize = maxOutSize; 1177 dmgr.mEncodedSize = 0; 1178 dmgr.mSuccess = true; 1179 cinfo.client_data = static_cast<void*>(&dmgr); 1180 1181 /* These lambdas become C-style function pointers and as per C++11 spec 1182 * may not capture anything */ 1183 dmgr.mgr.init_destination = [](j_compress_ptr cinfo) { 1184 auto & dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest); 1185 dmgr.mgr.next_output_byte = dmgr.mBuffer; 1186 dmgr.mgr.free_in_buffer = dmgr.mBufferSize; 1187 ALOGV("%s:%d jpeg start: %p [%zu]", 1188 __FUNCTION__, __LINE__, dmgr.mBuffer, dmgr.mBufferSize); 1189 }; 1190 1191 dmgr.mgr.empty_output_buffer = [](j_compress_ptr cinfo __unused) { 1192 ALOGV("%s:%d Out of buffer", __FUNCTION__, __LINE__); 1193 return 0; 1194 }; 1195 1196 dmgr.mgr.term_destination = [](j_compress_ptr cinfo) { 1197 auto & dmgr = reinterpret_cast<CustomJpegDestMgr&>(*cinfo->dest); 1198 dmgr.mEncodedSize = dmgr.mBufferSize - dmgr.mgr.free_in_buffer; 1199 ALOGV("%s:%d Done with jpeg: %zu", __FUNCTION__, __LINE__, dmgr.mEncodedSize); 1200 }; 1201 cinfo.dest = reinterpret_cast<struct jpeg_destination_mgr*>(&dmgr); 1202 1203 /* We are going to be using JPEG in raw data mode, so we are passing 1204 * straight subsampled planar YCbCr and it will not touch our pixel 1205 * data or do any scaling or anything */ 1206 cinfo.image_width = inSz.width; 1207 cinfo.image_height = inSz.height; 1208 cinfo.input_components = 3; 1209 cinfo.in_color_space = JCS_YCbCr; 1210 1211 /* Initialize defaults and then override what we want */ 1212 jpeg_set_defaults(&cinfo); 1213 1214 jpeg_set_quality(&cinfo, jpegQuality, 1); 1215 jpeg_set_colorspace(&cinfo, JCS_YCbCr); 1216 cinfo.raw_data_in = 1; 1217 cinfo.dct_method = JDCT_IFAST; 1218 1219 /* Configure sampling factors. The sampling factor is JPEG subsampling 420 1220 * because the source format is YUV420. Note that libjpeg sampling factors 1221 * are... a little weird. 
Sampling of Y=2,U=1,V=1 means there is 1 U and 1222 * 1 V value for each 2x2 block of Y values */ 1223 cinfo.comp_info[0].h_samp_factor = 2; 1224 cinfo.comp_info[0].v_samp_factor = 2; 1225 cinfo.comp_info[1].h_samp_factor = 1; 1226 cinfo.comp_info[1].v_samp_factor = 1; 1227 cinfo.comp_info[2].h_samp_factor = 1; 1228 cinfo.comp_info[2].v_samp_factor = 1; 1229 1230 /* Let's not hardcode YUV420 in 6 places... 5 was enough */ 1231 int maxVSampFactor = std::max( { 1232 cinfo.comp_info[0].v_samp_factor, 1233 cinfo.comp_info[1].v_samp_factor, 1234 cinfo.comp_info[2].v_samp_factor 1235 }); 1236 int cVSubSampling = cinfo.comp_info[0].v_samp_factor / 1237 cinfo.comp_info[1].v_samp_factor; 1238 1239 /* Start the compressor */ 1240 jpeg_start_compress(&cinfo, TRUE); 1241 1242 /* Compute our macroblock height, so we can pad our input to be vertically 1243 * macroblock aligned. 1244 * TODO: Does it need to be horizontally MCU aligned too? */ 1245 1246 size_t mcuV = DCTSIZE*maxVSampFactor; 1247 size_t paddedHeight = mcuV * ((inSz.height + mcuV - 1) / mcuV); 1248 1249 /* libjpeg uses arrays of row pointers, which makes it really easy to pad 1250 * data vertically (unfortunately doesn't help horizontally) */ 1251 std::vector<JSAMPROW> yLines (paddedHeight); 1252 std::vector<JSAMPROW> cbLines(paddedHeight/cVSubSampling); 1253 std::vector<JSAMPROW> crLines(paddedHeight/cVSubSampling); 1254 1255 uint8_t *py = static_cast<uint8_t*>(inLayout.y); 1256 uint8_t *pcr = static_cast<uint8_t*>(inLayout.cr); 1257 uint8_t *pcb = static_cast<uint8_t*>(inLayout.cb); 1258 1259 for(uint32_t i = 0; i < paddedHeight; i++) 1260 { 1261 /* Once we are in the padding territory we still point to the last line 1262 * effectively replicating it several times ~ CLAMP_TO_EDGE */ 1263 int li = std::min(i, inSz.height - 1); 1264 yLines[i] = static_cast<JSAMPROW>(py + li * inLayout.yStride); 1265 if(i < paddedHeight / cVSubSampling) 1266 { 1267 crLines[i] = static_cast<JSAMPROW>(pcr + li * inLayout.cStride); 1268 cbLines[i] = static_cast<JSAMPROW>(pcb + li * inLayout.cStride); 1269 } 1270 } 1271 1272 /* If APP1 data was passed in, use it */ 1273 if(app1Buffer && app1Size) 1274 { 1275 jpeg_write_marker(&cinfo, JPEG_APP0 + 1, 1276 static_cast<const JOCTET*>(app1Buffer), app1Size); 1277 } 1278 1279 /* While we still have padded height left to go, keep giving it one 1280 * macroblock at a time. */ 1281 while (cinfo.next_scanline < cinfo.image_height) { 1282 const uint32_t batchSize = DCTSIZE * maxVSampFactor; 1283 const uint32_t nl = cinfo.next_scanline; 1284 JSAMPARRAY planes[3]{ &yLines[nl], 1285 &cbLines[nl/cVSubSampling], 1286 &crLines[nl/cVSubSampling] }; 1287 1288 uint32_t done = jpeg_write_raw_data(&cinfo, planes, batchSize); 1289 1290 if (done != batchSize) { 1291 ALOGE("%s: compressed %u lines, expected %u (total %u/%u)", 1292 __FUNCTION__, done, batchSize, cinfo.next_scanline, 1293 cinfo.image_height); 1294 return -1; 1295 } 1296 } 1297 1298 /* This will flush everything */ 1299 jpeg_finish_compress(&cinfo); 1300 1301 /* Grab the actual code size and set it */ 1302 actualCodeSize = dmgr.mEncodedSize; 1303 1304 return 0; 1305 } 1306 1307 /* 1308 * TODO: There needs to be a mechanism to discover allocated buffer size 1309 * in the HAL. 
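 * (Illustration, with hypothetical numbers: for a device whose largest
 * format is 8 MP with ANDROID_JPEG_MAX_SIZE = 3 MB, the interpolation in
 * getJpegBufferSize() below sizes a 2 MP BLOB stream at roughly
 * 256 KB + (2/8) * (3 MB - 256 KB) ~= 0.9 MB.)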
1310 * 1311 * This is very fragile because it is duplicated computation from: 1312 * frameworks/av/services/camera/libcameraservice/device3/Camera3Device.cpp 1313 * 1314 */ 1315 1316/* This assumes mSupportedFormats have all been declared as supporting 1317 * HAL_PIXEL_FORMAT_BLOB to the framework */ 1318Size ExternalCameraDeviceSession::getMaxJpegResolution() const { 1319 Size ret { 0, 0 }; 1320 for(auto & fmt : mSupportedFormats) { 1321 if(fmt.width * fmt.height > ret.width * ret.height) { 1322 ret = Size { fmt.width, fmt.height }; 1323 } 1324 } 1325 return ret; 1326} 1327 1328Size ExternalCameraDeviceSession::getMaxThumbResolution() const { 1329 Size thumbSize { 0, 0 }; 1330 camera_metadata_ro_entry entry = 1331 mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); 1332 for(uint32_t i = 0; i < entry.count; i += 2) { 1333 Size sz { static_cast<uint32_t>(entry.data.i32[i]), 1334 static_cast<uint32_t>(entry.data.i32[i+1]) }; 1335 if(sz.width * sz.height > thumbSize.width * thumbSize.height) { 1336 thumbSize = sz; 1337 } 1338 } 1339 1340 if (thumbSize.width * thumbSize.height == 0) { 1341 ALOGW("%s: non-zero thumbnail size not available", __FUNCTION__); 1342 } 1343 1344 return thumbSize; 1345} 1346 1347 1348ssize_t ExternalCameraDeviceSession::getJpegBufferSize( 1349 uint32_t width, uint32_t height) const { 1350 // Constant from camera3.h 1351 const ssize_t kMinJpegBufferSize = 256 * 1024 + sizeof(CameraBlob); 1352 // Get max jpeg size (area-wise). 1353 if (mMaxJpegResolution.width == 0) { 1354 ALOGE("%s: Do not have a single supported JPEG stream", 1355 __FUNCTION__); 1356 return BAD_VALUE; 1357 } 1358 1359 // Get max jpeg buffer size 1360 ssize_t maxJpegBufferSize = 0; 1361 camera_metadata_ro_entry jpegBufMaxSize = 1362 mCameraCharacteristics.find(ANDROID_JPEG_MAX_SIZE); 1363 if (jpegBufMaxSize.count == 0) { 1364 ALOGE("%s: Can't find maximum JPEG size in static metadata!", 1365 __FUNCTION__); 1366 return BAD_VALUE; 1367 } 1368 maxJpegBufferSize = jpegBufMaxSize.data.i32[0]; 1369 1370 if (maxJpegBufferSize <= kMinJpegBufferSize) { 1371 ALOGE("%s: ANDROID_JPEG_MAX_SIZE (%zd) <= kMinJpegBufferSize (%zd)", 1372 __FUNCTION__, maxJpegBufferSize, kMinJpegBufferSize); 1373 return BAD_VALUE; 1374 } 1375 1376 // Calculate final jpeg buffer size for the given resolution. 1377 float scaleFactor = ((float) (width * height)) / 1378 (mMaxJpegResolution.width * mMaxJpegResolution.height); 1379 ssize_t jpegBufferSize = scaleFactor * (maxJpegBufferSize - kMinJpegBufferSize) + 1380 kMinJpegBufferSize; 1381 if (jpegBufferSize > maxJpegBufferSize) { 1382 jpegBufferSize = maxJpegBufferSize; 1383 } 1384 1385 return jpegBufferSize; 1386} 1387 1388int ExternalCameraDeviceSession::OutputThread::createJpegLocked( 1389 HalStreamBuffer &halBuf, 1390 HalRequest &req) 1391{ 1392 int ret; 1393 auto lfail = [&](auto... 
args) { 1394 ALOGE(args...); 1395 1396 return 1; 1397 }; 1398 auto parent = mParent.promote(); 1399 if (parent == nullptr) { 1400 ALOGE("%s: session has been disconnected!", __FUNCTION__); 1401 return 1; 1402 } 1403 1404 ALOGV("%s: HAL buffer sid: %d bid: %" PRIu64 " w: %u h: %u", 1405 __FUNCTION__, halBuf.streamId, static_cast<uint64_t>(halBuf.bufferId), 1406 halBuf.width, halBuf.height); 1407 ALOGV("%s: HAL buffer fmt: %x usage: %" PRIx64 " ptr: %p", 1408 __FUNCTION__, halBuf.format, static_cast<uint64_t>(halBuf.usage), 1409 halBuf.bufPtr); 1410 ALOGV("%s: YV12 buffer %d x %d", 1411 __FUNCTION__, 1412 mYu12Frame->mWidth, mYu12Frame->mHeight); 1413 1414 int jpegQuality, thumbQuality; 1415 Size thumbSize; 1416 1417 if (req.setting.exists(ANDROID_JPEG_QUALITY)) { 1418 camera_metadata_entry entry = 1419 req.setting.find(ANDROID_JPEG_QUALITY); 1420 jpegQuality = entry.data.u8[0]; 1421 } else { 1422 return lfail("%s: ANDROID_JPEG_QUALITY not set",__FUNCTION__); 1423 } 1424 1425 if (req.setting.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) { 1426 camera_metadata_entry entry = 1427 req.setting.find(ANDROID_JPEG_THUMBNAIL_QUALITY); 1428 thumbQuality = entry.data.u8[0]; 1429 } else { 1430 return lfail( 1431 "%s: ANDROID_JPEG_THUMBNAIL_QUALITY not set", 1432 __FUNCTION__); 1433 } 1434 1435 if (req.setting.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) { 1436 camera_metadata_entry entry = 1437 req.setting.find(ANDROID_JPEG_THUMBNAIL_SIZE); 1438 thumbSize = Size { static_cast<uint32_t>(entry.data.i32[0]), 1439 static_cast<uint32_t>(entry.data.i32[1]) 1440 }; 1441 } else { 1442 return lfail( 1443 "%s: ANDROID_JPEG_THUMBNAIL_SIZE not set", __FUNCTION__); 1444 } 1445 1446 /* Cropped and scaled YU12 buffer for main and thumbnail */ 1447 YCbCrLayout yu12Main; 1448 Size jpegSize { halBuf.width, halBuf.height }; 1449 1450 /* Compute temporary buffer sizes accounting for the following: 1451 * thumbnail can't exceed APP1 size of 64K 1452 * main image needs to hold APP1, headers, and at most a poorly 1453 * compressed image */ 1454 const ssize_t maxThumbCodeSize = 64 * 1024; 1455 const ssize_t maxJpegCodeSize = parent->getJpegBufferSize(jpegSize.width, 1456 jpegSize.height); 1457 1458 /* Check that getJpegBufferSize did not return an error */ 1459 if (maxJpegCodeSize < 0) { 1460 return lfail( 1461 "%s: getJpegBufferSize returned %zd",__FUNCTION__,maxJpegCodeSize); 1462 } 1463 1464 1465 /* Hold actual thumbnail and main image code sizes */ 1466 size_t thumbCodeSize = 0, jpegCodeSize = 0; 1467 /* Temporary thumbnail code buffer */ 1468 std::vector<uint8_t> thumbCode(maxThumbCodeSize); 1469 1470 YCbCrLayout yu12Thumb; 1471 ret = cropAndScaleThumbLocked(mYu12Frame, thumbSize, &yu12Thumb); 1472 1473 if (ret != 0) { 1474 return lfail( 1475 "%s: crop and scale thumbnail failed!", __FUNCTION__); 1476 } 1477 1478 /* Scale and crop main jpeg */ 1479 ret = cropAndScaleLocked(mYu12Frame, jpegSize, &yu12Main); 1480 1481 if (ret != 0) { 1482 return lfail("%s: crop and scale main failed!", __FUNCTION__); 1483 } 1484 1485 /* Encode the thumbnail image */ 1486 ret = encodeJpegYU12(thumbSize, yu12Thumb, 1487 thumbQuality, 0, 0, 1488 &thumbCode[0], maxThumbCodeSize, thumbCodeSize); 1489 1490 if (ret != 0) { 1491 return lfail("%s: encodeJpegYU12 failed with %d",__FUNCTION__, ret); 1492 } 1493 1494 /* Combine camera characteristics with request settings to form EXIF 1495 * metadata */ 1496 common::V1_0::helper::CameraMetadata meta(parent->mCameraCharacteristics); 1497 meta.append(req.setting); 1498 1499 /* Generate EXIF object */ 1500 
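/* The APP1 segment built here carries the EXIF tags derived from the merged
 * metadata plus the JPEG-compressed thumbnail; it is handed to
 * encodeJpegYU12() below, which writes it into the main image as an APP1
 * marker segment via jpeg_write_marker(). */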
std::unique_ptr<ExifUtils> utils(ExifUtils::create()); 1501 /* Make sure it's initialized */ 1502 utils->initialize(); 1503 1504 utils->setFromMetadata(meta, jpegSize.width, jpegSize.height); 1505 1506 /* Check if we made a non-zero-sized thumbnail. Currently not possible 1507 * that we got this far and the code is size 0, but if this code moves 1508 * around it might become relevant again */ 1509 1510 ret = utils->generateApp1(thumbCodeSize ? &thumbCode[0] : 0, thumbCodeSize); 1511 1512 if (!ret) { 1513 return lfail("%s: generating APP1 failed", __FUNCTION__); 1514 } 1515 1516 /* Get internal buffer */ 1517 size_t exifDataSize = utils->getApp1Length(); 1518 const uint8_t* exifData = utils->getApp1Buffer(); 1519 1520 /* Lock the HAL jpeg code buffer */ 1521 void *bufPtr = sHandleImporter.lock( 1522 *(halBuf.bufPtr), halBuf.usage, maxJpegCodeSize); 1523 1524 if (!bufPtr) { 1525 return lfail("%s: could not lock %zu bytes", __FUNCTION__, maxJpegCodeSize); 1526 } 1527 1528 /* Encode the main jpeg image */ 1529 ret = encodeJpegYU12(jpegSize, yu12Main, 1530 jpegQuality, exifData, exifDataSize, 1531 bufPtr, maxJpegCodeSize, jpegCodeSize); 1532 1533 /* TODO: Not sure this belongs here, maybe better to pass jpegCodeSize out 1534 * and do this when returning buffer to parent */ 1535 CameraBlob blob { CameraBlobId::JPEG, static_cast<uint32_t>(jpegCodeSize) }; 1536 void *blobDst = 1537 reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(bufPtr) + 1538 maxJpegCodeSize - 1539 sizeof(CameraBlob)); 1540 memcpy(blobDst, &blob, sizeof(CameraBlob)); 1541 1542 /* Unlock the HAL jpeg code buffer */ 1543 int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); 1544 if (relFence > 0) { 1545 halBuf.acquireFence = relFence; 1546 } 1547 1548 /* Check if our JPEG actually succeeded */ 1549 if (ret != 0) { 1550 return lfail( 1551 "%s: encodeJpegYU12 failed with %d",__FUNCTION__, ret); 1552 } 1553 1554 ALOGV("%s: encoded JPEG (ret:%d) with Q:%d max size: %zu", 1555 __FUNCTION__, ret, jpegQuality, maxJpegCodeSize); 1556 1557 return 0; 1558 } 1559 1560 bool ExternalCameraDeviceSession::OutputThread::threadLoop() { 1561 HalRequest req; 1562 auto parent = mParent.promote(); 1563 if (parent == nullptr) { 1564 ALOGE("%s: session has been disconnected!", __FUNCTION__); 1565 return false; 1566 } 1567 1568 // TODO: maybe we need to set up a sensor thread to dq/enq v4l frames 1569 // regularly to prevent the v4l buffer queue from filling with stale buffers 1570 // when the app doesn't program a preview request 1571 waitForNextRequest(&req); 1572 if (req.frameIn == nullptr) { 1573 // No new request, wait again 1574 return true; 1575 } 1576 1577 auto onDeviceError = [&](auto... args) { 1578 ALOGE(args...); 1579 parent->notifyError( 1580 req.frameNumber, /*stream*/-1, ErrorCode::ERROR_DEVICE); 1581 signalRequestDone(); 1582 return false; 1583 }; 1584 1585 if (req.frameIn->mFourcc != V4L2_PIX_FMT_MJPEG) { 1586 return onDeviceError("%s: do not support V4L2 format %c%c%c%c", __FUNCTION__, 1587 req.frameIn->mFourcc & 0xFF, 1588 (req.frameIn->mFourcc >> 8) & 0xFF, 1589 (req.frameIn->mFourcc >> 16) & 0xFF, 1590 (req.frameIn->mFourcc >> 24) & 0xFF); 1591 } 1592 1593 std::unique_lock<std::mutex> lk(mBufferLock); 1594 // Convert input V4L2 frame to YU12 of the same size 1595 // TODO: see if we can save some computation by converting to YV12 here 1596 uint8_t* inData; 1597 size_t inDataSize; 1598 req.frameIn->map(&inData, &inDataSize); 1599 // TODO: profile 1600 // TODO: in some special case maybe we can decode jpg directly to gralloc output? 
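// libyuv parses the MJPEG payload mapped from the V4L2 buffer and decodes it
// directly into the planar YU12 intermediate frame. Source and destination
// dimensions are both the V4L2 stream size, so this step never scales.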
1601 int res = libyuv::MJPGToI420( 1602 inData, inDataSize, 1603 static_cast<uint8_t*>(mYu12FrameLayout.y), 1604 mYu12FrameLayout.yStride, 1605 static_cast<uint8_t*>(mYu12FrameLayout.cb), 1606 mYu12FrameLayout.cStride, 1607 static_cast<uint8_t*>(mYu12FrameLayout.cr), 1608 mYu12FrameLayout.cStride, 1609 mYu12Frame->mWidth, mYu12Frame->mHeight, 1610 mYu12Frame->mWidth, mYu12Frame->mHeight); 1611 1612 if (res != 0) { 1613 // For some webcams, the first few V4L2 frames might be malformed... 1614 ALOGE("%s: Convert V4L2 frame to YU12 failed! res %d", __FUNCTION__, res); 1615 lk.unlock(); 1616 Status st = parent->processCaptureRequestError(req); 1617 if (st != Status::OK) { 1618 return onDeviceError("%s: failed to process capture request error!", __FUNCTION__); 1619 } 1620 signalRequestDone(); 1621 return true; 1622 } 1623 1624 ALOGV("%s processing new request", __FUNCTION__); 1625 const int kSyncWaitTimeoutMs = 500; 1626 for (auto& halBuf : req.buffers) { 1627 if (halBuf.acquireFence != -1) { 1628 int ret = sync_wait(halBuf.acquireFence, kSyncWaitTimeoutMs); 1629 if (ret) { 1630 halBuf.fenceTimeout = true; 1631 } else { 1632 ::close(halBuf.acquireFence); 1633 halBuf.acquireFence = -1; 1634 } 1635 } 1636 1637 if (halBuf.fenceTimeout) { 1638 continue; 1639 } 1640 1641 // Gralloc lockYCbCr the buffer 1642 switch (halBuf.format) { 1643 case PixelFormat::BLOB: { 1644 int ret = createJpegLocked(halBuf, req); 1645 1646 if(ret != 0) { 1647 lk.unlock(); 1648 return onDeviceError("%s: createJpegLocked failed with %d", 1649 __FUNCTION__, ret); 1650 } 1651 } break; 1652 case PixelFormat::YCBCR_420_888: 1653 case PixelFormat::YV12: { 1654 IMapper::Rect outRect {0, 0, 1655 static_cast<int32_t>(halBuf.width), 1656 static_cast<int32_t>(halBuf.height)}; 1657 YCbCrLayout outLayout = sHandleImporter.lockYCbCr( 1658 *(halBuf.bufPtr), halBuf.usage, outRect); 1659 ALOGV("%s: outLayout y %p cb %p cr %p y_str %d c_str %d c_step %d", 1660 __FUNCTION__, outLayout.y, outLayout.cb, outLayout.cr, 1661 outLayout.yStride, outLayout.cStride, outLayout.chromaStep); 1662 1663 // Convert to output buffer size/format 1664 uint32_t outputFourcc = getFourCcFromLayout(outLayout); 1665 ALOGV("%s: converting to format %c%c%c%c", __FUNCTION__, 1666 outputFourcc & 0xFF, 1667 (outputFourcc >> 8) & 0xFF, 1668 (outputFourcc >> 16) & 0xFF, 1669 (outputFourcc >> 24) & 0xFF); 1670 1671 YCbCrLayout cropAndScaled; 1672 int ret = cropAndScaleLocked( 1673 mYu12Frame, 1674 Size { halBuf.width, halBuf.height }, 1675 &cropAndScaled); 1676 if (ret != 0) { 1677 lk.unlock(); 1678 return onDeviceError("%s: crop and scale failed!", __FUNCTION__); 1679 } 1680 1681 Size sz {halBuf.width, halBuf.height}; 1682 ret = formatConvertLocked(cropAndScaled, outLayout, sz, outputFourcc); 1683 if (ret != 0) { 1684 lk.unlock(); 1685 return onDeviceError("%s: format conversion failed!", __FUNCTION__); 1686 } 1687 int relFence = sHandleImporter.unlock(*(halBuf.bufPtr)); 1688 if (relFence > 0) { 1689 halBuf.acquireFence = relFence; 1690 } 1691 } break; 1692 default: 1693 lk.unlock(); 1694 return onDeviceError("%s: unknown output format %x", __FUNCTION__, halBuf.format); 1695 } 1696 } // for each buffer 1697 mScaledYu12Frames.clear(); 1698 1699 // Don't hold the lock while calling back to parent 1700 lk.unlock(); 1701 Status st = parent->processCaptureResult(req); 1702 if (st != Status::OK) { 1703 return onDeviceError("%s: failed to process capture result!", __FUNCTION__); 1704 } 1705 signalRequestDone(); 1706 return true; 1707 } 1708 1709 Status 
Status ExternalCameraDeviceSession::OutputThread::allocateIntermediateBuffers(
        const Size& v4lSize, const Size& thumbSize,
        const hidl_vec<Stream>& streams) {
    std::lock_guard<std::mutex> lk(mBufferLock);
    if (mScaledYu12Frames.size() != 0) {
        ALOGE("%s: intermediate buffer pool has %zu inflight buffers! (expect 0)",
                __FUNCTION__, mScaledYu12Frames.size());
        return Status::INTERNAL_ERROR;
    }

    // Allocating intermediate YU12 frame
    if (mYu12Frame == nullptr || mYu12Frame->mWidth != v4lSize.width ||
            mYu12Frame->mHeight != v4lSize.height) {
        mYu12Frame.clear();
        mYu12Frame = new AllocatedFrame(v4lSize.width, v4lSize.height);
        int ret = mYu12Frame->allocate(&mYu12FrameLayout);
        if (ret != 0) {
            ALOGE("%s: allocating YU12 frame failed!", __FUNCTION__);
            return Status::INTERNAL_ERROR;
        }
    }

    // Allocating intermediate YU12 thumbnail frame
    if (mYu12ThumbFrame == nullptr ||
            mYu12ThumbFrame->mWidth != thumbSize.width ||
            mYu12ThumbFrame->mHeight != thumbSize.height) {
        mYu12ThumbFrame.clear();
        mYu12ThumbFrame = new AllocatedFrame(thumbSize.width, thumbSize.height);
        int ret = mYu12ThumbFrame->allocate(&mYu12ThumbFrameLayout);
        if (ret != 0) {
            ALOGE("%s: allocating YU12 thumb frame failed!", __FUNCTION__);
            return Status::INTERNAL_ERROR;
        }
    }

    // Allocating scaled buffers
    for (const auto& stream : streams) {
        Size sz = {stream.width, stream.height};
        if (sz == v4lSize) {
            continue; // No intermediate buffer needed when the stream is the same size as the V4L2 frame
        }
        if (mIntermediateBuffers.count(sz) == 0) {
            // Create new intermediate buffer
            sp<AllocatedFrame> buf = new AllocatedFrame(stream.width, stream.height);
            int ret = buf->allocate();
            if (ret != 0) {
                ALOGE("%s: allocating intermediate YU12 frame %dx%d failed!",
                        __FUNCTION__, stream.width, stream.height);
                return Status::INTERNAL_ERROR;
            }
            mIntermediateBuffers[sz] = buf;
        }
    }

    // Remove unconfigured buffers
    auto it = mIntermediateBuffers.begin();
    while (it != mIntermediateBuffers.end()) {
        bool configured = false;
        auto sz = it->first;
        for (const auto& stream : streams) {
            if (stream.width == sz.width && stream.height == sz.height) {
                configured = true;
                break;
            }
        }
        if (configured) {
            it++;
        } else {
            it = mIntermediateBuffers.erase(it);
        }
    }
    return Status::OK;
}
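// Request queue handoff: submitRequest() runs on the HIDL binder thread and
// appends to mRequestList; threadLoop() drains the list via
// waitForNextRequest(). flush() steals the whole pending list, waits
// (bounded) for any in-flight request to finish, then reports each stolen
// request back to the parent as an error.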
Status ExternalCameraDeviceSession::OutputThread::submitRequest(const HalRequest& req) {
    std::unique_lock<std::mutex> lk(mRequestListLock);
    // TODO: reduce object copy in this path
    mRequestList.push_back(req);
    lk.unlock();
    mRequestCond.notify_one();
    return Status::OK;
}

void ExternalCameraDeviceSession::OutputThread::flush() {
    auto parent = mParent.promote();
    if (parent == nullptr) {
        ALOGE("%s: session has been disconnected!", __FUNCTION__);
        return;
    }

    std::unique_lock<std::mutex> lk(mRequestListLock);
    std::list<HalRequest> reqs = mRequestList;
    mRequestList.clear();
    if (mProcessingRequest) {
        std::chrono::seconds timeout = std::chrono::seconds(kFlushWaitTimeoutSec);
        auto st = mRequestDoneCond.wait_for(lk, timeout);
        if (st == std::cv_status::timeout) {
            ALOGE("%s: wait for inflight request finish timeout!", __FUNCTION__);
        }
    }

    lk.unlock();
    for (const auto& req : reqs) {
        parent->processCaptureRequestError(req);
    }
}

void ExternalCameraDeviceSession::OutputThread::waitForNextRequest(HalRequest* out) {
    if (out == nullptr) {
        ALOGE("%s: out is null", __FUNCTION__);
        return;
    }

    std::unique_lock<std::mutex> lk(mRequestListLock);
    int waitTimes = 0;
    while (mRequestList.empty()) {
        if (exitPending()) {
            return;
        }
        std::chrono::milliseconds timeout = std::chrono::milliseconds(kReqWaitTimeoutMs);
        auto st = mRequestCond.wait_for(lk, timeout);
        if (st == std::cv_status::timeout) {
            waitTimes++;
            if (waitTimes == kReqWaitTimesMax) {
                // no new request, return
                return;
            }
        }
    }
    *out = mRequestList.front();
    mRequestList.pop_front();
    mProcessingRequest = true;
    mProcessingFrameNumer = out->frameNumber;
}

void ExternalCameraDeviceSession::OutputThread::signalRequestDone() {
    std::unique_lock<std::mutex> lk(mRequestListLock);
    mProcessingRequest = false;
    mProcessingFrameNumer = 0;
    lk.unlock();
    mRequestDoneCond.notify_one();
}

void ExternalCameraDeviceSession::OutputThread::dump(int fd) {
    std::lock_guard<std::mutex> lk(mRequestListLock);
    if (mProcessingRequest) {
        dprintf(fd, "OutputThread processing frame %d\n", mProcessingFrameNumer);
    } else {
        dprintf(fd, "OutputThread not processing any frames\n");
    }
    dprintf(fd, "OutputThread request list contains frames: ");
    for (const auto& req : mRequestList) {
        dprintf(fd, "%d, ", req.frameNumber);
    }
    dprintf(fd, "\n");
}

void ExternalCameraDeviceSession::cleanupBuffersLocked(int id) {
    for (auto& pair : mCirculatingBuffers.at(id)) {
        sHandleImporter.freeBuffer(pair.second);
    }
    mCirculatingBuffers[id].clear();
    mCirculatingBuffers.erase(id);
}

void ExternalCameraDeviceSession::updateBufferCaches(const hidl_vec<BufferCache>& cachesToRemove) {
    Mutex::Autolock _l(mLock);
    for (auto& cache : cachesToRemove) {
        auto cbsIt = mCirculatingBuffers.find(cache.streamId);
        if (cbsIt == mCirculatingBuffers.end()) {
            // The stream could have been removed
            continue;
        }
        CirculatingBuffers& cbs = cbsIt->second;
        auto it = cbs.find(cache.bufferId);
        if (it != cbs.end()) {
            sHandleImporter.freeBuffer(it->second);
            cbs.erase(it);
        } else {
            ALOGE("%s: stream %d buffer %" PRIu64 " is not cached",
                    __FUNCTION__, cache.streamId, cache.bufferId);
        }
    }
}
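// A stream is supported only if it is an un-rotated OUTPUT stream, is not a
// depth stream, uses one of {BLOB (JFIF dataspace only),
// IMPLEMENTATION_DEFINED, YCBCR_420_888, YV12}, and its resolution exactly
// matches one of the V4L2 frame sizes reported by the device.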
bool ExternalCameraDeviceSession::isSupported(const Stream& stream) {
    int32_t ds = static_cast<int32_t>(stream.dataSpace);
    PixelFormat fmt = stream.format;
    uint32_t width = stream.width;
    uint32_t height = stream.height;
    // TODO: check usage flags

    if (stream.streamType != StreamType::OUTPUT) {
        ALOGE("%s: does not support non-output stream type", __FUNCTION__);
        return false;
    }

    if (stream.rotation != StreamRotation::ROTATION_0) {
        ALOGE("%s: does not support stream rotation", __FUNCTION__);
        return false;
    }

    if (ds & Dataspace::DEPTH) {
        ALOGI("%s: does not support depth output", __FUNCTION__);
        return false;
    }

    switch (fmt) {
        case PixelFormat::BLOB:
            if (ds != static_cast<int32_t>(Dataspace::V0_JFIF)) {
                ALOGI("%s: BLOB format does not support dataSpace %x", __FUNCTION__, ds);
                return false;
            }
            // Intentional fall through: BLOB with JFIF dataspace is supported
        case PixelFormat::IMPLEMENTATION_DEFINED:
        case PixelFormat::YCBCR_420_888:
        case PixelFormat::YV12:
            // TODO: check what dataspace we can support here.
            // intentional no-ops.
            break;
        default:
            ALOGI("%s: does not support format %x", __FUNCTION__, fmt);
            return false;
    }

    // Assume we can convert any V4L2 format to any of the supported output
    // formats for now, i.e., ignore v4l2Fmt.fourcc. This might need a more
    // careful check if we support more V4L2 formats in the future.
    for (const auto& v4l2Fmt : mSupportedFormats) {
        if (width == v4l2Fmt.width && height == v4l2Fmt.height) {
            return true;
        }
    }
    ALOGI("%s: resolution %dx%d is not supported", __FUNCTION__, width, height);
    return false;
}

int ExternalCameraDeviceSession::v4l2StreamOffLocked() {
    if (!mV4l2Streaming) {
        return OK;
    }

    {
        std::lock_guard<std::mutex> lk(mV4l2BufferLock);
        if (mNumDequeuedV4l2Buffers != 0) {
            ALOGE("%s: there are %zu inflight V4L buffers",
                    __FUNCTION__, mNumDequeuedV4l2Buffers);
            return -1;
        }
    }
    mV4L2BufferCount = 0;

    // VIDIOC_STREAMOFF
    v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMOFF, &capture_type)) < 0) {
        ALOGE("%s: STREAMOFF failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    // VIDIOC_REQBUFS: clear buffers
    v4l2_requestbuffers req_buffers{};
    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req_buffers.memory = V4L2_MEMORY_MMAP;
    req_buffers.count = 0;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) {
        ALOGE("%s: REQBUFS failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    mV4l2Streaming = false;
    return OK;
}
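// Standard V4L2 MMAP capture setup sequence: stop any existing stream
// (STREAMOFF + REQBUFS(0) above), negotiate the format (S_FMT), set the
// frame rate (G_PARM/S_PARM), request kernel buffers (REQBUFS), query and
// queue them (QUERYBUF/QBUF), then start streaming (STREAMON).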
int ExternalCameraDeviceSession::configureV4l2StreamLocked(const SupportedV4L2Format& v4l2Fmt) {
    int ret = v4l2StreamOffLocked();
    if (ret != OK) {
        ALOGE("%s: stop v4l2 streaming failed: ret %d", __FUNCTION__, ret);
        return ret;
    }

    // VIDIOC_S_FMT w/h/fmt
    v4l2_format fmt{};
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = v4l2Fmt.width;
    fmt.fmt.pix.height = v4l2Fmt.height;
    fmt.fmt.pix.pixelformat = v4l2Fmt.fourcc;
    ret = TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_FMT, &fmt));
    if (ret < 0) {
        ALOGE("%s: S_FMT ioctl failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    if (v4l2Fmt.width != fmt.fmt.pix.width || v4l2Fmt.height != fmt.fmt.pix.height ||
            v4l2Fmt.fourcc != fmt.fmt.pix.pixelformat) {
        ALOGE("%s: S_FMT expect %c%c%c%c %dx%d, got %c%c%c%c %dx%d instead!", __FUNCTION__,
                v4l2Fmt.fourcc & 0xFF,
                (v4l2Fmt.fourcc >> 8) & 0xFF,
                (v4l2Fmt.fourcc >> 16) & 0xFF,
                (v4l2Fmt.fourcc >> 24) & 0xFF,
                v4l2Fmt.width, v4l2Fmt.height,
                fmt.fmt.pix.pixelformat & 0xFF,
                (fmt.fmt.pix.pixelformat >> 8) & 0xFF,
                (fmt.fmt.pix.pixelformat >> 16) & 0xFF,
                (fmt.fmt.pix.pixelformat >> 24) & 0xFF,
                fmt.fmt.pix.width, fmt.fmt.pix.height);
        return -EINVAL;
    }
    uint32_t bufferSize = fmt.fmt.pix.sizeimage;
    ALOGI("%s: V4L2 buffer size is %d", __FUNCTION__, bufferSize);

    float maxFps = -1.f;
    float fps = 1000.f;
    const float kDefaultFps = 30.f;
    // Try to pick the slowest fps that is at least 30; fall back to the
    // fastest fps the format supports if none reaches 30
    for (const auto& fr : v4l2Fmt.frameRates) {
        double f = fr.getDouble();
        if (maxFps < f) {
            maxFps = f;
        }
        if (f >= kDefaultFps && f < fps) {
            fps = f;
        }
    }
    if (fps == 1000.f) {
        fps = maxFps;
    }
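    // V4L2 expresses the frame rate as a frame *interval* fraction
    // (timeperframe = numerator/denominator seconds per frame), so the float
    // fps is approximated by fixing the numerator and scaling the
    // denominator. For example, fps = 29.97 with kFrameRatePrecision = 10000
    // yields timeperframe = 10000/299700 s, i.e. ~1/29.97 s per frame.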
    // VIDIOC_G_PARM/VIDIOC_S_PARM: set fps
    v4l2_streamparm streamparm = { .type = V4L2_BUF_TYPE_VIDEO_CAPTURE };
    // The following line checks that the driver knows about framerate get/set.
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_G_PARM, &streamparm)) >= 0) {
        // Now check if the device is able to accept a capture framerate set.
        if (streamparm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) {
            // |fps| is a float; approximate it by a fraction.
            const int kFrameRatePrecision = 10000;
            streamparm.parm.capture.timeperframe.numerator = kFrameRatePrecision;
            streamparm.parm.capture.timeperframe.denominator =
                (fps * kFrameRatePrecision);

            if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_S_PARM, &streamparm)) < 0) {
                ALOGE("%s: failed to set framerate to %f", __FUNCTION__, fps);
                return UNKNOWN_ERROR;
            }
        }
    }
    float retFps = static_cast<float>(streamparm.parm.capture.timeperframe.denominator) /
            streamparm.parm.capture.timeperframe.numerator;
    if (std::fabs(fps - retFps) > std::numeric_limits<float>::epsilon()) {
        ALOGE("%s: expect fps %f, got %f instead", __FUNCTION__, fps, retFps);
        return BAD_VALUE;
    }

    uint32_t v4lBufferCount = (fps >= kDefaultFps) ?
            mCfg.numVideoBuffers : mCfg.numStillBuffers;
    // VIDIOC_REQBUFS: create buffers
    v4l2_requestbuffers req_buffers{};
    req_buffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req_buffers.memory = V4L2_MEMORY_MMAP;
    req_buffers.count = v4lBufferCount;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_REQBUFS, &req_buffers)) < 0) {
        ALOGE("%s: VIDIOC_REQBUFS failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    // The driver may return more buffers than requested if it needs more to
    // operate; getting fewer than requested is an error
    if (req_buffers.count < v4lBufferCount) {
        ALOGE("%s: VIDIOC_REQBUFS expected %d buffers, got %d instead",
                __FUNCTION__, v4lBufferCount, req_buffers.count);
        return NO_MEMORY;
    }

    // VIDIOC_QUERYBUF: get buffer offset in the V4L2 fd
    // VIDIOC_QBUF: send buffer to driver
    mV4L2BufferCount = req_buffers.count;
    for (uint32_t i = 0; i < req_buffers.count; i++) {
        v4l2_buffer buffer = {
            .type = V4L2_BUF_TYPE_VIDEO_CAPTURE,
            .index = i,
            .memory = V4L2_MEMORY_MMAP};

        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QUERYBUF, &buffer)) < 0) {
            ALOGE("%s: QUERYBUF %d failed: %s", __FUNCTION__, i, strerror(errno));
            return -errno;
        }

        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
            ALOGE("%s: QBUF %d failed: %s", __FUNCTION__, i, strerror(errno));
            return -errno;
        }
    }

    // VIDIOC_STREAMON: start streaming
    v4l2_buf_type capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_STREAMON, &capture_type)) < 0) {
        ALOGE("%s: VIDIOC_STREAMON failed: %s", __FUNCTION__, strerror(errno));
        return -errno;
    }

    // Swallow first few frames after streamOn to account for bad frames from some devices
    for (int i = 0; i < kBadFramesAfterStreamOn; i++) {
        v4l2_buffer buffer{};
        buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buffer.memory = V4L2_MEMORY_MMAP;
        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_DQBUF, &buffer)) < 0) {
            ALOGE("%s: DQBUF failed: %s", __FUNCTION__, strerror(errno));
            return -errno;
        }

        if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) {
            ALOGE("%s: QBUF index %d failed: %s", __FUNCTION__, buffer.index, strerror(errno));
            return -errno;
        }
    }

    mV4l2StreamingFmt = v4l2Fmt;
    mV4l2Streaming = true;
    return OK;
}
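// Dequeues one filled V4L2 buffer and wraps it in a V4L2Frame. If every
// kernel buffer is currently dequeued, this blocks (bounded) until
// enqueueV4l2Frame() returns one. The shutter timestamp comes from the
// driver when it reports a monotonic timestamp; otherwise it is sampled here.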
buf flag 0x%x", __FUNCTION__, buffer.flags); 2169 // TODO: try to dequeue again 2170 } 2171 2172 if (buffer.flags & V4L2_BUF_FLAG_TIMESTAMP_MONOTONIC) { 2173 // Ideally we should also check for V4L2_BUF_FLAG_TSTAMP_SRC_SOE, but 2174 // even V4L2_BUF_FLAG_TSTAMP_SRC_EOF is better than capture a timestamp now 2175 *shutterTs = static_cast<nsecs_t>(buffer.timestamp.tv_sec)*1000000000LL + 2176 buffer.timestamp.tv_usec * 1000LL; 2177 } else { 2178 *shutterTs = systemTime(SYSTEM_TIME_MONOTONIC); 2179 } 2180 2181 { 2182 std::lock_guard<std::mutex> lk(mV4l2BufferLock); 2183 mNumDequeuedV4l2Buffers++; 2184 } 2185 return new V4L2Frame( 2186 mV4l2StreamingFmt.width, mV4l2StreamingFmt.height, mV4l2StreamingFmt.fourcc, 2187 buffer.index, mV4l2Fd.get(), buffer.bytesused, buffer.m.offset); 2188} 2189 2190void ExternalCameraDeviceSession::enqueueV4l2Frame(const sp<V4L2Frame>& frame) { 2191 { 2192 // Release mLock before acquiring mV4l2BufferLock to avoid potential 2193 // deadlock 2194 Mutex::Autolock _l(mLock); 2195 frame->unmap(); 2196 v4l2_buffer buffer{}; 2197 buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; 2198 buffer.memory = V4L2_MEMORY_MMAP; 2199 buffer.index = frame->mBufferIndex; 2200 if (TEMP_FAILURE_RETRY(ioctl(mV4l2Fd.get(), VIDIOC_QBUF, &buffer)) < 0) { 2201 ALOGE("%s: QBUF index %d fails: %s", __FUNCTION__, 2202 frame->mBufferIndex, strerror(errno)); 2203 return; 2204 } 2205 } 2206 2207 { 2208 std::lock_guard<std::mutex> lk(mV4l2BufferLock); 2209 mNumDequeuedV4l2Buffers--; 2210 } 2211 mV4L2BufferReturned.notify_one(); 2212} 2213 2214Status ExternalCameraDeviceSession::configureStreams( 2215 const V3_2::StreamConfiguration& config, V3_3::HalStreamConfiguration* out) { 2216 if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) { 2217 ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode); 2218 return Status::ILLEGAL_ARGUMENT; 2219 } 2220 2221 if (config.streams.size() == 0) { 2222 ALOGE("%s: cannot configure zero stream", __FUNCTION__); 2223 return Status::ILLEGAL_ARGUMENT; 2224 } 2225 2226 int numProcessedStream = 0; 2227 int numStallStream = 0; 2228 for (const auto& stream : config.streams) { 2229 // Check if the format/width/height combo is supported 2230 if (!isSupported(stream)) { 2231 return Status::ILLEGAL_ARGUMENT; 2232 } 2233 if (stream.format == PixelFormat::BLOB) { 2234 numStallStream++; 2235 } else { 2236 numProcessedStream++; 2237 } 2238 } 2239 2240 if (numProcessedStream > kMaxProcessedStream) { 2241 ALOGE("%s: too many processed streams (expect <= %d, got %d)", __FUNCTION__, 2242 kMaxProcessedStream, numProcessedStream); 2243 return Status::ILLEGAL_ARGUMENT; 2244 } 2245 2246 if (numStallStream > kMaxStallStream) { 2247 ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__, 2248 kMaxStallStream, numStallStream); 2249 return Status::ILLEGAL_ARGUMENT; 2250 } 2251 2252 Status status = initStatus(); 2253 if (status != Status::OK) { 2254 return status; 2255 } 2256 2257 Mutex::Autolock _l(mLock); 2258 if (!mInflightFrames.empty()) { 2259 ALOGE("%s: trying to configureStreams while there are still %zu inflight frames!", 2260 __FUNCTION__, mInflightFrames.size()); 2261 return Status::INTERNAL_ERROR; 2262 } 2263 2264 // Add new streams 2265 for (const auto& stream : config.streams) { 2266 if (mStreamMap.count(stream.id) == 0) { 2267 mStreamMap[stream.id] = stream; 2268 mCirculatingBuffers.emplace(stream.id, CirculatingBuffers{}); 2269 } 2270 } 2271 2272 // Cleanup removed streams 2273 for(auto it = mStreamMap.begin(); it != 
Status ExternalCameraDeviceSession::configureStreams(
        const V3_2::StreamConfiguration& config, V3_3::HalStreamConfiguration* out) {
    if (config.operationMode != StreamConfigurationMode::NORMAL_MODE) {
        ALOGE("%s: unsupported operation mode: %d", __FUNCTION__, config.operationMode);
        return Status::ILLEGAL_ARGUMENT;
    }

    if (config.streams.size() == 0) {
        ALOGE("%s: cannot configure zero stream", __FUNCTION__);
        return Status::ILLEGAL_ARGUMENT;
    }

    int numProcessedStream = 0;
    int numStallStream = 0;
    for (const auto& stream : config.streams) {
        // Check if the format/width/height combo is supported
        if (!isSupported(stream)) {
            return Status::ILLEGAL_ARGUMENT;
        }
        if (stream.format == PixelFormat::BLOB) {
            numStallStream++;
        } else {
            numProcessedStream++;
        }
    }

    if (numProcessedStream > kMaxProcessedStream) {
        ALOGE("%s: too many processed streams (expect <= %d, got %d)", __FUNCTION__,
                kMaxProcessedStream, numProcessedStream);
        return Status::ILLEGAL_ARGUMENT;
    }

    if (numStallStream > kMaxStallStream) {
        ALOGE("%s: too many stall streams (expect <= %d, got %d)", __FUNCTION__,
                kMaxStallStream, numStallStream);
        return Status::ILLEGAL_ARGUMENT;
    }

    Status status = initStatus();
    if (status != Status::OK) {
        return status;
    }

    Mutex::Autolock _l(mLock);
    if (!mInflightFrames.empty()) {
        ALOGE("%s: trying to configureStreams while there are still %zu inflight frames!",
                __FUNCTION__, mInflightFrames.size());
        return Status::INTERNAL_ERROR;
    }

    // Add new streams
    for (const auto& stream : config.streams) {
        if (mStreamMap.count(stream.id) == 0) {
            mStreamMap[stream.id] = stream;
            mCirculatingBuffers.emplace(stream.id, CirculatingBuffers{});
        }
    }

    // Cleanup removed streams
    for (auto it = mStreamMap.begin(); it != mStreamMap.end();) {
        int id = it->first;
        bool found = false;
        for (const auto& stream : config.streams) {
            if (id == stream.id) {
                found = true;
                break;
            }
        }
        if (!found) {
            // Unmap all buffers of deleted stream
            cleanupBuffersLocked(id);
            it = mStreamMap.erase(it);
        } else {
            ++it;
        }
    }

    // Now select a V4L2 format to produce all output streams
    float desiredAr = (mCroppingType == VERTICAL) ? kMaxAspectRatio : kMinAspectRatio;
    uint32_t maxDim = 0;
    for (const auto& stream : config.streams) {
        float aspectRatio = ASPECT_RATIO(stream);
        if ((mCroppingType == VERTICAL && aspectRatio < desiredAr) ||
                (mCroppingType == HORIZONTAL && aspectRatio > desiredAr)) {
            desiredAr = aspectRatio;
        }

        // The dimension that's not cropped
        uint32_t dim = (mCroppingType == VERTICAL) ? stream.width : stream.height;
        if (dim > maxDim) {
            maxDim = dim;
        }
    }
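    // The selected V4L2 format must be at least maxDim in the uncropped
    // dimension (width for VERTICAL cropping, height for HORIZONTAL) so that
    // every stream can be produced by cropping/scaling without upscaling that
    // dimension.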
"width" : "height", 2339 maxDim, desiredAr); 2340 return Status::ILLEGAL_ARGUMENT; 2341 } 2342 2343 if (configureV4l2StreamLocked(v4l2Fmt) != 0) { 2344 ALOGE("V4L configuration failed!, format:%c%c%c%c, w %d, h %d", 2345 v4l2Fmt.fourcc & 0xFF, 2346 (v4l2Fmt.fourcc >> 8) & 0xFF, 2347 (v4l2Fmt.fourcc >> 16) & 0xFF, 2348 (v4l2Fmt.fourcc >> 24) & 0xFF, 2349 v4l2Fmt.width, v4l2Fmt.height); 2350 return Status::INTERNAL_ERROR; 2351 } 2352 2353 Size v4lSize = {v4l2Fmt.width, v4l2Fmt.height}; 2354 Size thumbSize { 0, 0 }; 2355 camera_metadata_ro_entry entry = 2356 mCameraCharacteristics.find(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES); 2357 for(uint32_t i = 0; i < entry.count; i += 2) { 2358 Size sz { static_cast<uint32_t>(entry.data.i32[i]), 2359 static_cast<uint32_t>(entry.data.i32[i+1]) }; 2360 if(sz.width * sz.height > thumbSize.width * thumbSize.height) { 2361 thumbSize = sz; 2362 } 2363 } 2364 2365 if (thumbSize.width * thumbSize.height == 0) { 2366 ALOGE("%s: non-zero thumbnail size not available", __FUNCTION__); 2367 return Status::INTERNAL_ERROR; 2368 } 2369 2370 status = mOutputThread->allocateIntermediateBuffers(v4lSize, 2371 mMaxThumbResolution, config.streams); 2372 if (status != Status::OK) { 2373 ALOGE("%s: allocating intermediate buffers failed!", __FUNCTION__); 2374 return status; 2375 } 2376 2377 out->streams.resize(config.streams.size()); 2378 for (size_t i = 0; i < config.streams.size(); i++) { 2379 out->streams[i].overrideDataSpace = config.streams[i].dataSpace; 2380 out->streams[i].v3_2.id = config.streams[i].id; 2381 // TODO: double check should we add those CAMERA flags 2382 mStreamMap[config.streams[i].id].usage = 2383 out->streams[i].v3_2.producerUsage = config.streams[i].usage | 2384 BufferUsage::CPU_WRITE_OFTEN | 2385 BufferUsage::CAMERA_OUTPUT; 2386 out->streams[i].v3_2.consumerUsage = 0; 2387 out->streams[i].v3_2.maxBuffers = mV4L2BufferCount; 2388 2389 switch (config.streams[i].format) { 2390 case PixelFormat::BLOB: 2391 case PixelFormat::YCBCR_420_888: 2392 case PixelFormat::YV12: // Used by SurfaceTexture 2393 // No override 2394 out->streams[i].v3_2.overrideFormat = config.streams[i].format; 2395 break; 2396 case PixelFormat::IMPLEMENTATION_DEFINED: 2397 // Override based on VIDEO or not 2398 out->streams[i].v3_2.overrideFormat = 2399 (config.streams[i].usage & BufferUsage::VIDEO_ENCODER) ? 
    out->streams.resize(config.streams.size());
    for (size_t i = 0; i < config.streams.size(); i++) {
        out->streams[i].overrideDataSpace = config.streams[i].dataSpace;
        out->streams[i].v3_2.id = config.streams[i].id;
        // TODO: double check should we add those CAMERA flags
        mStreamMap[config.streams[i].id].usage =
                out->streams[i].v3_2.producerUsage = config.streams[i].usage |
                BufferUsage::CPU_WRITE_OFTEN |
                BufferUsage::CAMERA_OUTPUT;
        out->streams[i].v3_2.consumerUsage = 0;
        out->streams[i].v3_2.maxBuffers = mV4L2BufferCount;

        switch (config.streams[i].format) {
            case PixelFormat::BLOB:
            case PixelFormat::YCBCR_420_888:
            case PixelFormat::YV12: // Used by SurfaceTexture
                // No override
                out->streams[i].v3_2.overrideFormat = config.streams[i].format;
                break;
            case PixelFormat::IMPLEMENTATION_DEFINED:
                // Override based on VIDEO or not
                out->streams[i].v3_2.overrideFormat =
                        (config.streams[i].usage & BufferUsage::VIDEO_ENCODER) ?
                        PixelFormat::YCBCR_420_888 : PixelFormat::YV12;
                // Save the overridden format in mStreamMap
                mStreamMap[config.streams[i].id].format = out->streams[i].v3_2.overrideFormat;
                break;
            default:
                ALOGE("%s: unsupported format 0x%x", __FUNCTION__, config.streams[i].format);
                return Status::ILLEGAL_ARGUMENT;
        }
    }

    mFirstRequest = true;
    return Status::OK;
}

bool ExternalCameraDeviceSession::isClosed() {
    Mutex::Autolock _l(mLock);
    return mClosed;
}

#define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
#define UPDATE(md, tag, data, size) \
do { \
    if ((md).update((tag), (data), (size))) { \
        ALOGE("Update " #tag " failed!"); \
        return BAD_VALUE; \
    } \
} while (0)

status_t ExternalCameraDeviceSession::initDefaultRequests() {
    ::android::hardware::camera::common::V1_0::helper::CameraMetadata md;

    const uint8_t aberrationMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
    UPDATE(md, ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &aberrationMode, 1);

    const int32_t exposureCompensation = 0;
    UPDATE(md, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &exposureCompensation, 1);

    const uint8_t videoStabilizationMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
    UPDATE(md, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &videoStabilizationMode, 1);

    const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AWB_MODE, &awbMode, 1);

    const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
    UPDATE(md, ANDROID_CONTROL_AE_MODE, &aeMode, 1);

    const uint8_t aePrecaptureTrigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
    UPDATE(md, ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &aePrecaptureTrigger, 1);

    const uint8_t afMode = ANDROID_CONTROL_AF_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AF_MODE, &afMode, 1);

    const uint8_t afTrigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
    UPDATE(md, ANDROID_CONTROL_AF_TRIGGER, &afTrigger, 1);

    const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_DISABLED;
    UPDATE(md, ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);

    const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
    UPDATE(md, ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);

    const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
    UPDATE(md, ANDROID_FLASH_MODE, &flashMode, 1);

    const int32_t thumbnailSize[] = {240, 180};
    UPDATE(md, ANDROID_JPEG_THUMBNAIL_SIZE, thumbnailSize, 2);

    const uint8_t jpegQuality = 90;
    UPDATE(md, ANDROID_JPEG_QUALITY, &jpegQuality, 1);
    UPDATE(md, ANDROID_JPEG_THUMBNAIL_QUALITY, &jpegQuality, 1);

    const int32_t jpegOrientation = 0;
    UPDATE(md, ANDROID_JPEG_ORIENTATION, &jpegOrientation, 1);

    const uint8_t oisMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
    UPDATE(md, ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &oisMode, 1);

    const uint8_t nrMode = ANDROID_NOISE_REDUCTION_MODE_OFF;
    UPDATE(md, ANDROID_NOISE_REDUCTION_MODE, &nrMode, 1);

    const uint8_t fdMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_FACE_DETECT_MODE, &fdMode, 1);

    const uint8_t hotpixelMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotpixelMode, 1);
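    // Pick the default AE target FPS range: prefer a fixed 30fps range if any
    // supported format can do 30fps; otherwise fall back to the fastest frame
    // rate found across all supported formats.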
    bool support30Fps = false;
    int32_t maxFps = std::numeric_limits<int32_t>::min();
    for (const auto& supportedFormat : mSupportedFormats) {
        for (const auto& fr : supportedFormat.frameRates) {
            int32_t framerateInt = static_cast<int32_t>(fr.getDouble());
            if (maxFps < framerateInt) {
                maxFps = framerateInt;
            }
            if (framerateInt == 30) {
                support30Fps = true;
                break;
            }
        }
        if (support30Fps) {
            break;
        }
    }
    int32_t defaultFramerate = support30Fps ? 30 : maxFps;
    int32_t defaultFpsRange[] = {defaultFramerate, defaultFramerate};
    UPDATE(md, ANDROID_CONTROL_AE_TARGET_FPS_RANGE, defaultFpsRange, ARRAY_SIZE(defaultFpsRange));

    uint8_t antibandingMode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibandingMode, 1);

    const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
    UPDATE(md, ANDROID_CONTROL_MODE, &controlMode, 1);

    auto requestTemplates = hidl_enum_iterator<RequestTemplate>();
    for (RequestTemplate type : requestTemplates) {
        ::android::hardware::camera::common::V1_0::helper::CameraMetadata mdCopy = md;
        uint8_t intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
        switch (type) {
            case RequestTemplate::PREVIEW:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
                break;
            case RequestTemplate::STILL_CAPTURE:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
                break;
            case RequestTemplate::VIDEO_RECORD:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
                break;
            case RequestTemplate::VIDEO_SNAPSHOT:
                intent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
                break;
            default:
                ALOGV("%s: unsupported RequestTemplate type %d", __FUNCTION__, type);
                continue;
        }
        UPDATE(mdCopy, ANDROID_CONTROL_CAPTURE_INTENT, &intent, 1);

        camera_metadata_t* rawMd = mdCopy.release();
        CameraMetadata hidlMd;
        hidlMd.setToExternal(
                (uint8_t*) rawMd, get_camera_metadata_size(rawMd));
        mDefaultRequests[type] = hidlMd;
        free_camera_metadata(rawMd);
    }

    return OK;
}

status_t ExternalCameraDeviceSession::fillCaptureResult(
        common::V1_0::helper::CameraMetadata &md, nsecs_t timestamp) {
    // android.control
    // For USB cameras, we don't know the AE state. Set the state to converged
    // to indicate the frame should be good to use, so apps don't have to wait
    // for the AE state to settle.
    const uint8_t aeState = ANDROID_CONTROL_AE_STATE_CONVERGED;
    UPDATE(md, ANDROID_CONTROL_AE_STATE, &aeState, 1);

    const uint8_t ae_lock = ANDROID_CONTROL_AE_LOCK_OFF;
    UPDATE(md, ANDROID_CONTROL_AE_LOCK, &ae_lock, 1);

    bool afTrigger = mAfTrigger;
    if (md.exists(ANDROID_CONTROL_AF_TRIGGER)) {
        Mutex::Autolock _l(mLock);
        camera_metadata_entry entry = md.find(ANDROID_CONTROL_AF_TRIGGER);
        if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_START) {
            mAfTrigger = afTrigger = true;
        } else if (entry.data.u8[0] == ANDROID_CONTROL_AF_TRIGGER_CANCEL) {
            mAfTrigger = afTrigger = false;
        }
    }

    // The USB camera handles everything itself and we have no control over
    // AF, so we simply fake the AF metadata based on the request received
    // here: report FOCUSED_LOCKED after an AF trigger, INACTIVE otherwise.
    uint8_t afState;
    if (afTrigger) {
        afState = ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED;
    } else {
        afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
    }
    UPDATE(md, ANDROID_CONTROL_AF_STATE, &afState, 1);

    // Set the AWB state to converged to indicate the frame should be good to use.
    const uint8_t awbState = ANDROID_CONTROL_AWB_STATE_CONVERGED;
    UPDATE(md, ANDROID_CONTROL_AWB_STATE, &awbState, 1);

    const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
    UPDATE(md, ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);

    camera_metadata_ro_entry active_array_size =
        mCameraCharacteristics.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);

    if (active_array_size.count == 0) {
        ALOGE("%s: cannot find active array size!", __FUNCTION__);
        return -EINVAL;
    }

    const uint8_t flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
    UPDATE(md, ANDROID_FLASH_STATE, &flashState, 1);

    // android.scaler
    const int32_t crop_region[] = {
        active_array_size.data.i32[0], active_array_size.data.i32[1],
        active_array_size.data.i32[2], active_array_size.data.i32[3],
    };
    UPDATE(md, ANDROID_SCALER_CROP_REGION, crop_region, ARRAY_SIZE(crop_region));

    // android.sensor
    UPDATE(md, ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);

    // android.statistics
    const uint8_t lensShadingMapMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
    UPDATE(md, ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMapMode, 1);

    const uint8_t sceneFlicker = ANDROID_STATISTICS_SCENE_FLICKER_NONE;
    UPDATE(md, ANDROID_STATISTICS_SCENE_FLICKER, &sceneFlicker, 1);

    return OK;
}

#undef ARRAY_SIZE
#undef UPDATE

} // namespace implementation
} // namespace V3_4
} // namespace device
} // namespace camera
} // namespace hardware
} // namespace android