/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-ZslProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0
//#define LOG_NNDEBUG 0

#ifdef LOG_NNDEBUG
#define ALOGVV(...) ALOGV(__VA_ARGS__)
#else
#define ALOGVV(...) if (0) ALOGV(__VA_ARGS__)
#endif

#include <inttypes.h>

#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/ZslProcessor.h"
#include "device3/Camera3Device.h"

typedef android::RingBufferConsumer::PinnedBufferItem PinnedBufferItem;

namespace android {
namespace camera2 {

namespace {
struct TimestampFinder : public RingBufferConsumer::RingBufferComparator {
    typedef RingBufferConsumer::BufferInfo BufferInfo;

    enum {
        SELECT_I1 = -1,
        SELECT_I2 = 1,
        SELECT_NEITHER = 0,
    };

    explicit TimestampFinder(nsecs_t timestamp) : mTimestamp(timestamp) {}
    ~TimestampFinder() {}

    template <typename T>
    static void swap(T& a, T& b) {
        T tmp = a;
        a = b;
        b = tmp;
    }

    /**
     * Try to find the best candidate for a ZSL buffer.
     * Match priority from best to worst:
     *  1) Timestamps match.
     *  2) Timestamp is closest to the needle (and lower).
     *  3) Timestamp is closest to the needle (and higher).
     *
     */
    virtual int compare(const BufferInfo *i1,
                        const BufferInfo *i2) const {
        // Try to select non-null object first.
        if (i1 == NULL) {
            return SELECT_I2;
        } else if (i2 == NULL) {
            return SELECT_I1;
        }

        // Best result: timestamp is identical
        if (i1->mTimestamp == mTimestamp) {
            return SELECT_I1;
        } else if (i2->mTimestamp == mTimestamp) {
            return SELECT_I2;
        }

        const BufferInfo* infoPtrs[2] = {
            i1,
            i2
        };
        int infoSelectors[2] = {
            SELECT_I1,
            SELECT_I2
        };

        // Order i1,i2 so that always i1.timestamp < i2.timestamp
        if (i1->mTimestamp > i2->mTimestamp) {
            swap(infoPtrs[0], infoPtrs[1]);
            swap(infoSelectors[0], infoSelectors[1]);
        }

        // Second best: closest (lower) timestamp
        if (infoPtrs[1]->mTimestamp < mTimestamp) {
            return infoSelectors[1];
        } else if (infoPtrs[0]->mTimestamp < mTimestamp) {
            return infoSelectors[0];
        }

        // Worst: closest (higher) timestamp
        return infoSelectors[0];

        /**
         * The above cases should cover all the possibilities,
         * and we get an 'empty' result only if the ring buffer
         * was empty itself
         */
    }

    const nsecs_t mTimestamp;
}; // struct TimestampFinder
} // namespace anonymous

ZslProcessor::ZslProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mLatestClearedBufferTimestamp(0),
        mState(RUNNING),
        mClient(client),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mZslStreamId(NO_STREAM),
        mInputStreamId(NO_STREAM),
        mFrameListHead(0),
        mHasFocuser(false),
        mInputBuffer(nullptr),
        mProducer(nullptr),
        mInputProducer(nullptr),
        mInputProducerSlot(-1),
        mBuffersToDetach(0) {
    // Initialize buffer queue and frame list based on pipeline max depth.
    size_t pipelineMaxDepth = kDefaultMaxPipelineDepth;
    if (client != 0) {
        sp<Camera3Device> device =
            static_cast<Camera3Device*>(client->getCameraDevice().get());
        if (device != 0) {
            camera_metadata_ro_entry_t entry =
                device->info().find(ANDROID_REQUEST_PIPELINE_MAX_DEPTH);
            if (entry.count == 1) {
                pipelineMaxDepth = entry.data.u8[0];
            } else {
                ALOGW("%s: Unable to find android.request.pipelineMaxDepth, "
                        "using default pipeline max depth %d", __FUNCTION__,
                        kDefaultMaxPipelineDepth);
            }

            entry = device->info().find(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE);
            if (entry.count > 0 && entry.data.f[0] != 0.) {
                mHasFocuser = true;
            }
        }
    }

    ALOGV("%s: Initializing buffer queue and frame list depth based on max pipeline depth (%zu)",
          __FUNCTION__, pipelineMaxDepth);
    // Keep the buffer queue deeper than the metadata queue because a buffer sometimes arrives
    // earlier than its metadata, which would otherwise cause the buffer corresponding to the
    // oldest metadata to be removed.
    mFrameListDepth = pipelineMaxDepth;
    mBufferQueueDepth = mFrameListDepth + 1;

    mZslQueue.insertAt(0, mBufferQueueDepth);
    mFrameList.insertAt(0, mFrameListDepth);
    sp<CaptureSequencer> captureSequencer = mSequencer.promote();
    if (captureSequencer != 0) captureSequencer->setZslProcessor(this);
}

ZslProcessor::~ZslProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

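// Records incoming preview result metadata into the frame list so that it
// can later be matched against ZSL buffers by timestamp when a reprocess
// request is issued.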
void ZslProcessor::onResultAvailable(const CaptureResult &result) {
    ATRACE_CALL();
    ALOGV("%s:", __FUNCTION__);
    Mutex::Autolock l(mInputMutex);
    camera_metadata_ro_entry_t entry;
    entry = result.mMetadata.find(ANDROID_SENSOR_TIMESTAMP);
    if (entry.count == 0) {
        ALOGE("%s: metadata doesn't have timestamp, skip this result", __FUNCTION__);
        return;
    }
    nsecs_t timestamp = entry.data.i64[0];

    entry = result.mMetadata.find(ANDROID_REQUEST_FRAME_COUNT);
    if (entry.count == 0) {
        ALOGE("%s: metadata doesn't have frame number, skip this result", __FUNCTION__);
        return;
    }
    int32_t frameNumber = entry.data.i32[0];

    ALOGVV("Got preview metadata for frame %d with timestamp %" PRId64, frameNumber, timestamp);

    if (mState != RUNNING) return;

    // Corresponding buffer has been cleared. No need to push into mFrameList
    if (timestamp <= mLatestClearedBufferTimestamp) return;

    mFrameList.editItemAt(mFrameListHead) = result.mMetadata;
    mFrameListHead = (mFrameListHead + 1) % mFrameListDepth;
}

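// (Re)creates the ZSL output stream and the reprocess input stream so their
// dimensions match the current sensor active array size, and registers this
// processor as a listener for preview result metadata.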
status_t ZslProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s: Configuring ZSL streams", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<Camera2Client> client = mClient.promote();
    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }
    sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    if ((mZslStreamId != NO_STREAM) || (mInputStreamId != NO_STREAM)) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mZslStreamId,
                &currentWidth, &currentHeight, 0, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    client->getCameraId(), strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.fastInfo.arrayWidth ||
                currentHeight != (uint32_t)params.fastInfo.arrayHeight) {
            if (mZslStreamId != NO_STREAM) {
                ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
                      "dimensions changed",
                    __FUNCTION__, client->getCameraId(), mZslStreamId);
                res = device->deleteStream(mZslStreamId);
                if (res == -EBUSY) {
                    ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                          "after it becomes idle", __FUNCTION__, mId);
                    return res;
                } else if (res != OK) {
                    ALOGE("%s: Camera %d: Unable to delete old output stream "
                            "for ZSL: %s (%d)", __FUNCTION__,
                            client->getCameraId(), strerror(-res), res);
                    return res;
                }
                mZslStreamId = NO_STREAM;
            }

            if (mInputStreamId != NO_STREAM) {
                ALOGV("%s: Camera %d: Deleting stream %d since the buffer "
                      "dimensions changed",
                    __FUNCTION__, client->getCameraId(), mInputStreamId);
                res = device->deleteStream(mInputStreamId);
                if (res == -EBUSY) {
                    ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                          "after it becomes idle", __FUNCTION__, mId);
                    return res;
                } else if (res != OK) {
                    ALOGE("%s: Camera %d: Unable to delete old output stream "
                            "for ZSL: %s (%d)", __FUNCTION__,
                            client->getCameraId(), strerror(-res), res);
                    return res;
                }
                mInputStreamId = NO_STREAM;
            }
            if (nullptr != mInputProducer.get()) {
                mInputProducer->disconnect(NATIVE_WINDOW_API_CPU);
                mInputProducer.clear();
            }
        }
    }

    if (mInputStreamId == NO_STREAM) {
        res = device->createInputStream(params.fastInfo.arrayWidth,
            params.fastInfo.arrayHeight, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            &mInputStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create input stream: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    if (mZslStreamId == NO_STREAM) {
        // Create stream for HAL production
        // TODO: Sort out better way to select resolution for ZSL

        sp<IGraphicBufferProducer> producer;
        sp<IGraphicBufferConsumer> consumer;
        BufferQueue::createBufferQueue(&producer, &consumer);
        mProducer = new RingBufferConsumer(consumer, GRALLOC_USAGE_HW_CAMERA_ZSL,
            mBufferQueueDepth);
        mProducer->setName(String8("Camera2-ZslRingBufferConsumer"));
        sp<Surface> outSurface = new Surface(producer);

        res = device->createStream(outSurface, params.fastInfo.arrayWidth,
            params.fastInfo.arrayHeight, HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
            HAL_DATASPACE_UNKNOWN, CAMERA3_STREAM_ROTATION_0, &mZslStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create ZSL stream: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    client->registerFrameListener(Camera2Client::kPreviewRequestIdStart,
            Camera2Client::kPreviewRequestIdEnd,
            this,
            /*sendPartials*/false);

    return OK;
}

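// Deletes the ZSL output and reprocess input streams, if present, and
// disconnects from the input buffer producer.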
status_t ZslProcessor::deleteStream() {
    ATRACE_CALL();
    status_t res;
    sp<Camera3Device> device = nullptr;
    sp<Camera2Client> client = nullptr;

    Mutex::Autolock l(mInputMutex);

    if ((mZslStreamId != NO_STREAM) || (mInputStreamId != NO_STREAM)) {
        client = mClient.promote();
        if (client == 0) {
            ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        device =
            static_cast<Camera3Device*>(client->getCameraDevice().get());
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }
    }

    if (mZslStreamId != NO_STREAM) {
        res = device->deleteStream(mZslStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Cannot delete ZSL output stream %d: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    mZslStreamId, strerror(-res), res);
            return res;
        }

        mZslStreamId = NO_STREAM;
    }
    if (mInputStreamId != NO_STREAM) {
        res = device->deleteStream(mInputStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Cannot delete input stream %d: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    mInputStreamId, strerror(-res), res);
            return res;
        }

        mInputStreamId = NO_STREAM;
    }

    if (nullptr != mInputProducer.get()) {
        mInputProducer->disconnect(NATIVE_WINDOW_API_CPU);
        mInputProducer.clear();
    }

    return OK;
}

int ZslProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mZslStreamId;
}

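// Copies selected post-processing settings from the still-capture request
// template into the given reprocess request, since they strongly affect the
// quality of the reprocessed image.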
status_t ZslProcessor::updateRequestWithDefaultStillRequest(CameraMetadata &request) const {
    sp<Camera2Client> client = mClient.promote();
    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }
    sp<Camera3Device> device =
        static_cast<Camera3Device*>(client->getCameraDevice().get());
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    CameraMetadata stillTemplate;
    device->createDefaultRequest(CAMERA3_TEMPLATE_STILL_CAPTURE, &stillTemplate);

    // Find the post-processing tags below and assign the values from the still capture
    // template to the request, as these modes (noise reduction, aberration correction,
    // tonemap, etc.) are very important for image quality.
    uint32_t postProcessingTags[] = {
            ANDROID_NOISE_REDUCTION_MODE,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
            ANDROID_COLOR_CORRECTION_MODE,
            ANDROID_TONEMAP_MODE,
            ANDROID_SHADING_MODE,
            ANDROID_HOT_PIXEL_MODE,
            ANDROID_EDGE_MODE
    };

    camera_metadata_entry_t entry;
    for (size_t i = 0; i < sizeof(postProcessingTags) / sizeof(uint32_t); i++) {
        entry = stillTemplate.find(postProcessingTags[i]);
        if (entry.count > 0) {
            request.update(postProcessingTags[i], entry.data.u8, 1);
        }
    }

    return OK;
}

void ZslProcessor::notifyInputReleased() {
    Mutex::Autolock l(mInputMutex);

    mBuffersToDetach++;
    mBuffersToDetachSignal.signal();
}

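// Detaches the buffer that the HAL has consumed from the input producer,
// verifies it matches the pinned ZSL buffer, releases that buffer, and
// returns to RUNNING so further ZSL requests are accepted.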
void ZslProcessor::doNotifyInputReleasedLocked() {
    assert(nullptr != mInputBuffer.get());
    assert(nullptr != mInputProducer.get());

    sp<GraphicBuffer> gb;
    sp<Fence> fence;
    auto rc = mInputProducer->detachNextBuffer(&gb, &fence);
    if (NO_ERROR != rc) {
        ALOGE("%s: Failed to detach buffer from input producer: %d",
            __FUNCTION__, rc);
        return;
    }

    BufferItem &item = mInputBuffer->getBufferItem();
    sp<GraphicBuffer> inputBuffer = item.mGraphicBuffer;
    if (gb->handle != inputBuffer->handle) {
        ALOGE("%s: Input mismatch, expected buffer %p received %p", __FUNCTION__,
            inputBuffer->handle, gb->handle);
        return;
    }

    mInputBuffer.clear();
    ALOGV("%s: Memory optimization, clearing ZSL queue",
          __FUNCTION__);
    clearZslResultQueueLocked();

    // Required so we accept more ZSL requests
    mState = RUNNING;
}

void ZslProcessor::InputProducerListener::onBufferReleased() {
    sp<ZslProcessor> parent = mParent.promote();
    if (nullptr != parent.get()) {
        parent->notifyInputReleased();
    }
}

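// Selects the best ZSL candidate by timestamp, queues its buffer onto the
// reprocess input stream, and submits a reprocess capture request built from
// the candidate's result metadata.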
status_t ZslProcessor::pushToReprocess(int32_t requestId) {
    ALOGV("%s: Send in reprocess request with id %d",
            __FUNCTION__, requestId);
    Mutex::Autolock l(mInputMutex);
    status_t res;
    sp<Camera2Client> client = mClient.promote();

    if (client == 0) {
        ALOGE("%s: Camera %d: Client does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    IF_ALOGV() {
        dumpZslQueue(-1);
    }

    size_t metadataIdx;
    nsecs_t candidateTimestamp = getCandidateTimestampLocked(&metadataIdx);

    if (candidateTimestamp == -1) {
        ALOGV("%s: Could not find good candidate for ZSL reprocessing",
              __FUNCTION__);
        return NOT_ENOUGH_DATA;
    } else {
        ALOGV("%s: Found good ZSL candidate idx: %u",
            __FUNCTION__, (unsigned int) metadataIdx);
    }

    if (nullptr == mInputProducer.get()) {
        res = client->getCameraDevice()->getInputBufferProducer(
            &mInputProducer);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to retrieve input producer: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }

        IGraphicBufferProducer::QueueBufferOutput output;
        res = mInputProducer->connect(new InputProducerListener(this),
            NATIVE_WINDOW_API_CPU, false, &output);
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to connect to input producer: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }

    res = enqueueInputBufferByTimestamp(candidateTimestamp,
        /*actualTimestamp*/NULL);
    if (res == NO_BUFFER_AVAILABLE) {
        ALOGV("%s: No ZSL buffers yet", __FUNCTION__);
        return NOT_ENOUGH_DATA;
    } else if (res != OK) {
        ALOGE("%s: Unable to push buffer for reprocessing: %s (%d)",
                __FUNCTION__, strerror(-res), res);
        return res;
    }

    {
        CameraMetadata request = mFrameList[metadataIdx];

        // Verify that the frame is reasonable for reprocessing

        camera_metadata_entry_t entry;
        entry = request.find(ANDROID_CONTROL_AE_STATE);
        if (entry.count == 0) {
            ALOGE("%s: ZSL queue frame has no AE state field!",
                    __FUNCTION__);
            return BAD_VALUE;
        }
        if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
            ALOGV("%s: ZSL queue frame AE state is %d, need full capture",
                    __FUNCTION__, entry.data.u8[0]);
            return NOT_ENOUGH_DATA;
        }

        uint8_t requestType = ANDROID_REQUEST_TYPE_REPROCESS;
        res = request.update(ANDROID_REQUEST_TYPE,
                &requestType, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request type",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        int32_t inputStreams[1] =
                { mInputStreamId };
        res = request.update(ANDROID_REQUEST_INPUT_STREAMS,
                inputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request input streams",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        uint8_t captureIntent =
                static_cast<uint8_t>(ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE);
        res = request.update(ANDROID_CONTROL_CAPTURE_INTENT,
                &captureIntent, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request capture intent",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        // TODO: Shouldn't we also update the latest preview frame?
        int32_t outputStreams[1] =
                { client->getCaptureStreamId() };
        res = request.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                outputStreams, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update request output streams",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        res = request.update(ANDROID_REQUEST_ID,
                &requestId, 1);
        if (res != OK) {
            ALOGE("%s: Unable to update frame to a reprocess request",
                  __FUNCTION__);
            return INVALID_OPERATION;
        }

        res = client->stopStream();
        if (res != OK) {
            ALOGE("%s: Camera %d: Unable to stop preview for ZSL capture: "
                "%s (%d)",
                __FUNCTION__, client->getCameraId(), strerror(-res), res);
            return INVALID_OPERATION;
        }

        // Update JPEG settings
        {
            SharedParameters::Lock l(client->getParameters());
            res = l.mParameters.updateRequestJpeg(&request);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to update JPEG entries of ZSL "
                        "capture request: %s (%d)", __FUNCTION__,
                        client->getCameraId(),
                        strerror(-res), res);
                return res;
            }
        }

        // Update post-processing settings
        res = updateRequestWithDefaultStillRequest(request);
        if (res != OK) {
            ALOGW("%s: Unable to update post-processing tags, the reprocessed image quality "
                    "may be compromised", __FUNCTION__);
        }

        mLatestCapturedRequest = request;
        res = client->getCameraDevice()->capture(request);
        if (res != OK) {
            ALOGE("%s: Unable to send ZSL reprocess request to capture: %s"
                  " (%d)", __FUNCTION__, strerror(-res), res);
            return res;
        }

        mState = LOCKED;
    }

    return OK;
}

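// Pins the ring buffer entry whose timestamp best matches the requested one,
// then attaches and queues its graphic buffer onto the reprocess input
// producer, reporting the timestamp actually selected through actualTimestamp
// when provided.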
status_t ZslProcessor::enqueueInputBufferByTimestamp(
        nsecs_t timestamp,
        nsecs_t* actualTimestamp) {

    TimestampFinder timestampFinder = TimestampFinder(timestamp);

    mInputBuffer = mProducer->pinSelectedBuffer(timestampFinder,
        /*waitForFence*/false);

    if (nullptr == mInputBuffer.get()) {
        ALOGE("%s: No ZSL buffers were available yet", __FUNCTION__);
        return NO_BUFFER_AVAILABLE;
    }

    nsecs_t actual = mInputBuffer->getBufferItem().mTimestamp;

    if (actual != timestamp) {
        // TODO: This is problematic, the metadata queue timestamp should
        //       usually have a corresponding ZSL buffer with the same timestamp.
        //       If this is not the case, then it is possible that we will use
        //       a ZSL buffer from a different request, which can result in
        //       side effects during the reprocess pass.
        ALOGW("%s: ZSL buffer candidate search didn't find an exact match --"
              " requested timestamp = %" PRId64 ", actual timestamp = %" PRId64,
              __FUNCTION__, timestamp, actual);
    }

    if (nullptr != actualTimestamp) {
        *actualTimestamp = actual;
    }

    BufferItem &item = mInputBuffer->getBufferItem();
    auto rc = mInputProducer->attachBuffer(&mInputProducerSlot,
        item.mGraphicBuffer);
    if (OK != rc) {
        ALOGE("%s: Failed to attach input ZSL buffer to producer: %d",
            __FUNCTION__, rc);
        return rc;
    }

    IGraphicBufferProducer::QueueBufferOutput output;
    IGraphicBufferProducer::QueueBufferInput input(item.mTimestamp,
            item.mIsAutoTimestamp, item.mDataSpace, item.mCrop,
            item.mScalingMode, item.mTransform, item.mFence);
    rc = mInputProducer->queueBuffer(mInputProducerSlot, input, &output);
    if (OK != rc) {
        ALOGE("%s: Failed to queue ZSL buffer to producer: %d",
            __FUNCTION__, rc);
        return rc;
    }

    return rc;
}

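// Releases the pinned input buffer, if any, and clears the ZSL ring buffer,
// reporting the latest buffer timestamp seen so that older result metadata
// can be ignored.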
status_t ZslProcessor::clearInputRingBufferLocked(nsecs_t* latestTimestamp) {

    if (nullptr != latestTimestamp) {
        *latestTimestamp = mProducer->getLatestTimestamp();
    }
    mInputBuffer.clear();

    return mProducer->clear();
}

status_t ZslProcessor::clearZslQueue() {
    Mutex::Autolock l(mInputMutex);
    // If in middle of capture, can't clear out queue
    if (mState == LOCKED) return OK;

    return clearZslQueueLocked();
}

status_t ZslProcessor::clearZslQueueLocked() {
    if (NO_STREAM != mZslStreamId) {
        // clear result metadata list first.
        clearZslResultQueueLocked();
        return clearInputRingBufferLocked(&mLatestClearedBufferTimestamp);
    }
    return OK;
}

void ZslProcessor::clearZslResultQueueLocked() {
    mFrameList.clear();
    mFrameListHead = 0;
    mFrameList.insertAt(0, mFrameListDepth);
}

void ZslProcessor::dump(int fd, const Vector<String16>& /*args*/) const {
    Mutex::Autolock l(mInputMutex);
    if (!mLatestCapturedRequest.isEmpty()) {
        String8 result("    Latest ZSL capture request:\n");
        write(fd, result.string(), result.size());
        mLatestCapturedRequest.dump(fd, 2, 6);
    } else {
        String8 result("    Latest ZSL capture request: none yet\n");
        write(fd, result.string(), result.size());
    }
    dumpZslQueue(fd);
}

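// Waits for buffer-release notifications from the input producer and performs
// the actual detach work here, outside of the producer callback.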
bool ZslProcessor::threadLoop() {
    Mutex::Autolock l(mInputMutex);

    if (mBuffersToDetach == 0) {
        status_t res = mBuffersToDetachSignal.waitRelative(mInputMutex, kWaitDuration);
        if (res == TIMED_OUT) return true;
    }
    while (mBuffersToDetach > 0) {
        doNotifyInputReleasedLocked();
        mBuffersToDetach--;
    }

    return true;
}

void ZslProcessor::dumpZslQueue(int fd) const {
    String8 header("ZSL queue contents:");
    String8 indent("    ");
    ALOGV("%s", header.string());
    if (fd != -1) {
        header = indent + header + "\n";
        write(fd, header.string(), header.size());
    }
    for (size_t i = 0; i < mZslQueue.size(); i++) {
        const ZslPair &queueEntry = mZslQueue[i];
        nsecs_t bufferTimestamp = queueEntry.buffer.mTimestamp;
        camera_metadata_ro_entry_t entry;
        nsecs_t frameTimestamp = 0;
        int frameAeState = -1;
        if (!queueEntry.frame.isEmpty()) {
            entry = queueEntry.frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (entry.count > 0) frameTimestamp = entry.data.i64[0];
            entry = queueEntry.frame.find(ANDROID_CONTROL_AE_STATE);
            if (entry.count > 0) frameAeState = entry.data.u8[0];
        }
        String8 result =
                String8::format("   %zu: b: %" PRId64 "\tf: %" PRId64 ", AE state: %d", i,
                        bufferTimestamp, frameTimestamp, frameAeState);
        ALOGV("%s", result.string());
        if (fd != -1) {
            result = indent + result + "\n";
            write(fd, result.string(), result.size());
        }
    }
}

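// Returns true for AF modes (OFF, EDOF) in which the framework does not run
// autofocus, so ZSL candidates need not be checked for a focused AF state.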
bool ZslProcessor::isFixedFocusMode(uint8_t afMode) const {
    switch (afMode) {
        case ANDROID_CONTROL_AF_MODE_AUTO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO:
        case ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE:
        case ANDROID_CONTROL_AF_MODE_MACRO:
            return false;
        case ANDROID_CONTROL_AF_MODE_OFF:
        case ANDROID_CONTROL_AF_MODE_EDOF:
            return true;
        default:
            ALOGE("%s: unknown focus mode %d", __FUNCTION__, afMode);
            return false;
    }
}

nsecs_t ZslProcessor::getCandidateTimestampLocked(size_t* metadataIdx) const {
    /**
     * Find the smallest timestamp we know about so far
     * - ensure that aeState is either converged or locked
     */

    size_t idx = 0;
    nsecs_t minTimestamp = -1;

    size_t emptyCount = mFrameList.size();

    for (size_t j = 0; j < mFrameList.size(); j++) {
        const CameraMetadata &frame = mFrameList[j];
        if (!frame.isEmpty()) {

            emptyCount--;

            camera_metadata_ro_entry_t entry;
            entry = frame.find(ANDROID_SENSOR_TIMESTAMP);
            if (entry.count == 0) {
                ALOGE("%s: Can't find timestamp in frame!",
                        __FUNCTION__);
                continue;
            }
            nsecs_t frameTimestamp = entry.data.i64[0];
            if (minTimestamp > frameTimestamp || minTimestamp == -1) {

                entry = frame.find(ANDROID_CONTROL_AE_STATE);

                if (entry.count == 0) {
                    /**
                     * This is most likely a HAL bug. The aeState field is
                     * mandatory, so it should always be in a metadata packet.
                     */
                    ALOGW("%s: ZSL queue frame has no AE state field!",
                            __FUNCTION__);
                    continue;
                }
                if (entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_CONVERGED &&
                        entry.data.u8[0] != ANDROID_CONTROL_AE_STATE_LOCKED) {
                    ALOGVV("%s: ZSL queue frame AE state is %d, need "
                           "full capture", __FUNCTION__, entry.data.u8[0]);
                    continue;
                }

                entry = frame.find(ANDROID_CONTROL_AF_MODE);
                if (entry.count == 0) {
                    ALOGW("%s: ZSL queue frame has no AF mode field!",
                            __FUNCTION__);
                    continue;
                }
                uint8_t afMode = entry.data.u8[0];
                if (afMode == ANDROID_CONTROL_AF_MODE_OFF) {
                    // Skip all ZSL buffers for manual AF mode, as we don't really
                    // know the AF state.
                    continue;
                }

                // Check AF state if device has focuser and focus mode isn't fixed
                if (mHasFocuser && !isFixedFocusMode(afMode)) {
                    // Make sure the candidate frame has good focus.
                    entry = frame.find(ANDROID_CONTROL_AF_STATE);
                    if (entry.count == 0) {
                        ALOGW("%s: ZSL queue frame has no AF state field!",
                                __FUNCTION__);
                        continue;
                    }
                    uint8_t afState = entry.data.u8[0];
                    if (afState != ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED &&
                            afState != ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED &&
                            afState != ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED) {
                        ALOGVV("%s: ZSL queue frame AF state %d is not good for capture, skipping it",
                                __FUNCTION__, afState);
                        continue;
                    }
                }

                minTimestamp = frameTimestamp;
                idx = j;
            }

            ALOGVV("%s: Saw timestamp %" PRId64, __FUNCTION__, frameTimestamp);
        }
    }

    if (emptyCount == mFrameList.size()) {
        /**
         * This could be mildly bad and means our ZSL was triggered before
         * any frames were received by the camera framework.
         *
         * This is a fairly rare corner case which can happen when:
         * + the user presses the shutter button as soon as the camera starts
         *     (startPreview followed immediately by takePicture).
         * + burst capture is used (hitting the shutter button as fast as possible).
         *
         * If this happens in the steady state (preview running for a while,
         *     then a single takePicture) then this might be a framework bug.
         */
        ALOGW("%s: ZSL queue has no metadata frames", __FUNCTION__);
    }

    ALOGV("%s: Candidate timestamp %" PRId64 " (idx %zu), empty frames: %zu",
          __FUNCTION__, minTimestamp, idx, emptyCount);

    if (metadataIdx) {
        *metadataIdx = idx;
    }

    return minTimestamp;
}

} // namespace camera2
} // namespace android
921