/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/FrameProcessor.h"

namespace android {
namespace camera2 {
FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
                               sp<Camera2Client> client) :
    FrameProcessorBase(device),
    mClient(client),
    mLastFrameNumberOfFaces(0),
    mLast3AFrameNumber(-1) {

    // Guard against a device that has already gone away; without it there
    // is nothing to synthesize 3A notifications from.
    sp<CameraDeviceBase> d = device.promote();
    mSynthesize3ANotify = (d != NULL) ? !(d->willNotify3A()) : false;

    {
        SharedParameters::Lock l(client->getParameters());

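        // A HAL may split each capture result into several partial pieces;
        // only treat results as partial when more than one partial result
        // per frame was advertised by the device.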
        mUsePartialResult = (mNumPartialResults > 1);

        // Initialize starting 3A state
        m3aState.afTriggerId = l.mParameters.afTriggerCounter;
        m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
        // Check if lens is fixed-focus
        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
        } else {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_AUTO;
        }
        m3aState.awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
        m3aState.aeState = ANDROID_CONTROL_AE_STATE_INACTIVE;
        m3aState.afState = ANDROID_CONTROL_AF_STATE_INACTIVE;
        m3aState.awbState = ANDROID_CONTROL_AWB_STATE_INACTIVE;
    }
}

FrameProcessor::~FrameProcessor() {
}

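/**
 * Per-frame entry point, invoked for each capture result. Face detection
 * output is processed only on completed (non-partial) results, and 3A
 * notifications are synthesized here when the HAL does not emit them
 * itself.
 */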
bool FrameProcessor::processSingleFrame(CaptureResult &frame,
                                        const sp<CameraDeviceBase> &device) {

    sp<Camera2Client> client = mClient.promote();
    if (!client.get()) {
        return false;
    }

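    // Only the final (non-partial) result for a frame carries complete
    // face detection metadata, so partial pieces skip that step below.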
    bool isPartialResult = false;
    if (mUsePartialResult) {
        isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
    }

    if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
        return false;
    }

    if (mSynthesize3ANotify) {
        process3aState(frame, client);
    }

    return FrameProcessorBase::processSingleFrame(frame, device);
}

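/**
 * Extract face detection statistics from a completed result and forward
 * them to the client in the API1 callback format: rectangles and landmarks
 * are mapped from active-array coordinates into the normalized
 * [-1000, 1000] preview coordinate space used by camera_face_t.
 */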
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;

    if (enableFaceDetect &&
        faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
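        // Face rectangles are packed as (left, top, right, bottom) in
        // active-array coordinates, four int32s per face.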
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

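        // FULL face detect mode additionally reports per-face IDs and
        // landmarks packed as (leftEyeX, leftEyeY, rightEyeX, rightEyeY,
        // mouthX, mouthY), six int32s per face.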
        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);

            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        entry = frame.find(ANDROID_SCALER_CROP_REGION);
        if (entry.count < 4) {
            ALOGE("%s: Camera %d: Unable to read crop region (count = %zu)",
                    __FUNCTION__, client->getCameraId(), entry.count);
            return res;
        }

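        // The current scaler crop region describes the zoomed field of
        // view; the normalization helpers below use it to map array
        // coordinates into what the client actually sees.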
        Parameters::CropRegion scalerCrop = {
            static_cast<float>(entry.data.i32[0]),
            static_cast<float>(entry.data.i32[1]),
            static_cast<float>(entry.data.i32[2]),
            static_cast<float>(entry.data.i32[3])};

        faces.setCapacity(metadata.number_of_faces);

        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

            face.rect[0] = l.mParameters.arrayXToNormalizedWithCrop(
                                faceRects[i*4 + 0], scalerCrop);
            face.rect[1] = l.mParameters.arrayYToNormalizedWithCrop(
                                faceRects[i*4 + 1], scalerCrop);
            face.rect[2] = l.mParameters.arrayXToNormalizedWithCrop(
                                faceRects[i*4 + 2], scalerCrop);
            face.rect[3] = l.mParameters.arrayYToNormalizedWithCrop(
                                faceRects[i*4 + 3], scalerCrop);

            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 0], scalerCrop);
                face.left_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 1], scalerCrop);
                face.right_eye[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 2], scalerCrop);
                face.right_eye[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 3], scalerCrop);
                face.mouth[0] = l.mParameters.arrayXToNormalizedWithCrop(
                        faceLandmarks[i*6 + 4], scalerCrop);
                face.mouth[1] = l.mParameters.arrayYToNormalizedWithCrop(
                        faceLandmarks[i*6 + 5], scalerCrop);
            } else {
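                // BASIC mode reports no IDs or landmarks; -2000 lies
                // outside the valid [-1000, 1000] coordinate range and
                // serves as the "unsupported" sentinel.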
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}

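/**
 * Accumulate 3A (AF/AE/AWB) modes and states across the partial results
 * of a single frame, keyed by frame number in mPending3AStates. Once all
 * fields have arrived, notify the client of any values that changed since
 * the last completed frame, then retire the pending entry.
 */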
status_t FrameProcessor::process3aState(const CaptureResult &frame,
        const sp<Camera2Client> &client) {

    ATRACE_CALL();
    const CameraMetadata &metadata = frame.mMetadata;
    camera_metadata_ro_entry_t entry;
    int cameraId = client->getCameraId();

    entry = metadata.find(ANDROID_REQUEST_FRAME_COUNT);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: Error reading frame number from metadata",
                __FUNCTION__, cameraId);
        return BAD_VALUE;
    }
    int32_t frameNumber = entry.data.i32[0];

    // Don't send 3A notifications for the same frame number twice
    if (frameNumber <= mLast3AFrameNumber) {
        ALOGV("%s: Already sent 3A for frame number %d, skipping",
                __FUNCTION__, frameNumber);

        // Remove the entry if there is one for this frame number in mPending3AStates.
        mPending3AStates.removeItem(frameNumber);
        return OK;
    }

    AlgState pendingState;

    ssize_t index = mPending3AStates.indexOfKey(frameNumber);
    if (index != NAME_NOT_FOUND) {
        pendingState = mPending3AStates.valueAt(index);
    }

    // Update 3A states from the result.
    bool gotAllStates = true;

    // TODO: Also use AE mode, AE trigger ID
    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
            &pendingState.afMode, frameNumber, cameraId);

    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
            &pendingState.awbMode, frameNumber, cameraId);

    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
            &pendingState.aeState, frameNumber, cameraId);

    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
            &pendingState.afState, frameNumber, cameraId);

    gotAllStates &= updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
            &pendingState.awbState, frameNumber, cameraId);

    pendingState.afTriggerId = frame.mResultExtras.afTriggerId;
    pendingState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;

    if (!gotAllStates) {
        // Not every 3A field has arrived yet; stash the partial state in
        // mPending3AStates until the remaining pieces come in.
        if (index == NAME_NOT_FOUND) {
            mPending3AStates.add(frameNumber, pendingState);
        } else {
            mPending3AStates.replaceValueAt(index, pendingState);
        }
        return NOT_ENOUGH_DATA;
    }

    // Once all 3A states are received, notify the client about 3A changes.
    if (pendingState.aeState != m3aState.aeState) {
        ALOGV("%s: Camera %d: AE state %d->%d",
                __FUNCTION__, cameraId,
                m3aState.aeState, pendingState.aeState);
        client->notifyAutoExposure(pendingState.aeState, pendingState.aeTriggerId);
    }

    if (pendingState.afState != m3aState.afState ||
        pendingState.afMode != m3aState.afMode ||
        pendingState.afTriggerId != m3aState.afTriggerId) {
        ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
                __FUNCTION__, cameraId,
                m3aState.afState, pendingState.afState,
                m3aState.afMode, pendingState.afMode,
                m3aState.afTriggerId, pendingState.afTriggerId);
        client->notifyAutoFocus(pendingState.afState, pendingState.afTriggerId);
    }
    if (pendingState.awbState != m3aState.awbState ||
        pendingState.awbMode != m3aState.awbMode) {
        ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
                __FUNCTION__, cameraId,
                m3aState.awbState, pendingState.awbState,
                m3aState.awbMode, pendingState.awbMode);
        client->notifyAutoWhitebalance(pendingState.awbState,
                pendingState.aeTriggerId);
    }

    if (index != NAME_NOT_FOUND) {
        mPending3AStates.removeItemsAt(index);
    }

    m3aState = pendingState;
    mLast3AFrameNumber = frameNumber;

    return OK;
}

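/**
 * Copy a single 3A metadata value of source type Src out of a (possibly
 * partial) result into *value, unless an earlier partial result for this
 * frame already provided it. Src selects how the entry payload is read
 * (u8 vs i32) via the sizeof() dispatch below. Typical use, as above:
 *
 *   updatePendingState<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
 *           &pendingState.afState, frameNumber, cameraId);
 */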
template<typename Src, typename T>
bool FrameProcessor::updatePendingState(const CameraMetadata& result, int32_t tag,
        T* value, int32_t frameNumber, int cameraId) {
    camera_metadata_ro_entry_t entry;
    if (value == NULL) {
        ALOGE("%s: Camera %d: Value to write to is NULL",
                __FUNCTION__, cameraId);
        return false;
    }

    // Already got the value for this tag.
    if (*value != static_cast<T>(NOT_SET)) {
        return true;
    }

    entry = result.find(tag);
    if (entry.count == 0) {
        const camera_metadata *metaBuffer = result.getAndLock();
        ALOGV("%s: Camera %d: No %s provided by HAL for frame %d in this result!",
                __FUNCTION__, cameraId,
                get_local_camera_metadata_tag_name(tag, metaBuffer),
                frameNumber);
        result.unlock(metaBuffer);
        return false;
    } else {
        switch (sizeof(Src)) {
            case sizeof(uint8_t):
                *value = static_cast<T>(entry.data.u8[0]);
                break;
            case sizeof(int32_t):
                *value = static_cast<T>(entry.data.i32[0]);
                break;
            default:
                ALOGE("%s: Camera %d: Unsupported source",
                        __FUNCTION__, cameraId);
                return false;
        }
    }
    return true;
}

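/**
 * Deliver a face detection callback to the client, suppressing repeated
 * zero-face frames so the remote callback isn't spammed while no faces
 * are visible.
 */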
void FrameProcessor::callbackFaceDetection(const sp<Camera2Client>& client,
                                     const camera_frame_metadata &metadata) {

    camera_frame_metadata *metadata_ptr =
        const_cast<camera_frame_metadata*>(&metadata);

    /**
     * Filter out repeated zero-face callbacks, but always forward the
     * first zero-face frame after one that contained faces.
     */
    if (metadata.number_of_faces != 0 ||
        mLastFrameNumberOfFaces != metadata.number_of_faces) {

        Camera2Client::SharedCameraCallbacks::Lock
            l(client->mSharedCameraCallbacks);
        if (l.mRemoteCallback != NULL) {
            l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                                            NULL,
                                            metadata_ptr);
        }
    }

    mLastFrameNumberOfFaces = metadata.number_of_faces;
}

} // namespace camera2
} // namespace android