FrameProcessor.cpp revision 741ace8776f052245e33a47a0b99400f75996f45
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#define LOG_TAG "Camera2-FrameProcessor"
18#define ATRACE_TAG ATRACE_TAG_CAMERA
19//#define LOG_NDEBUG 0
20
21#include <utils/Log.h>
22#include <utils/Trace.h>
23
24#include "common/CameraDeviceBase.h"
25#include "api1/Camera2Client.h"
26#include "api1/client2/FrameProcessor.h"
27
28namespace android {
29namespace camera2 {
30
31FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
32                               sp<Camera2Client> client) :
33    FrameProcessorBase(device),
34    mClient(client),
35    mLastFrameNumberOfFaces(0),
36    mLast3AFrameNumber(-1) {
37
38    sp<CameraDeviceBase> d = device.promote();
39    mSynthesize3ANotify = !(d->willNotify3A());
40
41    {
42        SharedParameters::Lock l(client->getParameters());
43        mUsePartialQuirk = l.mParameters.quirks.partialResults;
44
45        // Initialize starting 3A state
46        m3aState.afTriggerId = l.mParameters.afTriggerCounter;
47        m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
48        // Check if lens is fixed-focus
49        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
50            m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
51        }
52    }
53}
54
// Nothing to release explicitly; member destructors handle cleanup.
FrameProcessor::~FrameProcessor() {
}
57
58bool FrameProcessor::processSingleFrame(CaptureResult &frame,
59                                        const sp<CameraDeviceBase> &device) {
60
61    sp<Camera2Client> client = mClient.promote();
62    if (!client.get()) {
63        return false;
64    }
65
66    bool partialResult = false;
67    if (mUsePartialQuirk) {
68        camera_metadata_entry_t entry;
69        entry = frame.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
70        if (entry.count > 0 &&
71                entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
72            partialResult = true;
73        }
74    }
75
76    if (!partialResult && processFaceDetect(frame.mMetadata, client) != OK) {
77        return false;
78    }
79
80    if (mSynthesize3ANotify) {
81        process3aState(frame, client);
82    }
83
84    return FrameProcessorBase::processSingleFrame(frame, device);
85}
86
/**
 * Extracts face-detection results from a frame's metadata, converts the
 * face rectangles/landmarks from array coordinates to the normalized
 * [-1000, 1000] callback coordinate system, and delivers them to the
 * client's remote callback via callbackFaceDetection().
 *
 * Returns OK on success (including frames with no face data, which are
 * tolerated for now — see TODO), BAD_VALUE when the metadata is
 * internally inconsistent (too many faces, missing scores/landmarks/IDs).
 */
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
        // Only hold the parameter lock long enough to read the flag.
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;

    if (enableFaceDetect &&
        faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        // Lock is held for the whole parse so the coordinate-normalization
        // helpers below see a consistent parameter set.
        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
        // Each face rectangle is 4 int32s: left, top, right, bottom.
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

        // Landmarks and IDs are only produced in FULL face-detect mode.
        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);

            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        faces.setCapacity(metadata.number_of_faces);

        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
            // A zero score marks an invalid/empty slot; drop it and shrink
            // the reported count accordingly.
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

            // Convert rectangle corners from active-array pixel coordinates
            // to the normalized callback coordinate space.
            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);

            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                // Landmarks come 6 int32s per face: left eye (x,y),
                // right eye (x,y), mouth (x,y).
                face.id = faceIds[i];
                face.left_eye[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
                face.left_eye[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
                face.right_eye[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
                face.right_eye[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
                face.mouth[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
                face.mouth[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
            } else {
                // SIMPLE mode: no identity/landmarks; -2000 is outside the
                // normalized range and marks the fields as unavailable.
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        // NOTE: metadata.faces aliases the local Vector's storage; it is
        // only valid until this function returns, which is fine because the
        // callback below is synchronous.
        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}
214
215status_t FrameProcessor::process3aState(const CaptureResult &frame,
216        const sp<Camera2Client> &client) {
217
218    ATRACE_CALL();
219    const CameraMetadata &metadata = frame.mMetadata;
220    camera_metadata_ro_entry_t entry;
221    int cameraId = client->getCameraId();
222
223    entry = metadata.find(ANDROID_REQUEST_FRAME_COUNT);
224    int32_t frameNumber = entry.data.i32[0];
225
226    // Don't send 3A notifications for the same frame number twice
227    if (frameNumber <= mLast3AFrameNumber) {
228        ALOGV("%s: Already sent 3A for frame number %d, skipping",
229                __FUNCTION__, frameNumber);
230        return OK;
231    }
232
233    mLast3AFrameNumber = frameNumber;
234
235    // Get 3A states from result metadata
236    bool gotAllStates = true;
237
238    AlgState new3aState;
239
240    // TODO: Also use AE mode, AE trigger ID
241
242    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
243            &new3aState.afMode, frameNumber, cameraId);
244
245    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
246            &new3aState.awbMode, frameNumber, cameraId);
247
248    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
249            &new3aState.aeState, frameNumber, cameraId);
250
251    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
252            &new3aState.afState, frameNumber, cameraId);
253
254    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
255            &new3aState.awbState, frameNumber, cameraId);
256
257    if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
258        new3aState.afTriggerId = frame.mResultExtras.afTriggerId;
259        new3aState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
260    } else {
261        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AF_TRIGGER_ID,
262                 &new3aState.afTriggerId, frameNumber, cameraId);
263
264        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AE_PRECAPTURE_ID,
265                 &new3aState.aeTriggerId, frameNumber, cameraId);
266    }
267
268    if (!gotAllStates) return BAD_VALUE;
269
270    if (new3aState.aeState != m3aState.aeState) {
271        ALOGV("%s: Camera %d: AE state %d->%d",
272                __FUNCTION__, cameraId,
273                m3aState.aeState, new3aState.aeState);
274        client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
275    }
276
277    if (new3aState.afState != m3aState.afState ||
278        new3aState.afMode != m3aState.afMode ||
279        new3aState.afTriggerId != m3aState.afTriggerId) {
280        ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
281                __FUNCTION__, cameraId,
282                m3aState.afState, new3aState.afState,
283                m3aState.afMode, new3aState.afMode,
284                m3aState.afTriggerId, new3aState.afTriggerId);
285        client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
286    }
287    if (new3aState.awbState != m3aState.awbState ||
288        new3aState.awbMode != m3aState.awbMode) {
289        ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
290                __FUNCTION__, cameraId,
291                m3aState.awbState, new3aState.awbState,
292                m3aState.awbMode, new3aState.awbMode);
293        client->notifyAutoWhitebalance(new3aState.awbState,
294                new3aState.aeTriggerId);
295    }
296
297    m3aState = new3aState;
298
299    return OK;
300}
301
302template<typename Src, typename T>
303bool FrameProcessor::get3aResult(const CameraMetadata& result, int32_t tag,
304        T* value, int32_t frameNumber, int cameraId) {
305    camera_metadata_ro_entry_t entry;
306    if (value == NULL) {
307        ALOGE("%s: Camera %d: Value to write to is NULL",
308                __FUNCTION__, cameraId);
309        return false;
310    }
311
312    entry = result.find(tag);
313    if (entry.count == 0) {
314        ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
315                __FUNCTION__, cameraId,
316                get_camera_metadata_tag_name(tag), frameNumber);
317        return false;
318    } else {
319        switch(sizeof(Src)){
320            case sizeof(uint8_t):
321                *value = static_cast<T>(entry.data.u8[0]);
322                break;
323            case sizeof(int32_t):
324                *value = static_cast<T>(entry.data.i32[0]);
325                break;
326            default:
327                ALOGE("%s: Camera %d: Unsupported source",
328                        __FUNCTION__, cameraId);
329                return false;
330        }
331    }
332    return true;
333}
334
335
336void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
337                                     const camera_frame_metadata &metadata) {
338
339    camera_frame_metadata *metadata_ptr =
340        const_cast<camera_frame_metadata*>(&metadata);
341
342    /**
343     * Filter out repeated 0-face callbacks,
344     * but not when the last frame was >0
345     */
346    if (metadata.number_of_faces != 0 ||
347        mLastFrameNumberOfFaces != metadata.number_of_faces) {
348
349        Camera2Client::SharedCameraCallbacks::Lock
350            l(client->mSharedCameraCallbacks);
351        if (l.mRemoteCallback != NULL) {
352            l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
353                                            NULL,
354                                            metadata_ptr);
355        }
356    }
357
358    mLastFrameNumberOfFaces = metadata.number_of_faces;
359}
360
361}; // namespace camera2
362}; // namespace android
363