FrameProcessor.cpp revision 204e3295e2814052aef7e45ee9edd60128efbbd0
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/FrameProcessor.h"

namespace android {
namespace camera2 {

FrameProcessor::FrameProcessor(wp<CameraDeviceBase> device,
                               sp<Camera2Client> client) :
    FrameProcessorBase(device),
    mClient(client),
    mLastFrameNumberOfFaces(0),
    mLast3AFrameNumber(-1) {

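    // If the device does not deliver 3A notifications on its own,
    // synthesize them from per-frame result metadata (see process3aState).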
    sp<CameraDeviceBase> d = device.promote();
    // Guard against the device already having been destroyed.
    mSynthesize3ANotify = (d != NULL) && !(d->willNotify3A());

    {
        SharedParameters::Lock l(client->getParameters());

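        // Decide how partial results are delivered: HAL3.2+ devices report
        // a partial result count in the result extras, while older devices
        // flag partials via the ANDROID_QUIRKS_PARTIAL_RESULT quirk.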
        if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
            mUsePartialResult = (mNumPartialResults > 1);
        } else {
            mUsePartialResult = l.mParameters.quirks.partialResults;
        }

        // Initialize starting 3A state
        m3aState.afTriggerId = l.mParameters.afTriggerCounter;
        m3aState.aeTriggerId = l.mParameters.precaptureTriggerCounter;
        // Check if lens is fixed-focus
        if (l.mParameters.focusMode == Parameters::FOCUS_MODE_FIXED) {
            m3aState.afMode = ANDROID_CONTROL_AF_MODE_OFF;
        }
    }
}

FrameProcessor::~FrameProcessor() {
}

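// Per-frame entry point: runs the face-detect callback and, when needed,
// the synthesized 3A notifications before handing the frame to the base
// class for further dispatch.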
bool FrameProcessor::processSingleFrame(CaptureResult &frame,
                                        const sp<CameraDeviceBase> &device) {

    sp<Camera2Client> client = mClient.promote();
    if (!client.get()) {
        return false;
    }

    bool isPartialResult = false;
    if (mUsePartialResult) {
        if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
            isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
        } else {
            camera_metadata_entry_t entry;
            entry = frame.mMetadata.find(ANDROID_QUIRKS_PARTIAL_RESULT);
            if (entry.count > 0 &&
                    entry.data.u8[0] == ANDROID_QUIRKS_PARTIAL_RESULT_PARTIAL) {
                isPartialResult = true;
            }
        }
    }

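    // Only run face detection on complete results; a partial result may
    // not carry the face metadata yet.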
    if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
        return false;
    }

    if (mSynthesize3ANotify) {
        process3aState(frame, client);
    }

    return FrameProcessorBase::processSingleFrame(frame, device);
}

status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        const sp<Camera2Client> &client) {
    ATRACE_CALL();
    status_t res = BAD_VALUE;
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;

    {
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;
    // Don't leave the faces pointer uninitialized for the zero-face path
    // below (face detection disabled or mode OFF).
    metadata.faces = NULL;

    if (enableFaceDetect &&
        faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {

        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraCallbacks */
            callbackFaceDetection(client, metadata);
            return OK;
        }
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

        if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
            entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATISTICS_FACE_IDS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        faces.setCapacity(metadata.number_of_faces);

        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
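            // A score of 0 marks an invalid face entry; drop it from the
            // reported list.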
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }
            if (faceScores[i] > 100) {
                ALOGW("%s: Face index %zu with out of range score %d",
                        __FUNCTION__, i, faceScores[i]);
            }

            camera_face_t face;

            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);

            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
                face.left_eye[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
                face.right_eye[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
                face.right_eye[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
                face.mouth[0] =
                    l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
                face.mouth[1] =
                    l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
            } else {
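                // Simple mode reports no IDs or landmarks; -2000 lies
                // outside the valid [-1000, 1000] coordinate range and
                // marks these fields as unsupported.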
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraCallbacks */
    callbackFaceDetection(client, metadata);

    return OK;
}

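// Compare the 3A state carried in this result against the last state seen
// and emit the corresponding legacy notifications for any transitions.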
status_t FrameProcessor::process3aState(const CaptureResult &frame,
        const sp<Camera2Client> &client) {

    ATRACE_CALL();
    const CameraMetadata &metadata = frame.mMetadata;
    camera_metadata_ro_entry_t entry;
    int cameraId = client->getCameraId();

    entry = metadata.find(ANDROID_REQUEST_FRAME_COUNT);
    if (entry.count == 0) {
        // The frame count is required; without it the result can't be
        // ordered against previously seen results.
        return BAD_VALUE;
    }
    int32_t frameNumber = entry.data.i32[0];

    // Don't send 3A notifications for the same frame number twice
    if (frameNumber <= mLast3AFrameNumber) {
        ALOGV("%s: Already sent 3A for frame number %d, skipping",
                __FUNCTION__, frameNumber);
        return OK;
    }

    mLast3AFrameNumber = frameNumber;

    // Get 3A states from result metadata
    bool gotAllStates = true;

    AlgState new3aState;

    // TODO: Also use AE mode, AE trigger ID

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
            &new3aState.afMode, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
            &new3aState.awbMode, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
            &new3aState.aeState, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
            &new3aState.afState, frameNumber, cameraId);

    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
            &new3aState.awbState, frameNumber, cameraId);

    if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
        new3aState.afTriggerId = frame.mResultExtras.afTriggerId;
        new3aState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
    } else {
        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AF_TRIGGER_ID,
                &new3aState.afTriggerId, frameNumber, cameraId);

        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &new3aState.aeTriggerId, frameNumber, cameraId);
    }

    if (!gotAllStates) return BAD_VALUE;

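    // Notify only on transitions, mirroring the callbacks a device that
    // emits its own 3A notifications would send.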
    if (new3aState.aeState != m3aState.aeState) {
        ALOGV("%s: Camera %d: AE state %d->%d",
                __FUNCTION__, cameraId,
                m3aState.aeState, new3aState.aeState);
        client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
    }

    if (new3aState.afState != m3aState.afState ||
        new3aState.afMode != m3aState.afMode ||
        new3aState.afTriggerId != m3aState.afTriggerId) {
        ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
                __FUNCTION__, cameraId,
                m3aState.afState, new3aState.afState,
                m3aState.afMode, new3aState.afMode,
                m3aState.afTriggerId, new3aState.afTriggerId);
        client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
    }
    if (new3aState.awbState != m3aState.awbState ||
        new3aState.awbMode != m3aState.awbMode) {
        ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
                __FUNCTION__, cameraId,
                m3aState.awbState, new3aState.awbState,
                m3aState.awbMode, new3aState.awbMode);
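        // There is no dedicated AWB trigger ID; the AE precapture trigger
        // ID is reused for the AWB notification.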
        client->notifyAutoWhitebalance(new3aState.awbState,
                new3aState.aeTriggerId);
    }

    m3aState = new3aState;

    return OK;
}

template<typename Src, typename T>
bool FrameProcessor::get3aResult(const CameraMetadata& result, int32_t tag,
        T* value, int32_t frameNumber, int cameraId) {
    camera_metadata_ro_entry_t entry;
    if (value == NULL) {
        ALOGE("%s: Camera %d: Value to write to is NULL",
                __FUNCTION__, cameraId);
        return false;
    }

    entry = result.find(tag);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No %s provided by HAL for frame %d!",
                __FUNCTION__, cameraId,
                get_camera_metadata_tag_name(tag), frameNumber);
        return false;
    } else {
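        // Dispatch on the size of the source type to read the matching
        // member of the metadata value union.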
        switch (sizeof(Src)) {
            case sizeof(uint8_t):
                *value = static_cast<T>(entry.data.u8[0]);
                break;
            case sizeof(int32_t):
                *value = static_cast<T>(entry.data.i32[0]);
                break;
            default:
                ALOGE("%s: Camera %d: Unsupported source",
                        __FUNCTION__, cameraId);
                return false;
        }
    }
    return true;
}

void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
                                     const camera_frame_metadata &metadata) {

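    // The remote dataCallback interface takes a non-const metadata
    // pointer, so constness is cast away for the call.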
    camera_frame_metadata *metadata_ptr =
        const_cast<camera_frame_metadata*>(&metadata);

    /**
     * Filter out repeated zero-face callbacks, but always deliver the
     * first zero-face frame after a frame that had faces, so the client
     * learns the faces are gone.
     */
    if (metadata.number_of_faces != 0 ||
        mLastFrameNumberOfFaces != metadata.number_of_faces) {

        Camera2Client::SharedCameraCallbacks::Lock
            l(client->mSharedCameraCallbacks);
        if (l.mRemoteCallback != NULL) {
            l.mRemoteCallback->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                                            NULL,
                                            metadata_ptr);
        }
    }

    mLastFrameNumberOfFaces = metadata.number_of_faces;
}

} // namespace camera2
} // namespace android