/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "FrameProcessor.h"
#include "../Camera2Device.h"
#include "../Camera2Client.h"

namespace android {
namespace camera2 {

FrameProcessor::FrameProcessor(wp<Camera2Client> client):
        Thread(false), mClient(client), mLastFrameNumberOfFaces(0) {
}

FrameProcessor::~FrameProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
}

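// Register a listener for frames whose request ID falls in [minId, maxId).
// The listener is held as a weak pointer and is dropped automatically once
// it can no longer be promoted.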
status_t FrameProcessor::registerListener(int32_t minId,
        int32_t maxId, wp<FilteredListener> listener) {
    Mutex::Autolock l(mInputMutex);
    ALOGV("%s: Registering listener for frame id range %d - %d",
            __FUNCTION__, minId, maxId);
    RangeListener rListener = { minId, maxId, listener };
    mRangeListeners.push_back(rListener);
    return OK;
}

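// Remove every registered entry that matches the given minId, maxId, and
// listener exactly.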
status_t FrameProcessor::removeListener(int32_t minId,
        int32_t maxId, wp<FilteredListener> listener) {
    Mutex::Autolock l(mInputMutex);
    List<RangeListener>::iterator item = mRangeListeners.begin();
    while (item != mRangeListeners.end()) {
        if (item->minId == minId &&
                item->maxId == maxId &&
                item->listener == listener) {
            item = mRangeListeners.erase(item);
        } else {
            item++;
        }
    }
    return OK;
}

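// Write the metadata of the most recently received frame to the given
// file descriptor.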
void FrameProcessor::dump(int fd, const Vector<String16>& args) {
    String8 result("    Latest received frame:\n");
    write(fd, result.string(), result.size());
    mLastFrame.dump(fd, 2, 6);
}

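// Thread body: wait up to kWaitDuration for a new frame from the device,
// then process whatever frames are queued. Returning false exits the loop,
// which happens once the client or device can no longer be promoted.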
bool FrameProcessor::threadLoop() {
    status_t res;

    sp<Camera2Device> device;
    {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return false;
        device = client->getCameraDevice();
        if (device == 0) return false;
    }

    res = device->waitForNextFrame(kWaitDuration);
    if (res == OK) {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return false;
        processNewFrames(client);
    } else if (res != TIMED_OUT) {
        ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
                "frames: %s (%d)", strerror(-res), res);
    }

    return true;
}

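// Drain all frames currently queued on the device, running face detection
// and listener dispatch on each, and keep the last non-empty frame around
// for dump().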
void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
    status_t res;
    ATRACE_CALL();
    CameraMetadata frame;
    while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) {
        camera_metadata_entry_t entry;

        entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Error reading frame number",
                    __FUNCTION__, client->getCameraId());
            break;
        }
        ATRACE_INT("cam2_frame", entry.data.i32[0]);

        res = processFaceDetect(frame, client);
        if (res != OK) break;

        res = processListeners(frame, client);
        if (res != OK) break;

        if (!frame.isEmpty()) {
            mLastFrame.acquire(frame);
        }
    }
    if (res != NOT_ENOUGH_DATA) {
        ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
                __FUNCTION__, client->getCameraId(), strerror(-res), res);
    }
}

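// Deliver the frame to every listener whose registered ID range covers the
// frame's request ID. Listeners are collected under mInputMutex and invoked
// after the lock is released; entries whose weak reference has expired are
// unregistered along the way.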
status_t FrameProcessor::processListeners(const CameraMetadata &frame,
        sp<Camera2Client> &client) {
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;

    entry = frame.find(ANDROID_REQUEST_ID);
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: Error reading frame id",
                __FUNCTION__, client->getCameraId());
        return BAD_VALUE;
    }
    int32_t frameId = entry.data.i32[0];

    List<sp<FilteredListener> > listeners;
    {
        Mutex::Autolock l(mInputMutex);

        List<RangeListener>::iterator item = mRangeListeners.begin();
        while (item != mRangeListeners.end()) {
            if (frameId >= item->minId &&
                    frameId < item->maxId) {
                sp<FilteredListener> listener = item->listener.promote();
                if (listener == 0) {
                    item = mRangeListeners.erase(item);
                    continue;
                } else {
                    listeners.push_back(listener);
                }
            }
            item++;
        }
    }
    ALOGV("Got %zu range listeners out of %zu",
            listeners.size(), mRangeListeners.size());
    List<sp<FilteredListener> >::iterator item = listeners.begin();
    for (; item != listeners.end(); item++) {
        (*item)->onFrameAvailable(frameId, frame);
    }
    return OK;
}

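// Convert face detection results from the frame metadata into the
// camera_frame_metadata/camera_face_t layout expected by the client
// callback, mapping coordinates from the sensor array space to the
// normalized space used by the camera client API.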
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
        sp<Camera2Client> &client) {
    status_t res = BAD_VALUE;
    ATRACE_CALL();
    camera_metadata_ro_entry_t entry;
    bool enableFaceDetect;
    {
        SharedParameters::Lock l(client->getParameters());
        enableFaceDetect = l.mParameters.enableFaceDetect;
    }
    entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);

    // TODO: This should be an error once implementations are compliant
    if (entry.count == 0) {
        return OK;
    }

    uint8_t faceDetectMode = entry.data.u8[0];

    camera_frame_metadata metadata;
    Vector<camera_face_t> faces;
    metadata.number_of_faces = 0;

    if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
        SharedParameters::Lock l(client->getParameters());
        entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
        if (entry.count == 0) {
            // No faces this frame
            /* warning: locks SharedCameraClient */
            callbackFaceDetection(client, metadata);
            return OK;
        }
        metadata.number_of_faces = entry.count / 4;
        if (metadata.number_of_faces >
                l.mParameters.fastInfo.maxFaces) {
            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
                    __FUNCTION__, client->getCameraId(),
                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
            return res;
        }
        const int32_t *faceRects = entry.data.i32;

        entry = frame.find(ANDROID_STATS_FACE_SCORES);
        if (entry.count == 0) {
            ALOGE("%s: Camera %d: Unable to read face scores",
                    __FUNCTION__, client->getCameraId());
            return res;
        }
        const uint8_t *faceScores = entry.data.u8;

        const int32_t *faceLandmarks = NULL;
        const int32_t *faceIds = NULL;

        if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
            entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face landmarks",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceLandmarks = entry.data.i32;

            entry = frame.find(ANDROID_STATS_FACE_IDS);
            if (entry.count == 0) {
                ALOGE("%s: Camera %d: Unable to read face IDs",
                        __FUNCTION__, client->getCameraId());
                return res;
            }
            faceIds = entry.data.i32;
        }

        faces.setCapacity(metadata.number_of_faces);

        size_t maxFaces = metadata.number_of_faces;
        for (size_t i = 0; i < maxFaces; i++) {
            if (faceScores[i] == 0) {
                metadata.number_of_faces--;
                continue;
            }

            camera_face_t face;

            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);

            face.score = faceScores[i];
            if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
                face.id = faceIds[i];
                face.left_eye[0] =
                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
                face.left_eye[1] =
                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
                face.right_eye[0] =
                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
                face.right_eye[1] =
                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
                face.mouth[0] =
                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
                face.mouth[1] =
                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
            } else {
                face.id = 0;
                face.left_eye[0] = face.left_eye[1] = -2000;
                face.right_eye[0] = face.right_eye[1] = -2000;
                face.mouth[0] = face.mouth[1] = -2000;
            }
            faces.push_back(face);
        }

        metadata.faces = faces.editArray();
    }

    /* warning: locks SharedCameraClient */
    callbackFaceDetection(client, metadata);

    return OK;
}

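// Forward face metadata to the client through the preview metadata data
// callback. Consecutive zero-face results are reported only once, so the
// client is not spammed with identical "no faces" callbacks every frame.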
void FrameProcessor::callbackFaceDetection(sp<Camera2Client> client,
                               /*in*/camera_frame_metadata &metadata) {

    /* Filter out repeated 0-face callbacks, but not when the last frame was >0 */
    if (metadata.number_of_faces != 0 ||
            mLastFrameNumberOfFaces != metadata.number_of_faces) {
        Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient);
        if (l.mCameraClient != NULL) {
            l.mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
                    NULL, &metadata);
        }
    }

    mLastFrameNumberOfFaces = metadata.number_of_faces;
}

}; // namespace camera2
}; // namespace android