// FrameProcessor.cpp revision a16733eeb9c40db4793bec408f29b4204e5f23b1
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2Client::FrameProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <utils/Trace.h>

#include "FrameProcessor.h"
#include "../Camera2Device.h"
#include "../Camera2Client.h"

namespace android {
namespace camera2 {

31FrameProcessor::FrameProcessor(wp<Camera2Client> client):
32        Thread(false), mClient(client) {
33}
34
35FrameProcessor::~FrameProcessor() {
36    ALOGV("%s: Exit", __FUNCTION__);
37}
38
39void FrameProcessor::dump(int fd, const Vector<String16>& args) {
40    String8 result("    Latest received frame:\n");
41    write(fd, result.string(), result.size());
42    mLastFrame.dump(fd, 2, 6);
43}
44
45bool FrameProcessor::threadLoop() {
46    status_t res;
47
48    sp<Camera2Device> device;
49    {
50        sp<Camera2Client> client = mClient.promote();
51        if (client == 0) return false;
52        device = client->getCameraDevice();
53    }
54
55    res = device->waitForNextFrame(kWaitDuration);
56    if (res == OK) {
57        sp<Camera2Client> client = mClient.promote();
58        if (client == 0) return false;
59        processNewFrames(client);
60    } else if (res != TIMED_OUT) {
61        ALOGE("Camera2Client::FrameProcessor: Error waiting for new "
62                "frames: %s (%d)", strerror(-res), res);
63    }
64
65    return true;
66}
67
68void FrameProcessor::processNewFrames(sp<Camera2Client> &client) {
69    status_t res;
70    CameraMetadata frame;
71    while ( (res = client->getCameraDevice()->getNextFrame(&frame)) == OK) {
72        camera_metadata_entry_t entry;
73        entry = frame.find(ANDROID_REQUEST_FRAME_COUNT);
74        if (entry.count == 0) {
75            ALOGE("%s: Camera %d: Error reading frame number: %s (%d)",
76                    __FUNCTION__, client->getCameraId(), strerror(-res), res);
77            break;
78        }
79
80        res = processFaceDetect(frame, client);
81        if (res != OK) break;
82
83        mLastFrame.acquire(frame);
84    }
85    if (res != NOT_ENOUGH_DATA) {
86        ALOGE("%s: Camera %d: Error getting next frame: %s (%d)",
87                __FUNCTION__, client->getCameraId(), strerror(-res), res);
88        return;
89    }
90
91    return;
92}
93
94status_t FrameProcessor::processFaceDetect(
95    const CameraMetadata &frame, sp<Camera2Client> &client) {
96    status_t res;
97    camera_metadata_ro_entry_t entry;
98    bool enableFaceDetect;
99    int maxFaces;
100    {
101        SharedParameters::Lock l(client->getParameters());
102        enableFaceDetect = l.mParameters.enableFaceDetect;
103    }
104    entry = frame.find(ANDROID_STATS_FACE_DETECT_MODE);
105
106    // TODO: This should be an error once implementations are compliant
107    if (entry.count == 0) {
108        return OK;
109    }
110
111    uint8_t faceDetectMode = entry.data.u8[0];
112
113    camera_frame_metadata metadata;
114    Vector<camera_face_t> faces;
115    metadata.number_of_faces = 0;
116
117    if (enableFaceDetect && faceDetectMode != ANDROID_STATS_FACE_DETECTION_OFF) {
118        SharedParameters::Lock l(client->getParameters());
119        entry = frame.find(ANDROID_STATS_FACE_RECTANGLES);
120        if (entry.count == 0) {
121            ALOGE("%s: Camera %d: Unable to read face rectangles",
122                    __FUNCTION__, client->getCameraId());
123            return res;
124        }
125        metadata.number_of_faces = entry.count / 4;
126        if (metadata.number_of_faces >
127                l.mParameters.fastInfo.maxFaces) {
128            ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
129                    __FUNCTION__, client->getCameraId(),
130                    metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
131            return res;
132        }
133        const int32_t *faceRects = entry.data.i32;
134
135        entry = frame.find(ANDROID_STATS_FACE_SCORES);
136        if (entry.count == 0) {
137            ALOGE("%s: Camera %d: Unable to read face scores",
138                    __FUNCTION__, client->getCameraId());
139            return res;
140        }
141        const uint8_t *faceScores = entry.data.u8;
142
143        const int32_t *faceLandmarks = NULL;
144        const int32_t *faceIds = NULL;
145
146        if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
147            entry = frame.find(ANDROID_STATS_FACE_LANDMARKS);
148            if (entry.count == 0) {
149                ALOGE("%s: Camera %d: Unable to read face landmarks",
150                        __FUNCTION__, client->getCameraId());
151                return res;
152            }
153            faceLandmarks = entry.data.i32;
154
155            entry = frame.find(ANDROID_STATS_FACE_IDS);
156
157            if (entry.count == 0) {
158                ALOGE("%s: Camera %d: Unable to read face IDs",
159                        __FUNCTION__, client->getCameraId());
160                return res;
161            }
162            faceIds = entry.data.i32;
163        }
164
165        faces.setCapacity(metadata.number_of_faces);
166
167        for (int i = 0; i < metadata.number_of_faces; i++) {
168            camera_face_t face;
169
170            face.rect[0] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 0]);
171            face.rect[1] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 1]);
172            face.rect[2] = l.mParameters.arrayXToNormalized(faceRects[i*4 + 2]);
173            face.rect[3] = l.mParameters.arrayYToNormalized(faceRects[i*4 + 3]);
174
175            face.score = faceScores[i];
176            if (faceDetectMode == ANDROID_STATS_FACE_DETECTION_FULL) {
177                face.id = faceIds[i];
178                face.left_eye[0] =
179                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 0]);
180                face.left_eye[1] =
181                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 1]);
182                face.right_eye[0] =
183                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 2]);
184                face.right_eye[1] =
185                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 3]);
186                face.mouth[0] =
187                        l.mParameters.arrayXToNormalized(faceLandmarks[i*6 + 4]);
188                face.mouth[1] =
189                        l.mParameters.arrayYToNormalized(faceLandmarks[i*6 + 5]);
190            } else {
191                face.id = 0;
192                face.left_eye[0] = face.left_eye[1] = -2000;
193                face.right_eye[0] = face.right_eye[1] = -2000;
194                face.mouth[0] = face.mouth[1] = -2000;
195            }
196            faces.push_back(face);
197        }
198
199        metadata.faces = faces.editArray();
200    }
201
202    if (metadata.number_of_faces != 0) {
203        Camera2Client::SharedCameraClient::Lock l(client->mSharedCameraClient);
204        if (l.mCameraClient != NULL) {
205            l.mCameraClient->dataCallback(CAMERA_MSG_PREVIEW_METADATA,
206                    NULL, &metadata);
207        }
208    }
209    return OK;
210}


}; // namespace camera2
}; // namespace android