JpegProcessor.cpp revision 7b82efe7a376c882f8f938e1c41b8311a8cdda4a
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>
#include <gui/Surface.h>

#include "common/CameraDeviceBase.h"
#include "api1/Camera2Client.h"
#include "api1/client2/Camera2Heap.h"
#include "api1/client2/CaptureSequencer.h"
#include "api1/client2/JpegProcessor.h"

namespace android {
namespace camera2 {

JpegProcessor::JpegProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mDevice(client->getCameraDevice()),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mCaptureAvailable(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

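// Callback from the CpuConsumer (registered as the frame-available listener in
// updateStream()): flag that a capture is ready and wake up threadLoop().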
void JpegProcessor::onFrameAvailable() {
    Mutex::Autolock l(mInputMutex);
    if (!mCaptureAvailable) {
        mCaptureAvailable = true;
        mCaptureAvailableSignal.signal();
    }
}

status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<CameraDeviceBase> device = mDevice.promote();
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    // Find out buffer size for JPEG
    camera_metadata_ro_entry_t maxJpegSize =
            params.staticInfo(ANDROID_JPEG_MAX_SIZE);
    if (maxJpegSize.count == 0) {
        ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
                __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        sp<BufferQueue> bq = new BufferQueue();
        mCaptureConsumer = new CpuConsumer(bq, 1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
        mCaptureWindow = new Surface(
            mCaptureConsumer->getProducerInterface());
        // Create memory for API consumption
        mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
                                       "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, mId);
            return NO_MEMORY;
        }
    }

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                __FUNCTION__, mId, mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                      "after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
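        // For a BLOB-format stream the size argument below (ANDROID_JPEG_MAX_SIZE
        // from the static info) is used as the maximum buffer size in bytes,
        // since the compressed output size varies from capture to capture.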
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0],
                &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, mId,
                    strerror(-res), res);
            return res;
        }

    }
    return OK;
}

status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        device->deleteStream(mCaptureStreamId);

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}

bool JpegProcessor::threadLoop() {
    status_t res;

    {
        Mutex::Autolock l(mInputMutex);
        while (!mCaptureAvailable) {
            res = mCaptureAvailableSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }
        mCaptureAvailable = false;
    }

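    // Drain every pending capture buffer; processNewCapture() stops returning OK
    // (BAD_VALUE once the consumer has no more buffers) and the loop exits.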
    do {
        res = processNewCapture();
    } while (res == OK);

    return true;
}

status_t JpegProcessor::processNewCapture() {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;

    CpuConsumer::LockedBuffer imgBuffer;

    res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
    if (res != OK) {
        if (res != BAD_VALUE) {
            ALOGE("%s: Camera %d: Error receiving still image buffer: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
        }
        return res;
    }

    ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
            mId);

    if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGE("%s: Camera %d: Unexpected format for still image: "
                "%x, expected %x", __FUNCTION__, mId,
                imgBuffer.format,
                HAL_PIXEL_FORMAT_BLOB);
        mCaptureConsumer->unlockBuffer(imgBuffer);
        return OK;
    }

    // Find size of JPEG image
    size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
    if (jpegSize == 0) { // failed to find size, default to whole buffer
        jpegSize = imgBuffer.width;
    }
    size_t heapSize = mCaptureHeap->getSize();
    if (jpegSize > heapSize) {
        ALOGW("%s: JPEG image is larger than expected, truncating "
                "(got %zu, expected at most %zu bytes)",
                __FUNCTION__, jpegSize, heapSize);
        jpegSize = heapSize;
    }

    // TODO: Optimize this to avoid memcopy
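    // Copy the JPEG bytes into the IMemory heap so the CpuConsumer buffer can be
    // unlocked right away; the CaptureSequencer then hands the heap-backed buffer
    // to the client (typically delivered as the compressed-image callback).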
    sp<MemoryBase> captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
    void* captureMemory = mCaptureHeap->getBase();
    memcpy(captureMemory, imgBuffer.data, jpegSize);

    mCaptureConsumer->unlockBuffer(imgBuffer);

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files.  The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive).  Because every marker begins with the same byte, they are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we will ignore the data, and just use the length to skip to
 * the next segment.  This is necessary because the data inside segments is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG-compressed image stream begins.
 * This can be considered an opaque format with one requirement: every 0xFF byte
 * in this stream must be followed by a 0x00 byte.  This prevents any of the
 * image data from being interpreted as a segment.  The only exception is at the
 * end of the image stream, where there is an End of Image (EOI) marker: 0xFF
 * followed by a non-zero (0xD9) byte.
 */
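
/*
 * Illustrative sketch of the layout described above (made-up example values,
 * not taken from a real capture):
 *
 *   FF D8              SOI marker
 *   FF E0 00 10 ...    APP0 segment; the length 0x0010 counts the two length
 *                      bytes plus 14 bytes of payload, so the next marker
 *                      starts (2 + 0x10) bytes after the APP0 marker
 *   FF DB 00 43 ...    DQT segment, skipped the same way
 *   ...                entropy-coded image data; every 0xFF inside it is
 *                      followed by 0x00
 *   FF D9              EOI marker
 *
 * findJpegSize() below walks the segments using their length fields, then
 * scans the remaining image data for the EOI marker to find the total size.
 */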

const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

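// The segment header is parsed by overlaying this struct directly on the raw
// buffer, so it must be byte-packed: no padding between the 2-byte marker and
// the 16-bit big-endian length field.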
#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}

// Return the size of the JPEG, 0 indicates failure
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
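    // The HAL is expected to append a camera2_jpeg_blob struct (a blob ID plus
    // the actual compressed size) at the very end of the maximum-sized BLOB
    // buffer; if it is present and consistent, it gives the JPEG size directly
    // without having to parse the stream.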
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = 0;
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Scan the rest of the JPEG buffer until the End of Image (EOI) marker
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}

}; // namespace camera2
}; // namespace android