// JpegProcessor.cpp revision dca254aef0c09867e50fe6613c6fad405de72842
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include "JpegProcessor.h"
#include <gui/Surface.h>
#include "../CameraDeviceBase.h"
#include "../Camera2Client.h"


namespace android {
namespace camera2 {

JpegProcessor::JpegProcessor(
    sp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mDevice(client->getCameraDevice()),
        mSequencer(sequencer),
        mId(client->getCameraId()),
        mCaptureAvailable(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

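// CpuConsumer frame-available callback: note that a capture is pending and
// wake up threadLoop() via mCaptureAvailableSignal.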
void JpegProcessor::onFrameAvailable() {
    Mutex::Autolock l(mInputMutex);
    if (!mCaptureAvailable) {
        mCaptureAvailable = true;
        mCaptureAvailableSignal.signal();
    }
}

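// Create or reconfigure the JPEG (BLOB) output stream to match the current
// picture dimensions. Lazily sets up the CpuConsumer endpoint and the
// MemoryHeapBase used to return completed JPEGs to the API layer.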
status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<CameraDeviceBase> device = mDevice.promote();
    if (device == 0) {
        ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    // Find out buffer size for JPEG
    camera_metadata_ro_entry_t maxJpegSize =
            params.staticInfo(ANDROID_JPEG_MAX_SIZE);
    if (maxJpegSize.count == 0) {
        ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
                __FUNCTION__, mId);
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        mCaptureConsumer = new CpuConsumer(1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
        mCaptureWindow = new Surface(
            mCaptureConsumer->getProducerInterface());
        // Create memory for API consumption
        mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
                                       "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, mId);
            return NO_MEMORY;
        }
    }

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                __FUNCTION__, mId, mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res == -EBUSY) {
                ALOGV("%s: Camera %d: Device is busy, call updateStream again "
                      "after it becomes idle", __FUNCTION__, mId);
                return res;
            } else if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        mId, strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0],
                &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, mId,
                    strerror(-res), res);
            return res;
        }

    }
    return OK;
}

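// Tear down the capture stream and release the consumer, window, and heap.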
status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<CameraDeviceBase> device = mDevice.promote();
        if (device == 0) {
            ALOGE("%s: Camera %d: Device does not exist", __FUNCTION__, mId);
            return INVALID_OPERATION;
        }

        device->deleteStream(mCaptureStreamId);

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int /*fd*/, const Vector<String16>& /*args*/) const {
}

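// Worker loop: wait (with timeout) until onFrameAvailable() signals a pending
// capture, then drain all available buffers through processNewCapture().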
bool JpegProcessor::threadLoop() {
    status_t res;

    {
        Mutex::Autolock l(mInputMutex);
        while (!mCaptureAvailable) {
            res = mCaptureAvailableSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }
        mCaptureAvailable = false;
    }

    do {
        res = processNewCapture();
    } while (res == OK);

    return true;
}

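// Lock the next BLOB buffer from the CpuConsumer, determine the actual JPEG
// size within it, copy the data into mCaptureHeap, and hand the result to the
// CaptureSequencer.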
status_t JpegProcessor::processNewCapture() {
    ATRACE_CALL();
    status_t res;
    sp<Camera2Heap> captureHeap;

    CpuConsumer::LockedBuffer imgBuffer;

    res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
    if (res != OK) {
        if (res != BAD_VALUE) {
            ALOGE("%s: Camera %d: Error receiving still image buffer: "
                    "%s (%d)", __FUNCTION__,
                    mId, strerror(-res), res);
        }
        return res;
    }

    ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
            mId);

    if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGE("%s: Camera %d: Unexpected format for still image: "
                "%x, expected %x", __FUNCTION__, mId,
                imgBuffer.format,
                HAL_PIXEL_FORMAT_BLOB);
        mCaptureConsumer->unlockBuffer(imgBuffer);
        return OK;
    }

    // Find size of JPEG image
    size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
    if (jpegSize == 0) { // failed to find size, default to whole buffer
        jpegSize = imgBuffer.width;
    }
    size_t heapSize = mCaptureHeap->getSize();
    if (jpegSize > heapSize) {
        ALOGW("%s: JPEG image is larger than expected, truncating "
                "(got %zu, expected at most %zu bytes)",
                __FUNCTION__, jpegSize, heapSize);
        jpegSize = heapSize;
    }

    // TODO: Optimize this to avoid memcopy
    sp<MemoryBase> captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
    void* captureMemory = mCaptureHeap->getBase();
    memcpy(captureMemory, imgBuffer.data, jpegSize);

    mCaptureConsumer->unlockBuffer(imgBuffer);

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF.)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files.  The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive).  Because every marker begins with the same byte, they are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we will ignore the data, and just use the length to skip to
 * the next segment.  This is necessary because the data inside a segment is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG-compressed image stream begins.
 * This can be considered an opaque format with one requirement: every 0xFF byte
 * in this stream must be followed by a 0x00 byte.  This prevents any of the
 * image data from being interpreted as a marker.  The only exception is the
 * End of Image (EOI) marker at the end of the image stream, which is 0xFF
 * followed by a non-zero (0xD9) byte.
 */
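/*
 * Illustrative layout of a minimal JFIF stream (a sketch for orientation, not
 * taken from any particular capture):
 *
 *   FF D8              SOI marker
 *   FF E0 00 10 ....   APP0/JFIF segment; the length 0x0010 covers the two
 *                      length bytes plus 14 bytes of payload
 *   FF DB 00 43 ....   DQT segment, and so on for the remaining segments
 *   FF DA 00 0C ....   SOS segment; after its payload the entropy-coded
 *                      image stream begins (any 0xFF in it is followed by 0x00)
 *   FF D9              EOI marker terminating the image
 */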

const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

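// Packed so that a segment_t can be overlaid directly onto the raw marker and
// big-endian length bytes in the buffer, with no padding between the fields.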
#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}

// Return the size of the JPEG, 0 indicates failure
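// Strategy: first look for the camera2_jpeg_blob transport header that the HAL
// may append at the very end of the buffer; if it is missing or inconsistent,
// fall back to walking the JFIF segments and then scanning for the EOI marker.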
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = 0;
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Find End of Image
    // Scan JPEG buffer until End of Image (EOI)
    bool foundEnd = false;
    for ( ; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}

}; // namespace camera2
}; // namespace android
