JpegProcessor.cpp revision 0fa1e760f7093d025c977e5813e8d3a04c863ce3
1/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "Camera2-JpegProcessor"
18#define ATRACE_TAG ATRACE_TAG_CAMERA
19//#define LOG_NDEBUG 0
20
21#include <netinet/in.h>
22
23#include <utils/Log.h>
24#include <utils/Trace.h>
25
26#include "JpegProcessor.h"
27#include <gui/SurfaceTextureClient.h>
28#include "../Camera2Device.h"
29#include "../Camera2Client.h"
30
31
32namespace android {
33namespace camera2 {
34
/**
 * Construct the JPEG processing thread.
 *
 * @param client    Weak reference to the owning Camera2Client; promoted on
 *                  demand so this processor never keeps the client alive.
 * @param sequencer Weak reference to the CaptureSequencer that is notified
 *                  when a completed JPEG buffer is available.
 *
 * Thread(false): this thread does not need to call into the Java VM.
 * No stream exists until updateStream() is called (mCaptureStreamId starts
 * at NO_STREAM).
 */
JpegProcessor::JpegProcessor(
    wp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mClient(client),
        mSequencer(sequencer),
        mCaptureAvailable(false),
        mCaptureStreamId(NO_STREAM) {
}
44
/**
 * Destructor: tears down the capture stream and associated buffers via
 * deleteStream().
 */
JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}
49
50void JpegProcessor::onFrameAvailable() {
51    Mutex::Autolock l(mInputMutex);
52    if (!mCaptureAvailable) {
53        mCaptureAvailable = true;
54        mCaptureAvailableSignal.signal();
55    }
56}
57
/**
 * Create or reconfigure the JPEG capture stream to match the picture
 * dimensions requested in params.
 *
 * Lazily allocates the CpuConsumer endpoint, its SurfaceTextureClient
 * producer window, and the shared memory heap used to hand JPEG data back
 * to the API client. If an existing stream's dimensions no longer match
 * the requested picture size, the old stream is deleted and a new one is
 * created.
 *
 * @param params current camera parameters (picture size, static metadata)
 * @return OK on success (also when the client has already gone away),
 *         INVALID_OPERATION if ANDROID_JPEG_MAX_SIZE is missing from the
 *         static metadata, NO_MEMORY if the capture heap allocation fails,
 *         or the error from the device stream query/delete/create calls.
 */
status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<Camera2Client> client = mClient.promote();
    if (client == 0) return OK;
    sp<Camera2Device> device = client->getCameraDevice();

    // Find out buffer size for JPEG
    camera_metadata_ro_entry_t maxJpegSize =
            params.staticInfo(ANDROID_JPEG_MAX_SIZE);
    if (maxJpegSize.count == 0) {
        ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
                __FUNCTION__, client->getCameraId());
        return INVALID_OPERATION;
    }

    // One-time allocation of the consumer endpoint and API-side heap;
    // these survive stream re-creation below.
    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        mCaptureConsumer = new CpuConsumer(1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
        mCaptureWindow = new SurfaceTextureClient(
            mCaptureConsumer->getProducerInterface());
        // Create memory for API consumption, sized for the largest JPEG
        // the HAL reports it can produce
        mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
                                       "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, client->getCameraId());
            return NO_MEMORY;
        }
    }

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    client->getCameraId(), strerror(-res), res);
            return res;
        }
        // Picture size changed: drop the old stream so it can be rebuilt
        // with the new dimensions below.
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            res = device->deleteStream(mCaptureStreamId);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        client->getCameraId(), strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production; BLOB format with the max JPEG
        // size as the per-buffer byte size
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0],
                &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }

    }
    return OK;
}
135
136status_t JpegProcessor::deleteStream() {
137    ATRACE_CALL();
138    status_t res;
139
140    Mutex::Autolock l(mInputMutex);
141
142    if (mCaptureStreamId != NO_STREAM) {
143        sp<Camera2Client> client = mClient.promote();
144        if (client == 0) return OK;
145        sp<Camera2Device> device = client->getCameraDevice();
146
147        device->deleteStream(mCaptureStreamId);
148
149        mCaptureHeap.clear();
150        mCaptureWindow.clear();
151        mCaptureConsumer.clear();
152
153        mCaptureStreamId = NO_STREAM;
154    }
155    return OK;
156}
157
158int JpegProcessor::getStreamId() const {
159    Mutex::Autolock l(mInputMutex);
160    return mCaptureStreamId;
161}
162
/**
 * Dump processor state for debugging. Intentionally a no-op: nothing is
 * written to fd and args are ignored.
 */
void JpegProcessor::dump(int fd, const Vector<String16>& args) const {
}
165
/**
 * Main loop body for the JPEG processing thread.
 *
 * Waits (with a kWaitDuration timeout) until onFrameAvailable() flags a
 * pending capture, then drains all available buffers by calling
 * processNewCapture() until it returns non-OK.
 *
 * @return true to keep the thread running (including on wait timeout);
 *         false to exit, only when the owning Camera2Client can no longer
 *         be promoted.
 */
bool JpegProcessor::threadLoop() {
    status_t res;

    // Wait for a capture notification; time out periodically so the
    // thread can notice exit requests.
    {
        Mutex::Autolock l(mInputMutex);
        while (!mCaptureAvailable) {
            res = mCaptureAvailableSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }
        // Consume the flag before processing so a new arrival during
        // processing re-signals.
        mCaptureAvailable = false;
    }

    // Drain all pending capture buffers.
    do {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return false;
        res = processNewCapture(client);
    } while (res == OK);

    return true;
}
187
188status_t JpegProcessor::processNewCapture(sp<Camera2Client> &client) {
189    ATRACE_CALL();
190    status_t res;
191    sp<Camera2Heap> captureHeap;
192
193    CpuConsumer::LockedBuffer imgBuffer;
194
195    res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
196    if (res != OK) {
197        if (res != BAD_VALUE) {
198            ALOGE("%s: Camera %d: Error receiving still image buffer: "
199                    "%s (%d)", __FUNCTION__,
200                    client->getCameraId(), strerror(-res), res);
201        }
202        return res;
203    }
204
205    ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
206            client->getCameraId());
207
208    // TODO: Signal errors here upstream
209    {
210        SharedParameters::Lock l(client->getParameters());
211
212        switch (l.mParameters.state) {
213            case Parameters::STILL_CAPTURE:
214            case Parameters::VIDEO_SNAPSHOT:
215                break;
216            default:
217                ALOGE("%s: Camera %d: Still image produced unexpectedly "
218                        "in state %s!",
219                        __FUNCTION__, client->getCameraId(),
220                        Parameters::getStateName(l.mParameters.state));
221                mCaptureConsumer->unlockBuffer(imgBuffer);
222                return BAD_VALUE;
223        }
224    }
225
226    if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
227        ALOGE("%s: Camera %d: Unexpected format for still image: "
228                "%x, expected %x", __FUNCTION__, client->getCameraId(),
229                imgBuffer.format,
230                HAL_PIXEL_FORMAT_BLOB);
231        mCaptureConsumer->unlockBuffer(imgBuffer);
232        return OK;
233    }
234
235    // Find size of JPEG image
236    size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
237    if (jpegSize == 0) { // failed to find size, default to whole buffer
238        jpegSize = imgBuffer.width;
239    }
240    size_t heapSize = mCaptureHeap->getSize();
241    if (jpegSize > heapSize) {
242        ALOGW("%s: JPEG image is larger than expected, truncating "
243                "(got %d, expected at most %d bytes)",
244                __FUNCTION__, jpegSize, heapSize);
245        jpegSize = heapSize;
246    }
247
248    // TODO: Optimize this to avoid memcopy
249    sp<MemoryBase> captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
250    void* captureMemory = mCaptureHeap->getBase();
251    memcpy(captureMemory, imgBuffer.data, jpegSize);
252
253    mCaptureConsumer->unlockBuffer(imgBuffer);
254
255    sp<CaptureSequencer> sequencer = mSequencer.promote();
256    if (sequencer != 0) {
257        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer);
258    }
259
260    return OK;
261}
262
263/*
264 * JPEG FILE FORMAT OVERVIEW.
265 * http://www.jpeg.org/public/jfif.pdf
266 * (JPEG is the image compression algorithm, actual file format is called JFIF)
267 *
268 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files.  The
269 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
270 * (inclusive).  Because every marker begins with the same byte, they are
271 * referred to by the second byte's value.
272 *
273 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
274 * Following it, "segment" sections begin with other markers, followed by a
275 * 2-byte length (in network byte order), then the segment data.
276 *
277 * For our purposes we will ignore the data, and just use the length to skip to
278 * the next segment.  This is necessary because the data inside segments are
279 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
281 *
282 * After all the segments are processed, the jpeg compressed image stream begins.
283 * This can be considered an opaque format with one requirement: all 0xFF bytes
284 * in this stream must be followed with a 0x00 byte.  This prevents any of the
285 * image data to be interpreted as a segment.  The only exception to this is at
286 * the end of the image stream there is an End of Image (EOI) marker, which is
287 * 0xFF followed by a non-zero (0xD9) byte.
288 */
289
// Marker byte values used by the JFIF parsing helpers below.
const uint8_t MARK = 0xFF; // First byte of every JFIF marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker, in bytes

// Packed layout of a JFIF segment header: a 2-byte marker followed by a
// 2-byte big-endian length.
#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)

/* HELPER FUNCTIONS */

// Does buf point at a Start of Image (0xFF 0xD8) marker?
bool checkJpegStart(uint8_t* buf) {
    if (buf[0] != MARK) {
        return false;
    }
    return buf[1] == SOI;
}
// Does buf point at an End of Image (0xFF 0xD9) marker?
bool checkJpegEnd(uint8_t *buf) {
    if (buf[0] != MARK) {
        return false;
    }
    return buf[1] == EOI;
}
// Identify an arbitrary marker at buf; returns the marker type (second
// byte), or 0 when buf does not hold a valid marker. Note that 0x00 and
// 0xFF are not valid marker types.
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] != MARK) {
        return 0;
    }
    uint8_t type = buf[1];
    if (type == 0 || type == 0xFF) {
        return 0;
    }
    return type;
}
321
322// Return the size of the JPEG, 0 indicates failure
323size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
324    size_t size;
325
326    // First check for JPEG transport header at the end of the buffer
327    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
328    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
329    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
330        size = blob->jpeg_size;
331        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
332            // Verify SOI and EOI markers
333            size_t offset = size - MARKER_LENGTH;
334            uint8_t *end = jpegBuffer + offset;
335            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
336                ALOGV("Found JPEG transport header, img size %d", size);
337                return size;
338            } else {
339                ALOGW("Found JPEG transport header with bad Image Start/End");
340            }
341        } else {
342            ALOGW("Found JPEG transport header with bad size %d", size);
343        }
344    }
345
346    // Check Start of Image
347    if ( !checkJpegStart(jpegBuffer) ) {
348        ALOGE("Could not find start of JPEG marker");
349        return 0;
350    }
351
352    // Read JFIF segment markers, skip over segment data
353    size = 0;
354    while (size <= maxSize - MARKER_LENGTH) {
355        segment_t *segment = (segment_t*)(jpegBuffer + size);
356        uint8_t type = checkJpegMarker(segment->marker);
357        if (type == 0) { // invalid marker, no more segments, begin JPEG data
358            ALOGV("JPEG stream found beginning at offset %d", size);
359            break;
360        }
361        if (type == EOI || size > maxSize - sizeof(segment_t)) {
362            ALOGE("Got premature End before JPEG data, offset %d", size);
363            return 0;
364        }
365        size_t length = ntohs(segment->length);
366        ALOGV("JFIF Segment, type %x length %x", type, length);
367        size += length + MARKER_LENGTH;
368    }
369
370    // Find End of Image
371    // Scan JPEG buffer until End of Image (EOI)
372    bool foundEnd = false;
373    for (size; size <= maxSize - MARKER_LENGTH; size++) {
374        if ( checkJpegEnd(jpegBuffer + size) ) {
375            foundEnd = true;
376            size += MARKER_LENGTH;
377            break;
378        }
379    }
380    if (!foundEnd) {
381        ALOGE("Could not find end of JPEG marker");
382        return 0;
383    }
384
385    if (size > maxSize) {
386        ALOGW("JPEG size %d too large, reducing to maxSize %d", size, maxSize);
387        size = maxSize;
388    }
389    ALOGV("Final JPEG size %d", size);
390    return size;
391}
392
393}; // namespace camera2
394}; // namespace android
395