JpegProcessor.cpp revision ecf17e82505fdb60d59e00b6dd59036df93de655
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2-JpegProcessor"
#define ATRACE_TAG ATRACE_TAG_CAMERA
//#define LOG_NDEBUG 0

#include <netinet/in.h>

#include <binder/MemoryBase.h>
#include <binder/MemoryHeapBase.h>
#include <utils/Log.h>
#include <utils/Trace.h>

#include "JpegProcessor.h"
#include <gui/SurfaceTextureClient.h>
#include "../Camera2Device.h"
#include "../Camera2Client.h"

namespace android {
namespace camera2 {

JpegProcessor::JpegProcessor(
    wp<Camera2Client> client,
    wp<CaptureSequencer> sequencer):
        Thread(false),
        mClient(client),
        mSequencer(sequencer),
        mCaptureAvailable(false),
        mCaptureStreamId(NO_STREAM) {
}

JpegProcessor::~JpegProcessor() {
    ALOGV("%s: Exit", __FUNCTION__);
    deleteStream();
}

void JpegProcessor::onFrameAvailable() {
    Mutex::Autolock l(mInputMutex);
    if (!mCaptureAvailable) {
        mCaptureAvailable = true;
        mCaptureAvailableSignal.signal();
    }
}

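// Creates or reconfigures the still-capture stream. A CpuConsumer-backed
// window is set up once for the HAL to fill with BLOB buffers, and a
// MemoryHeapBase sized from ANDROID_JPEG_MAX_SIZE is allocated for handing
// compressed images to the API. If the existing stream's dimensions no longer
// match the requested picture size, the stream is deleted and recreated.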
status_t JpegProcessor::updateStream(const Parameters &params) {
    ATRACE_CALL();
    ALOGV("%s", __FUNCTION__);
    status_t res;

    Mutex::Autolock l(mInputMutex);

    sp<Camera2Client> client = mClient.promote();
    if (client == 0) return OK;
    sp<Camera2Device> device = client->getCameraDevice();

    // Find out buffer size for JPEG
    camera_metadata_ro_entry_t maxJpegSize =
            params.staticInfo(ANDROID_JPEG_MAX_SIZE);
    if (maxJpegSize.count == 0) {
        ALOGE("%s: Camera %d: Can't find ANDROID_JPEG_MAX_SIZE!",
                __FUNCTION__, client->getCameraId());
        return INVALID_OPERATION;
    }

    if (mCaptureConsumer == 0) {
        // Create CPU buffer queue endpoint
        mCaptureConsumer = new CpuConsumer(1);
        mCaptureConsumer->setFrameAvailableListener(this);
        mCaptureConsumer->setName(String8("Camera2Client::CaptureConsumer"));
        mCaptureWindow = new SurfaceTextureClient(
            mCaptureConsumer->getProducerInterface());
        // Create memory for API consumption
        mCaptureHeap = new MemoryHeapBase(maxJpegSize.data.i32[0], 0,
                                       "Camera2Client::CaptureHeap");
        if (mCaptureHeap->getSize() == 0) {
            ALOGE("%s: Camera %d: Unable to allocate memory for capture",
                    __FUNCTION__, client->getCameraId());
            return NO_MEMORY;
        }
    }

    if (mCaptureStreamId != NO_STREAM) {
        // Check if stream parameters have to change
        uint32_t currentWidth, currentHeight;
        res = device->getStreamInfo(mCaptureStreamId,
                &currentWidth, &currentHeight, 0);
        if (res != OK) {
            ALOGE("%s: Camera %d: Error querying capture output stream info: "
                    "%s (%d)", __FUNCTION__,
                    client->getCameraId(), strerror(-res), res);
            return res;
        }
        if (currentWidth != (uint32_t)params.pictureWidth ||
                currentHeight != (uint32_t)params.pictureHeight) {
            ALOGV("%s: Camera %d: Deleting stream %d since the buffer dimensions changed",
                __FUNCTION__, client->getCameraId(), mCaptureStreamId);
            res = device->deleteStream(mCaptureStreamId);
            if (res != OK) {
                ALOGE("%s: Camera %d: Unable to delete old output stream "
                        "for capture: %s (%d)", __FUNCTION__,
                        client->getCameraId(), strerror(-res), res);
                return res;
            }
            mCaptureStreamId = NO_STREAM;
        }
    }

    if (mCaptureStreamId == NO_STREAM) {
        // Create stream for HAL production
        res = device->createStream(mCaptureWindow,
                params.pictureWidth, params.pictureHeight,
                HAL_PIXEL_FORMAT_BLOB, maxJpegSize.data.i32[0],
                &mCaptureStreamId);
        if (res != OK) {
            ALOGE("%s: Camera %d: Can't create output stream for capture: "
                    "%s (%d)", __FUNCTION__, client->getCameraId(),
                    strerror(-res), res);
            return res;
        }
    }
    return OK;
}

status_t JpegProcessor::deleteStream() {
    ATRACE_CALL();

    Mutex::Autolock l(mInputMutex);

    if (mCaptureStreamId != NO_STREAM) {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return OK;
        sp<Camera2Device> device = client->getCameraDevice();

        device->deleteStream(mCaptureStreamId);

        mCaptureHeap.clear();
        mCaptureWindow.clear();
        mCaptureConsumer.clear();

        mCaptureStreamId = NO_STREAM;
    }
    return OK;
}

int JpegProcessor::getStreamId() const {
    Mutex::Autolock l(mInputMutex);
    return mCaptureStreamId;
}

void JpegProcessor::dump(int fd, const Vector<String16>& args) const {
}

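// Worker loop: wait (with a timeout) until onFrameAvailable() signals that a
// capture buffer is ready, then drain available buffers via
// processNewCapture(). Returning true keeps the thread running; it only stops
// if the client can no longer be promoted.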
bool JpegProcessor::threadLoop() {
    status_t res;

    {
        Mutex::Autolock l(mInputMutex);
        while (!mCaptureAvailable) {
            res = mCaptureAvailableSignal.waitRelative(mInputMutex,
                    kWaitDuration);
            if (res == TIMED_OUT) return true;
        }
        mCaptureAvailable = false;
    }

    do {
        sp<Camera2Client> client = mClient.promote();
        if (client == 0) return false;
        res = processNewCapture(client);
    } while (res == OK);

    return true;
}

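// Handles one completed still capture: lock the next BLOB buffer from the
// CpuConsumer, check that the capture state and pixel format are as expected,
// determine the actual JPEG size within the buffer, copy the compressed data
// into the capture heap, and hand the result to the CaptureSequencer.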
status_t JpegProcessor::processNewCapture(sp<Camera2Client> &client) {
    ATRACE_CALL();
    status_t res;

    CpuConsumer::LockedBuffer imgBuffer;

    res = mCaptureConsumer->lockNextBuffer(&imgBuffer);
    if (res != OK) {
        if (res != BAD_VALUE) {
            ALOGE("%s: Camera %d: Error receiving still image buffer: "
                    "%s (%d)", __FUNCTION__,
                    client->getCameraId(), strerror(-res), res);
        }
        return res;
    }

    ALOGV("%s: Camera %d: Still capture available", __FUNCTION__,
            client->getCameraId());

    // TODO: Signal errors here upstream
    {
        SharedParameters::Lock l(client->getParameters());

        switch (l.mParameters.state) {
            case Parameters::STILL_CAPTURE:
            case Parameters::VIDEO_SNAPSHOT:
                break;
            default:
                ALOGE("%s: Camera %d: Still image produced unexpectedly "
                        "in state %s!",
                        __FUNCTION__, client->getCameraId(),
                        Parameters::getStateName(l.mParameters.state));
                mCaptureConsumer->unlockBuffer(imgBuffer);
                return BAD_VALUE;
        }
    }

    if (imgBuffer.format != HAL_PIXEL_FORMAT_BLOB) {
        ALOGE("%s: Camera %d: Unexpected format for still image: "
                "%x, expected %x", __FUNCTION__, client->getCameraId(),
                imgBuffer.format,
                HAL_PIXEL_FORMAT_BLOB);
        mCaptureConsumer->unlockBuffer(imgBuffer);
        return OK;
    }

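    // The BLOB buffer is treated as a flat byte array from here on:
    // imgBuffer.width is used as its capacity in bytes, both as the scan limit
    // for findJpegSize() and as the fallback size when no valid size is found.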
    // Find size of JPEG image
    size_t jpegSize = findJpegSize(imgBuffer.data, imgBuffer.width);
    if (jpegSize == 0) { // failed to find size, default to whole buffer
        jpegSize = imgBuffer.width;
    }
    size_t heapSize = mCaptureHeap->getSize();
    if (jpegSize > heapSize) {
        ALOGW("%s: JPEG image is larger than expected, truncating "
                "(got %zu, expected at most %zu bytes)",
                __FUNCTION__, jpegSize, heapSize);
        jpegSize = heapSize;
    }

    // TODO: Optimize this to avoid memcopy
    sp<MemoryBase> captureBuffer = new MemoryBase(mCaptureHeap, 0, jpegSize);
    void* captureMemory = mCaptureHeap->getBase();
    memcpy(captureMemory, imgBuffer.data, jpegSize);

    mCaptureConsumer->unlockBuffer(imgBuffer);

    sp<CaptureSequencer> sequencer = mSequencer.promote();
    if (sequencer != 0) {
        sequencer->onCaptureAvailable(imgBuffer.timestamp, captureBuffer);
    }

    return OK;
}

/*
 * JPEG FILE FORMAT OVERVIEW.
 * http://www.jpeg.org/public/jfif.pdf
 * (JPEG is the image compression algorithm; the actual file format is called JFIF)
 *
 * "Markers" are 2-byte patterns used to distinguish parts of JFIF files.  The
 * first byte is always 0xFF, and the second byte is between 0x01 and 0xFE
 * (inclusive).  Because every marker begins with the same byte, they are
 * referred to by the second byte's value.
 *
 * JFIF files all begin with the Start of Image (SOI) marker, which is 0xD8.
 * Following it, "segment" sections begin with other markers, followed by a
 * 2-byte length (in network byte order), then the segment data.
 *
 * For our purposes we will ignore the data, and just use the length to skip to
 * the next segment.  This is necessary because the data inside segments is
 * allowed to contain the End of Image marker (0xFF 0xD9), preventing us from
 * naively scanning until the end.
 *
 * After all the segments are processed, the JPEG-compressed image stream begins.
 * This can be considered an opaque format with one requirement: all 0xFF bytes
 * in this stream must be followed by a 0x00 byte.  This prevents any of the
 * image data from being interpreted as a segment.  The only exception to this
 * is at the end of the image stream, where there is an End of Image (EOI)
 * marker, which is 0xFF followed by a non-zero (0xD9) byte.
 */
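
/*
 * As a rough sketch of the layout described above (the segment types and
 * lengths shown are only illustrative), a minimal JFIF stream of the kind the
 * parser below expects looks like:
 *
 *   FF D8                     SOI marker            (checkJpegStart)
 *   FF E0  00 10  <14 bytes>  APP0 segment: marker, then a 2-byte big-endian
 *                             length (0x0010 covers the length field itself
 *                             plus the payload)
 *   FF DB  00 43  <65 bytes>  DQT segment, same layout
 *   ...                       further segments
 *   <entropy-coded data>      opaque stream; any 0xFF data byte is followed
 *                             by 0x00
 *   FF D9                     EOI marker            (checkJpegEnd)
 */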

const uint8_t MARK = 0xFF; // First byte of marker
const uint8_t SOI = 0xD8; // Start of Image
const uint8_t EOI = 0xD9; // End of Image
const size_t MARKER_LENGTH = 2; // length of a marker

#pragma pack(push)
#pragma pack(1)
typedef struct segment {
    uint8_t marker[MARKER_LENGTH];
    uint16_t length;
} segment_t;
#pragma pack(pop)
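// Note: the 16-bit length stored after a segment marker counts the two length
// bytes plus the payload, but not the marker itself, so the next marker begins
// at (segment start) + MARKER_LENGTH + length. That is the skip findJpegSize()
// performs below with "size += length + MARKER_LENGTH".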

/* HELPER FUNCTIONS */

// check for Start of Image marker
bool checkJpegStart(uint8_t* buf) {
    return buf[0] == MARK && buf[1] == SOI;
}
// check for End of Image marker
bool checkJpegEnd(uint8_t *buf) {
    return buf[0] == MARK && buf[1] == EOI;
}
// check for arbitrary marker, returns marker type (second byte)
// returns 0 if no marker found. Note: 0x00 is not a valid marker type
uint8_t checkJpegMarker(uint8_t *buf) {
    if (buf[0] == MARK && buf[1] > 0 && buf[1] < 0xFF) {
        return buf[1];
    }
    return 0;
}


// Return the size of the JPEG, 0 indicates failure
size_t JpegProcessor::findJpegSize(uint8_t* jpegBuffer, size_t maxSize) {
    size_t size;

    // First check for JPEG transport header at the end of the buffer
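    // (The camera HAL is expected to append a camera2_jpeg_blob struct, which
    // carries a blob ID and the actual compressed size, at the very end of the
    // BLOB buffer; when it is present and its size field is sane, that size is
    // trusted directly instead of parsing the stream.)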
    uint8_t *header = jpegBuffer + (maxSize - sizeof(struct camera2_jpeg_blob));
    struct camera2_jpeg_blob *blob = (struct camera2_jpeg_blob*)(header);
    if (blob->jpeg_blob_id == CAMERA2_JPEG_BLOB_ID) {
        size = blob->jpeg_size;
        if (size > 0 && size <= maxSize - sizeof(struct camera2_jpeg_blob)) {
            // Verify SOI and EOI markers
            size_t offset = size - MARKER_LENGTH;
            uint8_t *end = jpegBuffer + offset;
            if (checkJpegStart(jpegBuffer) && checkJpegEnd(end)) {
                ALOGV("Found JPEG transport header, img size %zu", size);
                return size;
            } else {
                ALOGW("Found JPEG transport header with bad Image Start/End");
            }
        } else {
            ALOGW("Found JPEG transport header with bad size %zu", size);
        }
    }

    // Check Start of Image
    if ( !checkJpegStart(jpegBuffer) ) {
        ALOGE("Could not find start of JPEG marker");
        return 0;
    }

    // Read JFIF segment markers, skip over segment data
    size = MARKER_LENGTH; // skip the SOI marker, which has no length field
    while (size <= maxSize - MARKER_LENGTH) {
        segment_t *segment = (segment_t*)(jpegBuffer + size);
        uint8_t type = checkJpegMarker(segment->marker);
        if (type == 0) { // invalid marker, no more segments, begin JPEG data
            ALOGV("JPEG stream found beginning at offset %zu", size);
            break;
        }
        if (type == EOI || size > maxSize - sizeof(segment_t)) {
            ALOGE("Got premature End before JPEG data, offset %zu", size);
            return 0;
        }
        size_t length = ntohs(segment->length);
        ALOGV("JFIF Segment, type %x length %zx", type, length);
        size += length + MARKER_LENGTH;
    }

    // Scan the compressed image stream until the End of Image (EOI) marker
    bool foundEnd = false;
    for (; size <= maxSize - MARKER_LENGTH; size++) {
        if ( checkJpegEnd(jpegBuffer + size) ) {
            foundEnd = true;
            size += MARKER_LENGTH;
            break;
        }
    }
    if (!foundEnd) {
        ALOGE("Could not find end of JPEG marker");
        return 0;
    }

    if (size > maxSize) {
        ALOGW("JPEG size %zu too large, reducing to maxSize %zu", size, maxSize);
        size = maxSize;
    }
    ALOGV("Final JPEG size %zu", size);
    return size;
}

} // namespace camera2
} // namespace android