/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef __ANDROID_HAL_CAMERA2_TESTS_STREAM_FIXTURE__
18#define __ANDROID_HAL_CAMERA2_TESTS_STREAM_FIXTURE__
19
20#include <gtest/gtest.h>
21#include <iostream>
22#include <fstream>
23
24#include <gui/CpuConsumer.h>
25#include <gui/Surface.h>
26#include <utils/Condition.h>
27#include <utils/Mutex.h>
28#include <system/camera_metadata.h>
29
30#include "CameraModuleFixture.h"
31#include "TestExtensions.h"
32
33#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )
34
35namespace android {
36namespace camera2 {
37namespace tests {
38
39// Format specifier for picking the best format for CPU reading the given device
40// version
41#define CAMERA_STREAM_AUTO_CPU_FORMAT (-1)
42
43struct CameraStreamParams;
44
45void PrintTo(const CameraStreamParams& p, ::std::ostream* os);
46
// Parameters describing one camera stream under test: the pixel format to
// request (or CAMERA_STREAM_AUTO_CPU_FORMAT to let the fixture pick the best
// CPU-readable format for the HAL version) and how many consumer buffers to
// allocate.
struct CameraStreamParams {
    int mFormat;     // HAL pixel format, or CAMERA_STREAM_AUTO_CPU_FORMAT
    int mHeapCount;  // max buffers acquired by the CpuConsumer

};
52
53inline ::std::ostream& operator<<(::std::ostream& os, const CameraStreamParams &p) {
54    PrintTo(p, &os);
55    return os;
56}
57
58inline void PrintTo(const CameraStreamParams& p, ::std::ostream* os) {
59    char fmt[100];
60    camera_metadata_enum_snprint(
61        ANDROID_SCALER_AVAILABLE_FORMATS, p.mFormat, fmt, sizeof(fmt));
62
63    *os <<  "{ ";
64    *os <<  "Format: 0x"  << std::hex << p.mFormat    << ", ";
65    *os <<  "Format name: " << fmt << ", ";
66    *os <<  "HeapCount: " <<             p.mHeapCount;
67    *os << " }";
68}
69
70class CameraStreamFixture
71    : public CameraModuleFixture</*InfoQuirk*/true> {
72
73public:
74    CameraStreamFixture(CameraStreamParams p)
75    : CameraModuleFixture(TestSettings::DeviceId()) {
76        TEST_EXTENSION_FORKING_CONSTRUCTOR;
77
78        mParam = p;
79
80        SetUp();
81    }
82
83    ~CameraStreamFixture() {
84        TEST_EXTENSION_FORKING_DESTRUCTOR;
85
86        TearDown();
87    }
88
89private:
90
91    void SetUp() {
92        TEST_EXTENSION_FORKING_SET_UP;
93
94        CameraModuleFixture::SetUp();
95
96        sp<CameraDeviceBase> device = mDevice;
97
98        /* use an arbitrary w,h */
99        if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
100            const int tag = ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES;
101
102            const CameraMetadata& staticInfo = device->info();
103            camera_metadata_ro_entry entry = staticInfo.find(tag);
104            ASSERT_NE(0u, entry.count)
105                << "Missing tag android.scaler.availableProcessedSizes";
106
107            ASSERT_LE(2u, entry.count);
108            /* this seems like it would always be the smallest w,h
109               but we actually make no contract that it's sorted asc */
110            mWidth = entry.data.i32[0];
111            mHeight = entry.data.i32[1];
112        } else {
113            buildOutputResolutions();
114            const int32_t *implDefResolutions;
115            size_t   implDefResolutionsCount;
116
117            int format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
118
119            getResolutionList(format,
120                    &implDefResolutions, &implDefResolutionsCount);
121            ASSERT_NE(0u, implDefResolutionsCount)
122                << "Missing implementation defined sizes";
123            mWidth = implDefResolutions[0];
124            mHeight = implDefResolutions[1];
125        }
126    }
127    void TearDown() {
128        TEST_EXTENSION_FORKING_TEAR_DOWN;
129
130        // important: shut down HAL before releasing streams
131        CameraModuleFixture::TearDown();
132
133        deleteOutputResolutions();
134        mNativeWindow.clear();
135        mCpuConsumer.clear();
136        mFrameListener.clear();
137    }
138
139protected:
140
141    int64_t getMinFrameDurationFor(int32_t format, int32_t width, int32_t height) {
142        int64_t minFrameDuration = -1L;
143        const int tag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
144        sp<CameraDeviceBase> device = mDevice;
145        const CameraMetadata& staticInfo = device->info();
146        camera_metadata_ro_entry_t availableMinDurations = staticInfo.find(tag);
147        for (uint32_t i = 0; i < availableMinDurations.count; i += 4) {
148            if (format == availableMinDurations.data.i64[i] &&
149                    width == availableMinDurations.data.i64[i + 1] &&
150                    height == availableMinDurations.data.i64[i + 2]) {
151                minFrameDuration = availableMinDurations.data.i64[i + 3];
152                break;
153            }
154        }
155        return minFrameDuration;
156    }
157
158    void buildOutputResolutions() {
159        if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
160            return;
161        }
162        if (mOutputResolutions.isEmpty()) {
163            const int tag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
164            const CameraMetadata& staticInfo = mDevice->info();
165            camera_metadata_ro_entry_t availableStrmConfigs = staticInfo.find(tag);
166            ASSERT_EQ(0u, availableStrmConfigs.count % 4);
167            for (uint32_t i = 0; i < availableStrmConfigs.count; i += 4) {
168                int32_t format = availableStrmConfigs.data.i32[i];
169                int32_t width = availableStrmConfigs.data.i32[i + 1];
170                int32_t height = availableStrmConfigs.data.i32[i + 2];
171                int32_t inOrOut = availableStrmConfigs.data.i32[i + 3];
172                if (inOrOut == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
173                    int index = mOutputResolutions.indexOfKey(format);
174                    if (index < 0) {
175                        index = mOutputResolutions.add(format, new Vector<int32_t>());
176                        ASSERT_TRUE(index >= 0);
177                    }
178                    Vector<int32_t> *resolutions = mOutputResolutions.editValueAt(index);
179                    resolutions->add(width);
180                    resolutions->add(height);
181                }
182            }
183        }
184    }
185
186    void getResolutionList(int32_t format,
187            const int32_t **list,
188            size_t *count) {
189        status_t res;
190        ALOGV("Getting resolutions for format %x", format);
191        if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
192            return;
193        }
194        int index = mOutputResolutions.indexOfKey(format);
195        ASSERT_TRUE(index >= 0);
196        Vector<int32_t>* resolutions = mOutputResolutions.valueAt(index);
197        *list = resolutions->array();
198        *count = resolutions->size();
199    }
200
201    void deleteOutputResolutions() {
202        for (uint32_t i = 0; i < mOutputResolutions.size(); i++) {
203            Vector<int32_t>* resolutions = mOutputResolutions.editValueAt(i);
204            delete resolutions;
205        }
206        mOutputResolutions.clear();
207    }
208
209    struct FrameListener : public ConsumerBase::FrameAvailableListener {
210
211        FrameListener() {
212            mPendingFrames = 0;
213        }
214
215        // CpuConsumer::FrameAvailableListener implementation
216        virtual void onFrameAvailable(const BufferItem& /* item */) {
217            ALOGV("Frame now available (start)");
218
219            Mutex::Autolock lock(mMutex);
220            mPendingFrames++;
221            mCondition.signal();
222
223            ALOGV("Frame now available (end)");
224        }
225
226        status_t waitForFrame(nsecs_t timeout) {
227            status_t res;
228            Mutex::Autolock lock(mMutex);
229            while (mPendingFrames == 0) {
230                res = mCondition.waitRelative(mMutex, timeout);
231                if (res != OK) return res;
232            }
233            mPendingFrames--;
234            return OK;
235        }
236
237    private:
238        Mutex mMutex;
239        Condition mCondition;
240        int mPendingFrames;
241    };
242
243    void CreateStream() {
244        sp<CameraDeviceBase> device = mDevice;
245        CameraStreamParams p = mParam;
246
247        sp<IGraphicBufferProducer> producer;
248        sp<IGraphicBufferConsumer> consumer;
249        BufferQueue::createBufferQueue(&producer, &consumer);
250        mCpuConsumer = new CpuConsumer(consumer, p.mHeapCount);
251        mCpuConsumer->setName(String8("CameraStreamTest::mCpuConsumer"));
252
253        mNativeWindow = new Surface(producer);
254
255        int format = MapAutoFormat(p.mFormat);
256
257        ASSERT_EQ(OK,
258            device->createStream(mNativeWindow,
259                mWidth, mHeight, format,
260                &mStreamId));
261
262        ASSERT_NE(-1, mStreamId);
263
264        // do not make 'this' a FrameListener or the lifetime policy will clash
265        mFrameListener = new FrameListener();
266        mCpuConsumer->setFrameAvailableListener(mFrameListener);
267    }
268
269    void DeleteStream() {
270        ASSERT_EQ(OK, mDevice->deleteStream(mStreamId));
271    }
272
273    int MapAutoFormat(int format) {
274        if (format == CAMERA_STREAM_AUTO_CPU_FORMAT) {
275            if (getDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_0) {
276                format = HAL_PIXEL_FORMAT_YCbCr_420_888;
277            } else {
278                format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
279            }
280        }
281        return format;
282    }
283
284    void DumpYuvToFile(const String8 &fileName, const CpuConsumer::LockedBuffer &img) {
285        uint8_t *dataCb, *dataCr;
286        uint32_t stride;
287        uint32_t chromaStride;
288        uint32_t chromaStep;
289
290        switch (img.format) {
291            case HAL_PIXEL_FORMAT_YCbCr_420_888:
292                stride = img.stride;
293                chromaStride = img.chromaStride;
294                chromaStep = img.chromaStep;
295                dataCb = img.dataCb;
296                dataCr = img.dataCr;
297                break;
298            case HAL_PIXEL_FORMAT_YCrCb_420_SP:
299                stride = img.width;
300                chromaStride = img.width;
301                chromaStep = 2;
302                dataCr = img.data + img.width * img.height;
303                dataCb = dataCr + 1;
304                break;
305            case HAL_PIXEL_FORMAT_YV12:
306                stride = img.stride;
307                chromaStride = ALIGN(img.width / 2, 16);
308                chromaStep = 1;
309                dataCr = img.data + img.stride * img.height;
310                dataCb = dataCr + chromaStride * img.height/2;
311                break;
312            default:
313                ALOGE("Unknown format %d, not dumping", img.format);
314                return;
315        }
316
317        // Write Y
318        FILE *yuvFile = fopen(fileName.string(), "w");
319
320        size_t bytes;
321
322        for (size_t y = 0; y < img.height; ++y) {
323            bytes = fwrite(
324                reinterpret_cast<const char*>(img.data + stride * y),
325                1, img.width, yuvFile);
326            if (bytes != img.width) {
327                ALOGE("Unable to write to file %s", fileName.string());
328                fclose(yuvFile);
329                return;
330            }
331        }
332
333        // Write Cb/Cr
334        uint8_t *src = dataCb;
335        for (int c = 0; c < 2; ++c) {
336            for (size_t y = 0; y < img.height / 2; ++y) {
337                uint8_t *px = src + y * chromaStride;
338                if (chromaStep != 1) {
339                    for (size_t x = 0; x < img.width / 2; ++x) {
340                        fputc(*px, yuvFile);
341                        px += chromaStep;
342                    }
343                } else {
344                    bytes = fwrite(reinterpret_cast<const char*>(px),
345                            1, img.width / 2, yuvFile);
346                    if (bytes != img.width / 2) {
347                        ALOGE("Unable to write to file %s", fileName.string());
348                        fclose(yuvFile);
349                        return;
350                    }
351                }
352            }
353            src = dataCr;
354        }
355        fclose(yuvFile);
356    }
357
358    int mWidth;
359    int mHeight;
360
361    int mStreamId;
362
363    android::sp<FrameListener>       mFrameListener;
364    android::sp<CpuConsumer>         mCpuConsumer;
365    android::sp<ANativeWindow>       mNativeWindow;
366    KeyedVector<int32_t, Vector<int32_t>* > mOutputResolutions;
367
368private:
369    CameraStreamParams mParam;
370};
371
372}
373}
374}
375
376#endif
377