/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "Camera2_test"
//#define LOG_NDEBUG 0

#include <utils/Log.h>
#include <gtest/gtest.h>
#include <iostream>
#include <fstream>

#include <utils/Vector.h>
#include <gui/CpuConsumer.h>
#include <ui/PixelFormat.h>
#include <system/camera_metadata.h>

#include "camera2_utils.h"
#include "TestExtensions.h"

namespace android {
namespace camera2 {
namespace tests {

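// Test fixture for exercising a camera HAL v2 module: loads the module,
// enumerates cameras, and provides helpers for opening devices, wiring up
// the request/frame metadata queues, and creating output streams.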
class Camera2Test: public testing::Test {
  public:
    void SetUpModule() {
        int res;

        hw_module_t *module = NULL;
        res = hw_get_module(CAMERA_HARDWARE_MODULE_ID,
                (const hw_module_t **)&module);

        ASSERT_EQ(0, res)
                << "Failure opening camera hardware module: " << res;
        ASSERT_TRUE(NULL != module)
                << "No camera module was set by hw_get_module";

        IF_ALOGV() {
            std::cout << "  Camera module name: "
                    << module->name << std::endl;
            std::cout << "  Camera module author: "
                    << module->author << std::endl;
            std::cout << "  Camera module API version: 0x" << std::hex
                    << module->module_api_version << std::endl;
            std::cout << "  Camera module HAL API version: 0x" << std::hex
                    << module->hal_api_version << std::endl;
        }

        int16_t version2_0 = CAMERA_MODULE_API_VERSION_2_0;
        ASSERT_LE(version2_0, module->module_api_version)
                << "Camera module version is 0x"
                << std::hex << module->module_api_version
                << ", should be at least 2.0. (0x"
                << std::hex << CAMERA_MODULE_API_VERSION_2_0 << ")";

        sCameraModule = reinterpret_cast<camera_module_t*>(module);

        sNumCameras = sCameraModule->get_number_of_cameras();
        ASSERT_LT(0, sNumCameras) << "No camera devices available!";

        IF_ALOGV() {
            std::cout << "  Camera device count: " << sNumCameras << std::endl;
        }

        sCameraSupportsHal2 = new bool[sNumCameras];

        for (int i = 0; i < sNumCameras; i++) {
            camera_info info;
            res = sCameraModule->get_camera_info(i, &info);
            ASSERT_EQ(0, res)
                    << "Failure getting camera info for camera " << i;
            IF_ALOGV() {
                std::cout << "  Camera device: " << std::dec
                          << i << std::endl;
                std::cout << "    Facing: " << std::dec
                          << info.facing << std::endl;
                std::cout << "    Orientation: " << std::dec
                          << info.orientation << std::endl;
                std::cout << "    Version: 0x" << std::hex <<
                        info.device_version << std::endl;
            }
            if (info.device_version >= CAMERA_DEVICE_API_VERSION_2_0 &&
                    info.device_version < CAMERA_DEVICE_API_VERSION_3_0) {
                sCameraSupportsHal2[i] = true;
                ASSERT_TRUE(NULL != info.static_camera_characteristics);
                IF_ALOGV() {
                    std::cout << "    Static camera metadata:" << std::endl;
                    dump_indented_camera_metadata(info.static_camera_characteristics,
                            0, 1, 6);
                }
            } else {
                sCameraSupportsHal2[i] = false;
            }
        }
    }

    void TearDownModule() {
        hw_module_t *module = reinterpret_cast<hw_module_t*>(sCameraModule);
        ASSERT_EQ(0, HWModuleHelpers::closeModule(module));
    }

    static const camera_module_t *getCameraModule() {
        return sCameraModule;
    }

    static int getNumCameras() {
        return sNumCameras;
    }

    static bool isHal2Supported(int id) {
        return sCameraSupportsHal2[id];
    }

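    // Opens camera `id` through the module's common open() method; returns
    // NULL if the module is unavailable, the id is out of range, or the
    // device does not support HAL v2.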
    static camera2_device_t *openCameraDevice(int id) {
        ALOGV("Opening camera %d", id);
        if (NULL == sCameraSupportsHal2) return NULL;
        if (id >= sNumCameras) return NULL;
        if (!sCameraSupportsHal2[id]) return NULL;

        hw_device_t *device = NULL;
        const camera_module_t *cam_module = getCameraModule();
        if (cam_module == NULL) {
            return NULL;
        }

        char camId[10];
        int res;

        snprintf(camId, 10, "%d", id);
        res = cam_module->common.methods->open(
            (const hw_module_t*)cam_module,
            camId,
            &device);
        if (res != NO_ERROR || device == NULL) {
            return NULL;
        }
        camera2_device_t *cam_device =
                reinterpret_cast<camera2_device_t*>(device);
        return cam_device;
    }

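    // Wires an opened device to the test harness: the request queue feeds
    // capture requests to the HAL, the frame queue receives result metadata,
    // the listener receives notify() callbacks, and the HAL's vendor tag
    // query ops are registered with the metadata library.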
    static status_t configureCameraDevice(camera2_device_t *dev,
            MetadataQueue &requestQueue,
            MetadataQueue &frameQueue,
            NotifierListener &listener) {

        status_t err;

        err = dev->ops->set_request_queue_src_ops(dev,
                requestQueue.getToConsumerInterface());
        if (err != OK) return err;

        requestQueue.setFromConsumerInterface(dev);

        err = dev->ops->set_frame_queue_dst_ops(dev,
                frameQueue.getToProducerInterface());
        if (err != OK) return err;

        err = listener.getNotificationsFrom(dev);
        if (err != OK) return err;

        vendor_tag_query_ops_t *vendor_metadata_tag_ops;
        err = dev->ops->get_metadata_vendor_tag_ops(dev, &vendor_metadata_tag_ops);
        if (err != OK) return err;

        err = set_camera_metadata_vendor_tag_ops(vendor_metadata_tag_ops);
        if (err != OK) return err;

        return OK;
    }

    static status_t closeCameraDevice(camera2_device_t **cam_dev) {
        int res;
        if (*cam_dev == NULL) return OK;

        ALOGV("Closing camera %p", *cam_dev);

        hw_device_t *dev = reinterpret_cast<hw_device_t *>(*cam_dev);
        res = dev->close(dev);
        *cam_dev = NULL;
        return res;
    }

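    // Opens camera `id`, caches its static metadata, and connects the
    // fixture's request/frame queues and notification listener to it.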
    void setUpCamera(int id) {
        ASSERT_GT(sNumCameras, id);
        status_t res;

        if (mDevice != NULL) {
            closeCameraDevice(&mDevice);
        }
        mId = id;
        mDevice = openCameraDevice(mId);
        ASSERT_TRUE(NULL != mDevice) << "Failed to open camera device";

        camera_info info;
        res = sCameraModule->get_camera_info(id, &info);
        ASSERT_EQ(OK, res);

        mStaticInfo = info.static_camera_characteristics;

        res = configureCameraDevice(mDevice,
                mRequests,
                mFrames,
                mNotifications);
        ASSERT_EQ(OK, res) << "Failure to configure camera device";
    }

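    // Connects a new output stream of the given size and format to the
    // device, tracks it in mStreams, and returns its stream id through *id.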
    void setUpStream(sp<IGraphicBufferProducer> consumer,
            int width, int height, int format, int *id) {
        status_t res;

        StreamAdapter* stream = new StreamAdapter(consumer);

        ALOGV("Creating stream, format 0x%x, %d x %d", format, width, height);
        res = stream->connectToDevice(mDevice, width, height, format);
        ASSERT_EQ(NO_ERROR, res) << "Failed to connect to stream: "
                                 << strerror(-res);
        mStreams.push_back(stream);

        *id = stream->getId();
    }

    void disconnectStream(int id) {
        status_t res;
        unsigned int i = 0;
        for (; i < mStreams.size(); i++) {
            if (mStreams[i]->getId() == id) {
                res = mStreams[i]->disconnect();
                ASSERT_EQ(NO_ERROR, res) <<
                        "Failed to disconnect stream " << id;
                break;
            }
        }
        ASSERT_GT(mStreams.size(), i) << "Stream id not found: " << id;
    }

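    // Looks up the supported resolutions for `format` in the static metadata:
    // RAW and JPEG (BLOB) use their dedicated size lists, everything else
    // falls back to the processed-size list. *list points into the static
    // metadata and holds *count int32 values (width/height pairs).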
    void getResolutionList(int32_t format,
            const int32_t **list,
            size_t *count) {
        ALOGV("Getting resolutions for format %x", format);
        status_t res;
        if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
            camera_metadata_ro_entry_t availableFormats;
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_FORMATS,
                    &availableFormats);
            ASSERT_EQ(OK, res);

            uint32_t formatIdx;
            for (formatIdx = 0; formatIdx < availableFormats.count; formatIdx++) {
                if (availableFormats.data.i32[formatIdx] == format) break;
            }
            ASSERT_NE(availableFormats.count, formatIdx)
                << "No support found for format 0x" << std::hex << format;
        }

        camera_metadata_ro_entry_t availableSizes;
        if (format == HAL_PIXEL_FORMAT_RAW_SENSOR) {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_RAW_SIZES,
                    &availableSizes);
        } else if (format == HAL_PIXEL_FORMAT_BLOB) {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_JPEG_SIZES,
                    &availableSizes);
        } else {
            res = find_camera_metadata_ro_entry(mStaticInfo,
                    ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
                    &availableSizes);
        }
        ASSERT_EQ(OK, res);

        *list = availableSizes.data.i32;
        *count = availableSizes.count;
    }

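    // Polls the HAL's in-progress request count until it drops to zero, or
    // times out after 10 seconds.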
    status_t waitUntilDrained() {
        static const uint32_t kSleepTime = 50000; // 50 ms
        static const uint32_t kMaxSleepTime = 10000000; // 10 s
        ALOGV("%s: Camera %d: Starting wait", __FUNCTION__, mId);

        // TODO: Set up notifications from HAL, instead of sleeping here
        uint32_t totalTime = 0;
        while (mDevice->ops->get_in_progress_count(mDevice) > 0) {
            usleep(kSleepTime);
            totalTime += kSleepTime;
            if (totalTime > kMaxSleepTime) {
                ALOGE("%s: Waited %d us, %d requests still in flight", __FUNCTION__,
                        totalTime, mDevice->ops->get_in_progress_count(mDevice));
                return TIMED_OUT;
            }
        }
        ALOGV("%s: Camera %d: HAL is idle", __FUNCTION__, mId);
        return OK;
    }

    virtual void SetUp() {
        TEST_EXTENSION_FORKING_SET_UP;

        SetUpModule();

        const ::testing::TestInfo* const testInfo =
                ::testing::UnitTest::GetInstance()->current_test_info();
        (void)testInfo;

        ALOGV("*** Starting test %s in test case %s", testInfo->name(),
              testInfo->test_case_name());
        mDevice = NULL;
    }

    virtual void TearDown() {
        TEST_EXTENSION_FORKING_TEAR_DOWN;

        for (unsigned int i = 0; i < mStreams.size(); i++) {
            delete mStreams[i];
        }
        if (mDevice != NULL) {
            closeCameraDevice(&mDevice);
        }

        TearDownModule();
    }

    int mId;
    camera2_device    *mDevice;
    const camera_metadata_t *mStaticInfo;

    MetadataQueue    mRequests;
    MetadataQueue    mFrames;
    NotifierListener mNotifications;

    Vector<StreamAdapter*> mStreams;

  private:
    static camera_module_t *sCameraModule;
    static int              sNumCameras;
    static bool            *sCameraSupportsHal2;
};

camera_module_t *Camera2Test::sCameraModule = NULL;
bool *Camera2Test::sCameraSupportsHal2      = NULL;
int Camera2Test::sNumCameras                = 0;

static const nsecs_t USEC = 1000;
static const nsecs_t MSEC = 1000*USEC;
static const nsecs_t SEC = 1000*MSEC;

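// Opens and then closes every HAL2-capable camera device once, verifying that
// both operations succeed.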
TEST_F(Camera2Test, OpenClose) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        camera2_device_t *d = openCameraDevice(id);
        ASSERT_TRUE(NULL != d) << "Failed to open camera device";

        res = closeCameraDevice(&d);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

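// Captures a single raw frame from each HAL2-capable camera: sets up a
// CpuConsumer-backed RAW_SENSOR stream at the first advertised raw
// resolution, submits one manual capture request, and checks that both the
// result metadata and the image buffer arrive.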
TEST_F(Camera2Test, Capture1Raw) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<BufferQueue> bq = new BufferQueue();
        sp<CpuConsumer> rawConsumer = new CpuConsumer(bq, 1);
        sp<FrameWaiter> rawWaiter = new FrameWaiter();
        rawConsumer->setFrameAvailableListener(rawWaiter);

        const int32_t *rawResolutions;
        size_t rawResolutionsCount;

        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;

        getResolutionList(format,
                &rawResolutions, &rawResolutionsCount);

        if (rawResolutionsCount == 0) {
            const ::testing::TestInfo* const test_info =
                ::testing::UnitTest::GetInstance()->current_test_info();
            std::cerr << "Skipping test "
                      << test_info->test_case_name() << "."
                      << test_info->name()
                      << " because the optional format was not available: "
                      << "RAW_SENSOR" << std::endl;
            return;
        }

        ASSERT_LT((size_t)0, rawResolutionsCount);

        // Pick first available raw resolution
        int width = rawResolutions[0];
        int height = rawResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(bq, width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t exposureTime = 10*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_EXPOSURE_TIME,
                (void**)&exposureTime, 1);
        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 2);
        }

        res = mRequests.enqueue(request);
        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);

        res = mFrames.waitForBuffer(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

        camera_metadata_t *frame;
        res = mFrames.dequeue(&frame);
        ASSERT_EQ(NO_ERROR, res);
        ASSERT_TRUE(frame != NULL);

        IF_ALOGV() {
            std::cout << "Output frame:" << std::endl;
            dump_indented_camera_metadata(frame, 0, 1, 2);
        }

        res = rawWaiter->waitForFrame(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res);

        CpuConsumer::LockedBuffer buffer;
        res = rawConsumer->lockNextBuffer(&buffer);
        ASSERT_EQ(NO_ERROR, res);

        IF_ALOGV() {
            const char *dumpname =
                    "/data/local/tmp/camera2_test-capture1raw-dump.raw";
            ALOGV("Dumping raw buffer to %s", dumpname);
            // Write to file
            std::ofstream rawFile(dumpname);
            size_t bpp = 2;
            for (unsigned int y = 0; y < buffer.height; y++) {
                rawFile.write(
                        (const char *)(buffer.data + y * buffer.stride * bpp),
                        buffer.width * bpp);
            }
            rawFile.close();
        }

        res = rawConsumer->unlockBuffer(buffer);
        ASSERT_EQ(NO_ERROR, res);

        ASSERT_EQ(OK, waitUntilDrained());
        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));

        res = closeCameraDevice(&mDevice);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

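// Captures a burst of ten raw frames per HAL2-capable camera, doubling the
// exposure time with each request, and verifies that result metadata and
// image buffers come back in order (frame counts must be sequential).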
TEST_F(Camera2Test, CaptureBurstRaw) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<BufferQueue> bq = new BufferQueue();
        sp<CpuConsumer> rawConsumer = new CpuConsumer(bq, 1);
        sp<FrameWaiter> rawWaiter = new FrameWaiter();
        rawConsumer->setFrameAvailableListener(rawWaiter);

        const int32_t *rawResolutions;
        size_t rawResolutionsCount;

        int format = HAL_PIXEL_FORMAT_RAW_SENSOR;

        getResolutionList(format,
                &rawResolutions, &rawResolutionsCount);

        if (rawResolutionsCount == 0) {
            const ::testing::TestInfo* const test_info =
                ::testing::UnitTest::GetInstance()->current_test_info();
            std::cerr << "Skipping test "
                      << test_info->test_case_name() << "."
                      << test_info->name()
                      << " because the optional format was not available: "
                      << "RAW_SENSOR" << std::endl;
            return;
        }

        ASSERT_LT((size_t)0, rawResolutionsCount);

        // Pick first available raw resolution
        int width = rawResolutions[0];
        int height = rawResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(bq, width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request template: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 2);
        }

        int numCaptures = 10;

        // Enqueue numCaptures requests with increasing exposure time

        uint64_t exposureTime = 100 * USEC;
        for (int reqCount = 0; reqCount < numCaptures; reqCount++) {
            camera_metadata_t *req;
            req = allocate_camera_metadata(20, 2000);
            append_camera_metadata(req, request);

            add_camera_metadata_entry(req,
                    ANDROID_SENSOR_EXPOSURE_TIME,
                    (void**)&exposureTime, 1);
            exposureTime *= 2;

            res = mRequests.enqueue(req);
            ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: "
                    << strerror(-res);
        }

        // Get frames and image buffers one by one
        uint64_t expectedExposureTime = 100 * USEC;
        for (int frameCount = 0; frameCount < numCaptures; frameCount++) {
            res = mFrames.waitForBuffer(SEC + expectedExposureTime);
            ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

            camera_metadata_t *frame;
            res = mFrames.dequeue(&frame);
            ASSERT_EQ(NO_ERROR, res);
            ASSERT_TRUE(frame != NULL);

            camera_metadata_entry_t frameNumber;
            res = find_camera_metadata_entry(frame,
                    ANDROID_REQUEST_FRAME_COUNT,
                    &frameNumber);
            ASSERT_EQ(NO_ERROR, res);
            ASSERT_EQ(frameCount, *frameNumber.data.i32);

            res = rawWaiter->waitForFrame(SEC + expectedExposureTime);
            ASSERT_EQ(NO_ERROR, res) <<
                    "Never got raw data for capture " << frameCount;

            CpuConsumer::LockedBuffer buffer;
            res = rawConsumer->lockNextBuffer(&buffer);
            ASSERT_EQ(NO_ERROR, res);

            IF_ALOGV() {
                char dumpname[60];
                snprintf(dumpname, 60,
                        "/data/local/tmp/camera2_test-"
                        "captureBurstRaw-dump_%d.raw",
                        frameCount);
                ALOGV("Dumping raw buffer to %s", dumpname);
                // Write to file
                std::ofstream rawFile(dumpname);
                for (unsigned int y = 0; y < buffer.height; y++) {
                    rawFile.write(
                            (const char *)(buffer.data + y * buffer.stride * 2),
                            buffer.width * 2);
                }
                rawFile.close();
            }

            res = rawConsumer->unlockBuffer(buffer);
            ASSERT_EQ(NO_ERROR, res);

            expectedExposureTime *= 2;
        }
    }
}

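// Asks the HAL to construct each of its default request templates and checks
// that every template is non-empty.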
TEST_F(Camera2Test, ConstructDefaultRequests) {

    TEST_EXTENSION_FORKING_INIT;

    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        for (int i = CAMERA2_TEMPLATE_PREVIEW; i < CAMERA2_TEMPLATE_COUNT;
             i++) {
            camera_metadata_t *request = NULL;
            res = mDevice->ops->construct_default_request(mDevice,
                    i,
                    &request);
            EXPECT_EQ(NO_ERROR, res) <<
                    "Unable to construct request from template type " << i;
            EXPECT_TRUE(request != NULL);
            EXPECT_LT((size_t)0, get_camera_metadata_entry_count(request));
            EXPECT_LT((size_t)0, get_camera_metadata_data_count(request));

            IF_ALOGV() {
                std::cout << "  ** Template type " << i << ":" << std::endl;
                dump_indented_camera_metadata(request, 0, 2, 4);
            }

            free_camera_metadata(request);
        }
    }
}

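// Captures a single JPEG (BLOB) image from each HAL2-capable camera using the
// first advertised JPEG resolution, and verifies that the result metadata and
// the compressed buffer both arrive.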
TEST_F(Camera2Test, Capture1Jpeg) {
    status_t res;

    for (int id = 0; id < getNumCameras(); id++) {
        if (!isHal2Supported(id)) continue;

        ASSERT_NO_FATAL_FAILURE(setUpCamera(id));

        sp<BufferQueue> bq = new BufferQueue();
        sp<CpuConsumer> jpegConsumer = new CpuConsumer(bq, 1);
        sp<FrameWaiter> jpegWaiter = new FrameWaiter();
        jpegConsumer->setFrameAvailableListener(jpegWaiter);

        const int32_t *jpegResolutions;
        size_t jpegResolutionsCount;

        int format = HAL_PIXEL_FORMAT_BLOB;

        getResolutionList(format,
                &jpegResolutions, &jpegResolutionsCount);
        ASSERT_LT((size_t)0, jpegResolutionsCount);

        // Pick first available JPEG resolution
        int width = jpegResolutions[0];
        int height = jpegResolutions[1];

        int streamId;
        ASSERT_NO_FATAL_FAILURE(
            setUpStream(bq, width, height, format, &streamId) );

        camera_metadata_t *request;
        request = allocate_camera_metadata(20, 2000);

        uint8_t metadataMode = ANDROID_REQUEST_METADATA_MODE_FULL;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_METADATA_MODE,
                (void**)&metadataMode, 1);
        uint32_t outputStreams = streamId;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_OUTPUT_STREAMS,
                (void**)&outputStreams, 1);

        uint64_t exposureTime = 10*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_EXPOSURE_TIME,
                (void**)&exposureTime, 1);
        uint64_t frameDuration = 30*MSEC;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_FRAME_DURATION,
                (void**)&frameDuration, 1);
        uint32_t sensitivity = 100;
        add_camera_metadata_entry(request,
                ANDROID_SENSOR_SENSITIVITY,
                (void**)&sensitivity, 1);
        uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
        add_camera_metadata_entry(request,
                ANDROID_REQUEST_TYPE,
                (void**)&requestType, 1);

        uint32_t hourOfDay = 12;
        add_camera_metadata_entry(request,
                0x80000000, // EMULATOR_HOUROFDAY
                &hourOfDay, 1);

        IF_ALOGV() {
            std::cout << "Input request: " << std::endl;
            dump_indented_camera_metadata(request, 0, 1, 4);
        }

        res = mRequests.enqueue(request);
        ASSERT_EQ(NO_ERROR, res) << "Can't enqueue request: " << strerror(-res);

        res = mFrames.waitForBuffer(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res) << "No frame to get: " << strerror(-res);

        camera_metadata_t *frame;
        res = mFrames.dequeue(&frame);
        ASSERT_EQ(NO_ERROR, res);
        ASSERT_TRUE(frame != NULL);

        IF_ALOGV() {
            std::cout << "Output frame:" << std::endl;
            dump_indented_camera_metadata(frame, 0, 1, 4);
        }

        res = jpegWaiter->waitForFrame(exposureTime + SEC);
        ASSERT_EQ(NO_ERROR, res);

        CpuConsumer::LockedBuffer buffer;
        res = jpegConsumer->lockNextBuffer(&buffer);
        ASSERT_EQ(NO_ERROR, res);

        IF_ALOGV() {
            const char *dumpname =
                    "/data/local/tmp/camera2_test-capture1jpeg-dump.jpeg";
            ALOGV("Dumping JPEG buffer to %s", dumpname);
            // Write to file
            std::ofstream jpegFile(dumpname);
            size_t bpp = 1;
            for (unsigned int y = 0; y < buffer.height; y++) {
                jpegFile.write(
                        (const char *)(buffer.data + y * buffer.stride * bpp),
                        buffer.width * bpp);
            }
            jpegFile.close();
        }

        res = jpegConsumer->unlockBuffer(buffer);
        ASSERT_EQ(NO_ERROR, res);

        ASSERT_EQ(OK, waitUntilDrained());
        ASSERT_NO_FATAL_FAILURE(disconnectStream(streamId));

        res = closeCameraDevice(&mDevice);
        ASSERT_EQ(NO_ERROR, res) << "Failed to close camera device";
    }
}

} // namespace tests
} // namespace camera2
} // namespace android