ProCameraTests.cpp revision dcb07d51e307019731147751946774f45321edfb
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>
#include <iostream>

#include <binder/IPCThreadState.h>
#include <utils/Thread.h>

#include "Camera.h"
#include "ProCamera.h"
#include <utils/Vector.h>
#include <utils/Mutex.h>
#include <utils/Condition.h>

#include <gui/SurfaceComposerClient.h>
#include <gui/Surface.h>

#include <system/camera_metadata.h>
#include <hardware/camera2.h> // for CAMERA2_TEMPLATE_PREVIEW only
#include <camera/CameraMetadata.h>

namespace android {
namespace camera2 {
namespace tests {
namespace client {

#define CAMERA_ID 0
#define TEST_DEBUGGING 0

#define TEST_LISTENER_TIMEOUT 1000000000 // 1 second listener timeout, in ns
#define TEST_FORMAT HAL_PIXEL_FORMAT_Y16 // TODO: YUY2 instead

#define TEST_FORMAT_MAIN HAL_PIXEL_FORMAT_Y8
#define TEST_FORMAT_DEPTH HAL_PIXEL_FORMAT_Y16

// defaults for display "test"
#define TEST_DISPLAY_FORMAT HAL_PIXEL_FORMAT_Y16
#define TEST_DISPLAY_WIDTH 1280
#define TEST_DISPLAY_HEIGHT 960

#define TEST_CPU_FRAME_COUNT 2
#define TEST_CPU_HEAP_COUNT 5

#if TEST_DEBUGGING
#define dout std::cerr
#else
#define dout if (0) std::cerr
#endif

#define EXPECT_OK(x) EXPECT_EQ(OK, (x))
#define ASSERT_OK(x) ASSERT_EQ(OK, (x))

class ProCameraTest;

enum ProEvent {
    UNKNOWN,
    ACQUIRED,
    RELEASED,
    STOLEN,
    BUFFER_RECEIVED,
    RESULT_RECEIVED,
};

inline int ProEvent_Mask(ProEvent e) {
    return (1 << static_cast<int>(e));
}

typedef Vector<ProEvent> EventList;

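// Runs the binder thread pool for the test process so that asynchronous
// ProCameraListener callbacks can be delivered.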
class ProCameraTestThread : public Thread
{
public:
    ProCameraTestThread() {
    }

    virtual bool threadLoop() {
        mProc = ProcessState::self();
        mProc->startThreadPool();

        IPCThreadState *ptr = IPCThreadState::self();

        ptr->joinThreadPool();

        return false;
    }

    sp<ProcessState> mProc;
};

class ProCameraTestListener : public ProCameraListener {

public:
    static const int EVENT_MASK_ALL = 0xFFFFFFFF;

    ProCameraTestListener() {
        mEventMask = EVENT_MASK_ALL;
    }

    status_t WaitForEvent() {
        Mutex::Autolock cal(mConditionMutex);

        {
            Mutex::Autolock al(mListenerMutex);

            if (mProEventList.size() > 0) {
                return OK;
            }
        }

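        // QueueEvent() broadcasts without holding mConditionMutex, so a
        // wakeup queued between the check above and this wait can be missed;
        // the relative timeout bounds how long we block in that case.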
        return mListenerCondition.waitRelative(mConditionMutex,
                                               TEST_LISTENER_TIMEOUT);
    }

    /* Read all pending events into out; the existing queue is flushed. */
    void ReadEvents(EventList& out) {
        Mutex::Autolock al(mListenerMutex);

        for (size_t i = 0; i < mProEventList.size(); ++i) {
            out.push(mProEventList[i]);
        }

        mProEventList.clear();
    }

    /**
     * Dequeue 1 event from the event queue.
     * Returns UNKNOWN if the queue is empty.
     */
    ProEvent ReadEvent() {
        Mutex::Autolock al(mListenerMutex);

        if (mProEventList.size() == 0) {
            return UNKNOWN;
        }

        ProEvent ev = mProEventList[0];
        mProEventList.removeAt(0);

        return ev;
    }

    void SetEventMask(int eventMask) {
        Mutex::Autolock al(mListenerMutex);
        mEventMask = eventMask;
    }

private:
    void QueueEvent(ProEvent ev) {
        bool eventAdded = false;
        {
            Mutex::Autolock al(mListenerMutex);

            if (ProEvent_Mask(ev) & mEventMask) {
                mProEventList.push(ev);
                eventAdded = true;
            }
        }

        if (eventAdded) {
            mListenerCondition.broadcast();
        }
    }

protected:

    //////////////////////////////////////////////////
    ///////// ProCameraListener //////////////////////
    //////////////////////////////////////////////////

    // Lock has been acquired. Write operations now available.
    virtual void onLockAcquired() {
        QueueEvent(ACQUIRED);
    }
    // Lock has been released with exclusiveUnlock.
    virtual void onLockReleased() {
        QueueEvent(RELEASED);
    }

    // Lock has been stolen by another client.
    virtual void onLockStolen() {
        QueueEvent(STOLEN);
    }

    // Trigger notification from the camera; just log it.
    virtual void onTriggerNotify(int32_t ext1, int32_t ext2, int32_t ext3) {

        dout << "Trigger notify: " << ext1 << " " << ext2
             << " " << ext3 << std::endl;
    }

    virtual void onBufferReceived(int streamId,
                                  const CpuConsumer::LockedBuffer& buf) {

        dout << "Buffer received on streamId = " << streamId <<
                ", dataPtr = " << (void*)buf.data << std::endl;

        QueueEvent(BUFFER_RECEIVED);
    }
    virtual void onResultReceived(int32_t frameId,
                                  camera_metadata* request) {
        dout << "Result received frameId = " << frameId
             << ", requestPtr = " << (void*)request << std::endl;
        QueueEvent(RESULT_RECEIVED);
        free_camera_metadata(request);
    }

    // TODO: remove

    virtual void notify(int32_t , int32_t , int32_t ) {}
    virtual void postData(int32_t , const sp<IMemory>& ,
                          camera_frame_metadata_t *) {}
    virtual void postDataTimestamp(nsecs_t , int32_t , const sp<IMemory>& ) {}

    Vector<ProEvent>  mProEventList;
    Mutex             mListenerMutex;
    Mutex             mConditionMutex;
    Condition         mListenerCondition;
    int               mEventMask;
};

class ProCameraTest : public ::testing::Test {

public:
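    // Display parameters can be overridden at runtime via the
    // TEST_DISPLAY_SECS, TEST_DISPLAY_FORMAT, TEST_DISPLAY_WIDTH and
    // TEST_DISPLAY_HEIGHT environment variables.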
    ProCameraTest() {
        char* displaySecsEnv = getenv("TEST_DISPLAY_SECS");
        if (displaySecsEnv != NULL) {
            mDisplaySecs = atoi(displaySecsEnv);
            if (mDisplaySecs < 0) {
                mDisplaySecs = 0;
            }
        } else {
            mDisplaySecs = 0;
        }

        char* displayFmtEnv = getenv("TEST_DISPLAY_FORMAT");
        if (displayFmtEnv != NULL) {
            mDisplayFmt = FormatFromString(displayFmtEnv);
        } else {
            mDisplayFmt = TEST_DISPLAY_FORMAT;
        }

        char* displayWidthEnv = getenv("TEST_DISPLAY_WIDTH");
        if (displayWidthEnv != NULL) {
            mDisplayW = atoi(displayWidthEnv);
            if (mDisplayW < 0) {
                mDisplayW = 0;
            }
        } else {
            mDisplayW = TEST_DISPLAY_WIDTH;
        }

        char* displayHeightEnv = getenv("TEST_DISPLAY_HEIGHT");
        if (displayHeightEnv != NULL) {
            mDisplayH = atoi(displayHeightEnv);
            if (mDisplayH < 0) {
                mDisplayH = 0;
            }
        } else {
            mDisplayH = TEST_DISPLAY_HEIGHT;
        }
    }

    static void SetUpTestCase() {
        // Binder Thread Pool Initialization
        mTestThread = new ProCameraTestThread();
        mTestThread->run("ProCameraTestThread");
    }

    virtual void SetUp() {
        mCamera = ProCamera::connect(CAMERA_ID);
        ASSERT_NE((void*)NULL, mCamera.get());

        mListener = new ProCameraTestListener();
        mCamera->setListener(mListener);
    }

    virtual void TearDown() {
        ASSERT_NE((void*)NULL, mCamera.get());
        mCamera->disconnect();
    }

protected:
    sp<ProCamera> mCamera;
    sp<ProCameraTestListener> mListener;

    static sp<Thread> mTestThread;

    int mDisplaySecs;
    int mDisplayFmt;
    int mDisplayW;
    int mDisplayH;

    sp<SurfaceComposerClient> mComposerClient;
    sp<SurfaceControl> mSurfaceControl;

    sp<SurfaceComposerClient> mDepthComposerClient;
    sp<SurfaceControl> mDepthSurfaceControl;

    int getSurfaceWidth() {
        return 512;
    }
    int getSurfaceHeight() {
        return 512;
    }

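    // Create a small on-screen surface (via SurfaceFlinger) for the camera to
    // stream into when rendering to the display is enabled.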
    void createOnScreenSurface(sp<Surface>& surface) {
        mComposerClient = new SurfaceComposerClient;
        ASSERT_EQ(NO_ERROR, mComposerClient->initCheck());

        mSurfaceControl = mComposerClient->createSurface(
                String8("ProCameraTest StreamingImage Surface"),
                getSurfaceWidth(), getSurfaceHeight(),
                PIXEL_FORMAT_RGB_888, 0);

        ASSERT_TRUE(mSurfaceControl != NULL);
        ASSERT_TRUE(mSurfaceControl->isValid());

        mSurfaceControl->setPosition(0, 0);

        SurfaceComposerClient::openGlobalTransaction();
        ASSERT_EQ(NO_ERROR, mSurfaceControl->setLayer(0x7FFFFFFF));
        ASSERT_EQ(NO_ERROR, mSurfaceControl->show());
        SurfaceComposerClient::closeGlobalTransaction();

        surface = mSurfaceControl->getSurface();
        ASSERT_NE((void*)NULL, surface.get());
    }

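    // Same as createOnScreenSurface(), but positioned to the right of the
    // main surface so both streams are visible at once.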
    void createDepthOnScreenSurface(sp<Surface>& surface) {
        mDepthComposerClient = new SurfaceComposerClient;
        ASSERT_EQ(NO_ERROR, mDepthComposerClient->initCheck());

        mDepthSurfaceControl = mDepthComposerClient->createSurface(
                String8("ProCameraTest StreamingImage Surface"),
                getSurfaceWidth(), getSurfaceHeight(),
                PIXEL_FORMAT_RGB_888, 0);

        ASSERT_TRUE(mDepthSurfaceControl != NULL);
        ASSERT_TRUE(mDepthSurfaceControl->isValid());

        mDepthSurfaceControl->setPosition(640, 0);

        SurfaceComposerClient::openGlobalTransaction();
        ASSERT_EQ(NO_ERROR, mDepthSurfaceControl->setLayer(0x7FFFFFFF));
        ASSERT_EQ(NO_ERROR, mDepthSurfaceControl->show());
        SurfaceComposerClient::closeGlobalTransaction();

        surface = mDepthSurfaceControl->getSurface();
        ASSERT_NE((void*)NULL, surface.get());
    }

    template <typename T>
    static bool ExistsItem(T needle, T* array, size_t count) {
        if (!array) {
            return false;
        }

        for (size_t i = 0; i < count; ++i) {
            if (array[i] == needle) {
                return true;
            }
        }
        return false;
    }

    static int FormatFromString(const char* str) {
        std::string s(str);

#define CMP_STR(x, y)                               \
        if (s == #x) return HAL_PIXEL_FORMAT_ ## y;
#define CMP_STR_SAME(x) CMP_STR(x, x)

        CMP_STR_SAME(Y16);
        CMP_STR_SAME(Y8);
        CMP_STR_SAME(YV12);
        CMP_STR(NV16, YCbCr_422_SP);
        CMP_STR(NV21, YCrCb_420_SP);
        CMP_STR(YUY2, YCbCr_422_I);
        CMP_STR(RAW,  RAW_SENSOR);
        CMP_STR(RGBA, RGBA_8888);

        std::cerr << "Unknown format string " << str << std::endl;
        return -1;
    }

    /**
     * Create a streaming request for these output streams from a template,
     * and submit it.
     */
    void createSubmitRequestForStreams(uint8_t* streamIds, size_t count) {

        ASSERT_NE((void*)NULL, streamIds);
        ASSERT_LT(0u, count);

        camera_metadata_t *requestTmp = NULL;
        EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                                /*out*/&requestTmp));
        ASSERT_NE((void*)NULL, requestTmp);
        CameraMetadata request(requestTmp);

        // set the output streams (the default is empty)
        uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
        request.update(tag, streamIds, count);

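        // Hand the raw metadata buffer to submitRequest(), then reclaim
        // ownership so the CameraMetadata wrapper frees it on destruction.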
        requestTmp = request.release();
        EXPECT_OK(mCamera->submitRequest(requestTmp, /*streaming*/true));
        request.acquire(requestTmp);
    }

};

sp<Thread> ProCameraTest::mTestThread;

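// Sanity check the static metadata: YV12 and NV21 should be listed among the
// available output formats.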
TEST_F(ProCameraTest, AvailableFormats) {
    if (HasFatalFailure()) {
        return;
    }

    CameraMetadata staticInfo = mCamera->getCameraInfo(CAMERA_ID);
    ASSERT_FALSE(staticInfo.isEmpty());

    uint32_t tag = static_cast<uint32_t>(ANDROID_SCALER_AVAILABLE_FORMATS);
    EXPECT_TRUE(staticInfo.exists(tag));
    camera_metadata_entry_t entry = staticInfo.find(tag);

    EXPECT_TRUE(ExistsItem<int32_t>(HAL_PIXEL_FORMAT_YV12,
                                    entry.data.i32, entry.count));
    EXPECT_TRUE(ExistsItem<int32_t>(HAL_PIXEL_FORMAT_YCrCb_420_SP,
                                    entry.data.i32, entry.count));
}

// test around exclusiveTryLock (immediate locking)
TEST_F(ProCameraTest, LockingImmediate) {

    if (HasFatalFailure()) {
        return;
    }

    mListener->SetEventMask(ProEvent_Mask(ACQUIRED) |
                            ProEvent_Mask(STOLEN)   |
                            ProEvent_Mask(RELEASED));

    EXPECT_FALSE(mCamera->hasExclusiveLock());
    EXPECT_EQ(OK, mCamera->exclusiveTryLock());
    // at this point we definitely have the lock

    EXPECT_EQ(OK, mListener->WaitForEvent());
    EXPECT_EQ(ACQUIRED, mListener->ReadEvent());

    EXPECT_TRUE(mCamera->hasExclusiveLock());
    EXPECT_EQ(OK, mCamera->exclusiveUnlock());

    EXPECT_EQ(OK, mListener->WaitForEvent());
    EXPECT_EQ(RELEASED, mListener->ReadEvent());

    EXPECT_FALSE(mCamera->hasExclusiveLock());
}

// test around exclusiveLock (locking at some future point in time)
TEST_F(ProCameraTest, LockingAsynchronous) {

    if (HasFatalFailure()) {
        return;
    }

    mListener->SetEventMask(ProEvent_Mask(ACQUIRED) |
                            ProEvent_Mask(STOLEN)   |
                            ProEvent_Mask(RELEASED));

    // TODO: Add another ProCamera client that holds the lock here, so we can
    // test that the lock won't be acquired immediately.

    EXPECT_FALSE(mCamera->hasExclusiveLock());
    EXPECT_EQ(OK, mCamera->exclusiveTryLock());
    // at this point we definitely have the lock

    EXPECT_EQ(OK, mListener->WaitForEvent());
    EXPECT_EQ(ACQUIRED, mListener->ReadEvent());

    EXPECT_TRUE(mCamera->hasExclusiveLock());
    EXPECT_EQ(OK, mCamera->exclusiveUnlock());

    EXPECT_EQ(OK, mListener->WaitForEvent());
    EXPECT_EQ(RELEASED, mListener->ReadEvent());

    EXPECT_FALSE(mCamera->hasExclusiveLock());
}

// Stream directly to the screen.
TEST_F(ProCameraTest, DISABLED_StreamingImageSingle) {
    if (HasFatalFailure()) {
        return;
    }

    sp<Surface> surface;
    if (mDisplaySecs > 0) {
        createOnScreenSurface(/*out*/surface);
    } else {
        dout << "Skipping, will not render to screen" << std::endl;
        return;
    }

    int streamId = -1;
    EXPECT_OK(mCamera->createStream(mDisplayW, mDisplayH, mDisplayFmt, surface,
                                    &streamId));
    EXPECT_NE(-1, streamId);

    EXPECT_OK(mCamera->exclusiveTryLock());

    uint8_t streams[] = { static_cast<uint8_t>(streamId) };
    ASSERT_NO_FATAL_FAILURE(createSubmitRequestForStreams(streams, /*count*/1));

    dout << "will sleep now for " << mDisplaySecs << std::endl;
    sleep(mDisplaySecs);

    EXPECT_OK(mCamera->deleteStream(streamId));
    EXPECT_OK(mCamera->exclusiveUnlock());
}

// Stream directly to the screen.
TEST_F(ProCameraTest, DISABLED_StreamingImageDual) {
    if (HasFatalFailure()) {
        return;
    }
    sp<Surface> surface;
    sp<Surface> depthSurface;
    if (mDisplaySecs > 0) {
        createOnScreenSurface(/*out*/surface);
        createDepthOnScreenSurface(/*out*/depthSurface);
    }

    int streamId = -1;
    EXPECT_OK(mCamera->createStream(/*width*/1280, /*height*/960,
              TEST_FORMAT_MAIN, surface, &streamId));
    EXPECT_NE(-1, streamId);

    int depthStreamId = -1;
    EXPECT_OK(mCamera->createStream(/*width*/320, /*height*/240,
              TEST_FORMAT_DEPTH, depthSurface, &depthStreamId));
    EXPECT_NE(-1, depthStreamId);

    EXPECT_OK(mCamera->exclusiveTryLock());

    /* Submit a single repeating (streaming) request; the exact request
     * settings don't matter much for this test.
     */

    // it would probably be better to use CameraMetadata from camera service.
    camera_metadata_t *request = NULL;
    EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
              /*out*/&request));
    EXPECT_NE((void*)NULL, request);

    /* FIXME: don't need this later, at which point the above should become an
       ASSERT_NE */
    if (request == NULL) request = allocate_camera_metadata(10, 100);

    // set the output streams for this request

    // wow what a verbose API.
    // IMPORTANT: the stream IDs must be uint8_t to match the metadata type.
    uint8_t allStreams[] = { static_cast<uint8_t>(streamId),
                             static_cast<uint8_t>(depthStreamId) };
    size_t streamCount = sizeof(allStreams) / sizeof(allStreams[0]);
    camera_metadata_entry_t entry;
    uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
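    // If the request has no output-streams entry yet, add one (reallocating a
    // larger metadata buffer if the add fails for lack of capacity);
    // otherwise, update the existing entry in place.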
    int find = find_camera_metadata_entry(request, tag, &entry);
    if (find == -ENOENT) {
        if (add_camera_metadata_entry(request, tag, &allStreams,
                                      /*data_count*/streamCount) != OK) {
            camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
            ASSERT_OK(append_camera_metadata(tmp, request));
            free_camera_metadata(request);
            request = tmp;

            ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
                                                /*data_count*/streamCount));
        }
    } else {
        ASSERT_OK(update_camera_metadata_entry(request, entry.index,
                  &allStreams, /*data_count*/streamCount, &entry));
    }

    EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));

    dout << "will sleep now for " << mDisplaySecs << std::endl;
    sleep(mDisplaySecs);

    free_camera_metadata(request);

    for (size_t i = 0; i < streamCount; ++i) {
        EXPECT_OK(mCamera->deleteStream(allStreams[i]));
    }
    EXPECT_OK(mCamera->exclusiveUnlock());
}

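// Stream into a CPU-side consumer and verify that buffer callbacks arrive.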
TEST_F(ProCameraTest, CpuConsumerSingle) {
    if (HasFatalFailure()) {
        return;
    }

    mListener->SetEventMask(ProEvent_Mask(ACQUIRED) |
                            ProEvent_Mask(BUFFER_RECEIVED));

    int streamId = -1;
    EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240,
        TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &streamId));
    EXPECT_NE(-1, streamId);

    EXPECT_OK(mCamera->exclusiveTryLock());
    EXPECT_EQ(OK, mListener->WaitForEvent());
    EXPECT_EQ(ACQUIRED, mListener->ReadEvent());

    /* Submit a single repeating (streaming) request; the exact request
     * settings don't matter much for this test.
     */

    // it would probably be better to use CameraMetadata from camera service.
    camera_metadata_t *request = NULL;
    EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
        /*out*/&request));
    EXPECT_NE((void*)NULL, request);

    /* FIXME: don't need this later, at which point the above should become an
       ASSERT_NE */
    if (request == NULL) request = allocate_camera_metadata(10, 100);

    // set the output streams to just this stream ID
    uint8_t allStreams[] = { static_cast<uint8_t>(streamId) };
    camera_metadata_entry_t entry;
    uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
    int find = find_camera_metadata_entry(request, tag, &entry);
    if (find == -ENOENT) {
        if (add_camera_metadata_entry(request, tag, &allStreams,
                /*data_count*/1) != OK) {
            camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
            ASSERT_OK(append_camera_metadata(tmp, request));
            free_camera_metadata(request);
            request = tmp;

            ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
                /*data_count*/1));
        }
    } else {
        ASSERT_OK(update_camera_metadata_entry(request, entry.index,
            &allStreams, /*data_count*/1, &entry));
    }

    EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));

    // Consume a couple of frames
    for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
        EXPECT_EQ(OK, mListener->WaitForEvent());
        EXPECT_EQ(BUFFER_RECEIVED, mListener->ReadEvent());
    }

    // Done: clean up
    free_camera_metadata(request);
    EXPECT_OK(mCamera->deleteStream(streamId));
    EXPECT_OK(mCamera->exclusiveUnlock());
}

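// Same as CpuConsumerSingle, but with two CPU streams (main + depth) attached
// to a single repeating request.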
TEST_F(ProCameraTest, CpuConsumerDual) {
    if (HasFatalFailure()) {
        return;
    }

    mListener->SetEventMask(ProEvent_Mask(BUFFER_RECEIVED));

    int streamId = -1;
    EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
                            TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &streamId));
    EXPECT_NE(-1, streamId);

    int depthStreamId = -1;
    EXPECT_OK(mCamera->createStreamCpu(/*width*/320, /*height*/240,
                     TEST_FORMAT_DEPTH, TEST_CPU_HEAP_COUNT, &depthStreamId));
    EXPECT_NE(-1, depthStreamId);

    EXPECT_OK(mCamera->exclusiveTryLock());

    /* Submit a single repeating (streaming) request; the exact request
     * settings don't matter much for this test.
     */

    // it would probably be better to use CameraMetadata from camera service.
    camera_metadata_t *request = NULL;
    EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                            /*out*/&request));
    EXPECT_NE((void*)NULL, request);

    if (request == NULL) request = allocate_camera_metadata(10, 100);

    // set the output streams for this request

    // wow what a verbose API.
    uint8_t allStreams[] = { static_cast<uint8_t>(streamId),
                             static_cast<uint8_t>(depthStreamId) };
    size_t streamCount = 2;
    camera_metadata_entry_t entry;
    uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
    int find = find_camera_metadata_entry(request, tag, &entry);
    if (find == -ENOENT) {
        if (add_camera_metadata_entry(request, tag, &allStreams,
                                      /*data_count*/streamCount) != OK) {
            camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
            ASSERT_OK(append_camera_metadata(tmp, request));
            free_camera_metadata(request);
            request = tmp;

            ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
                                                   /*data_count*/streamCount));
        }
    } else {
        ASSERT_OK(update_camera_metadata_entry(request, entry.index,
                              &allStreams, /*data_count*/streamCount, &entry));
    }

    EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));

    // Consume a couple of frames
    for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
        // stream id 1
        EXPECT_EQ(OK, mListener->WaitForEvent());
        EXPECT_EQ(BUFFER_RECEIVED, mListener->ReadEvent());

        // stream id 2
        EXPECT_EQ(OK, mListener->WaitForEvent());
        EXPECT_EQ(BUFFER_RECEIVED, mListener->ReadEvent());

        // TODO: events should be a struct with some data, like the stream id
    }

    // Done: clean up
    free_camera_metadata(request);
    EXPECT_OK(mCamera->deleteStream(streamId));
    EXPECT_OK(mCamera->deleteStream(depthStreamId));
    EXPECT_OK(mCamera->exclusiveUnlock());
}

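// Verify that per-frame metadata results are delivered to the listener.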
TEST_F(ProCameraTest, ResultReceiver) {
    if (HasFatalFailure()) {
        return;
    }

    mListener->SetEventMask(ProEvent_Mask(RESULT_RECEIVED));
    // FIXME: if this is run right after the previous test we get BUFFER_RECEIVED;
    // we need to filter out events at read time.

    int streamId = -1;
    EXPECT_OK(mCamera->createStreamCpu(/*width*/1280, /*height*/960,
                             TEST_FORMAT_MAIN, TEST_CPU_HEAP_COUNT, &streamId));
    EXPECT_NE(-1, streamId);

    EXPECT_OK(mCamera->exclusiveTryLock());

    /* Submit a single repeating (streaming) request; the exact request
     * settings don't matter much for this test.
     */

    camera_metadata_t *request = NULL;
    EXPECT_OK(mCamera->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                            /*out*/&request));
    EXPECT_NE((void*)NULL, request);

    /* FIXME */
    if (request == NULL) request = allocate_camera_metadata(10, 100);

    // set the output streams to just this stream ID

    uint8_t allStreams[] = { static_cast<uint8_t>(streamId) };
    size_t streamCount = 1;
    camera_metadata_entry_t entry;
    uint32_t tag = static_cast<uint32_t>(ANDROID_REQUEST_OUTPUT_STREAMS);
    int find = find_camera_metadata_entry(request, tag, &entry);
    if (find == -ENOENT) {
        if (add_camera_metadata_entry(request, tag, &allStreams,
                                      /*data_count*/streamCount) != OK) {
            camera_metadata_t *tmp = allocate_camera_metadata(1000, 10000);
            ASSERT_OK(append_camera_metadata(tmp, request));
            free_camera_metadata(request);
            request = tmp;

            ASSERT_OK(add_camera_metadata_entry(request, tag, &allStreams,
                                                /*data_count*/streamCount));
        }
    } else {
        ASSERT_OK(update_camera_metadata_entry(request, entry.index,
                               &allStreams, /*data_count*/streamCount, &entry));
    }

    EXPECT_OK(mCamera->submitRequest(request, /*streaming*/true));

    // Consume a couple of results
    for (int i = 0; i < TEST_CPU_FRAME_COUNT; ++i) {
        EXPECT_EQ(OK, mListener->WaitForEvent());
        EXPECT_EQ(RESULT_RECEIVED, mListener->ReadEvent());
    }

    // Done: clean up
    free_camera_metadata(request);
    EXPECT_OK(mCamera->deleteStream(streamId));
    EXPECT_OK(mCamera->exclusiveUnlock());
}

} // namespace client
} // namespace tests
} // namespace camera2
} // namespace android