/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <gtest/gtest.h>

#define LOG_TAG "CameraBurstTest"
//#define LOG_NDEBUG 0
#include <utils/Log.h>
#include <utils/Timers.h>

#include <algorithm>
#include <cmath>
#include <cstdlib>
#include <iostream>
#include <sstream>

#include "CameraStreamFixture.h"
#include "TestExtensions.h"

#define CAMERA_FRAME_TIMEOUT    1000000000LL //nsecs (1 sec)
#define CAMERA_HEAP_COUNT       2 //HALBUG: 1 means registerBuffers fails
#define CAMERA_BURST_DEBUGGING  0
#define CAMERA_FRAME_BURST_COUNT 10

/* constants for the exposure test */
#define CAMERA_EXPOSURE_DOUBLE  2
#define CAMERA_EXPOSURE_DOUBLING_THRESHOLD 1.0f
#define CAMERA_EXPOSURE_DOUBLING_COUNT 4
#define CAMERA_EXPOSURE_FORMAT CAMERA_STREAM_AUTO_CPU_FORMAT
#define CAMERA_EXPOSURE_STARTING 100000 // 1/10ms, up to 51.2ms with 10 steps

#define USEC 1000LL        // in ns
#define MSEC 1000000LL     // in ns
#define SEC  1000000000LL  // in ns

#if CAMERA_BURST_DEBUGGING
#define dout std::cout
#else
#define dout if (0) std::cout
#endif

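// The WARN_* helpers stream the trailing message to stderr when the check
// fails; when it passes, the message goes to an ostream constructed with a
// null streambuf and is silently discarded.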
#define WARN_UNLESS(condition) ((!(condition) ? (std::cerr) : (std::ostream(NULL) << "")) << "Warning: ")
#define WARN_LE(exp, act) WARN_UNLESS((exp) <= (act))
#define WARN_LT(exp, act) WARN_UNLESS((exp) < (act))
#define WARN_GT(exp, act) WARN_UNLESS((exp) > (act))

using namespace android;
using namespace android::camera2;

namespace android {
namespace camera2 {
namespace tests {

static CameraStreamParams STREAM_PARAMETERS = {
    /*mFormat*/     CAMERA_EXPOSURE_FORMAT,
    /*mHeapCount*/  CAMERA_HEAP_COUNT
};

class CameraBurstTest
    : public ::testing::Test,
      public CameraStreamFixture {

public:
    CameraBurstTest() : CameraStreamFixture(STREAM_PARAMETERS) {
        TEST_EXTENSION_FORKING_CONSTRUCTOR;

        if (HasFatalFailure()) {
            return;
        }

        CreateStream();
    }

    ~CameraBurstTest() {
        TEST_EXTENSION_FORKING_DESTRUCTOR;

        if (mDevice.get()) {
            mDevice->waitUntilDrained();
        }
        DeleteStream();
    }

    virtual void SetUp() {
        TEST_EXTENSION_FORKING_SET_UP;
    }
    virtual void TearDown() {
        TEST_EXTENSION_FORKING_TEAR_DOWN;
    }

    /* this assumes the format is YUV420sp or flexible YUV */
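    /* Returns the sum of Y-plane values for pixels that are neither fully
     * black (0) nor fully white (255); *underexposed and *overexposed receive
     * the counts of clipped pixels so callers can average over valid pixels. */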
    long long TotalBrightness(const CpuConsumer::LockedBuffer& imgBuffer,
                              int *underexposed,
                              int *overexposed) const {

        const uint8_t* buf = imgBuffer.data;
        size_t stride = imgBuffer.stride;

        /* iterate over the Y plane only */
        long long acc = 0;

        *underexposed = 0;
        *overexposed = 0;

        for (size_t y = 0; y < imgBuffer.height; ++y) {
            for (size_t x = 0; x < imgBuffer.width; ++x) {
                const uint8_t p = buf[y * stride + x];

                if (p == 0) {
                    if (underexposed) {
                        ++*underexposed;
                    }
                    continue;
                } else if (p == 255) {
                    if (overexposed) {
                        ++*overexposed;
                    }
                    continue;
                }

                acc += p;
            }
        }

        return acc;
    }

    // Parses a comma-separated string list into a Vector
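    // e.g. "100, 200,300" parses into {100, 200, 300}; commas and spaces
    // between the values are skipped.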
    template<typename T>
    void ParseList(const char *src, Vector<T> &list) {
        std::istringstream s(src);
        while (!s.eof()) {
            char c = s.peek();
            if (c == ',' || c == ' ') {
                s.ignore(1, EOF);
                continue;
            }
            T val;
            s >> val;
            list.push_back(val);
        }
    }

};

TEST_F(CameraBurstTest, ManualExposureControl) {

    TEST_EXTENSION_FORKING_INIT;

    // Range of valid exposure times, in nanoseconds
    int64_t minExp, maxExp;
    {
        camera_metadata_ro_entry exposureTimeRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);

        ASSERT_EQ(2u, exposureTimeRange.count);
        minExp = exposureTimeRange.data.i64[0];
        maxExp = exposureTimeRange.data.i64[1];
    }

    dout << "Min exposure is " << minExp;
    dout << " max exposure is " << maxExp << std::endl;

    // Calculate some set of valid exposure times for each request
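    // Starting from CAMERA_EXPOSURE_STARTING (0.1 ms) and doubling every frame,
    // the CAMERA_FRAME_BURST_COUNT exposures cover roughly 0.1 ms to 51.2 ms.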
    int64_t exposures[CAMERA_FRAME_BURST_COUNT];
    exposures[0] = CAMERA_EXPOSURE_STARTING;
    for (int i = 1; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        exposures[i] = exposures[i-1] * CAMERA_EXPOSURE_DOUBLE;
    }
    // Our calculated exposure times should be in [minExp, maxExp]
    EXPECT_LE(minExp, exposures[0])
        << "Minimum exposure range is too high, wanted at most "
        << exposures[0] << "ns";
    EXPECT_GE(maxExp, exposures[CAMERA_FRAME_BURST_COUNT-1])
        << "Maximum exposure range is too low, wanted at least "
        << exposures[CAMERA_FRAME_BURST_COUNT-1] << "ns";

    // Create a preview request, turning off all 3A
    CameraMetadata previewRequest;
    ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                                &previewRequest));
    {
        Vector<int32_t> outputStreamIds;
        outputStreamIds.push(mStreamId);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                                            outputStreamIds));

        // Disable all 3A routines
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_CONTROL_MODE,
                                            &cmOff, 1));

        int requestId = 1;
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                                            &requestId, 1));

        if (CAMERA_BURST_DEBUGGING) {
            int frameCount = 0;
            ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_FRAME_COUNT,
                                                &frameCount, 1));
        }
    }

    if (CAMERA_BURST_DEBUGGING) {
        previewRequest.dump(STDOUT_FILENO);
    }

    // Submit capture requests
    for (int i = 0; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        CameraMetadata tmpRequest = previewRequest;
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_EXPOSURE_TIME,
                                        &exposures[i], 1));
        ALOGV("Submitting capture request %d with exposure %lld", i,
            exposures[i]);
        dout << "Capture request " << i << " exposure is "
             << (exposures[i]/1e6f) << std::endl;
        ASSERT_EQ(OK, mDevice->capture(tmpRequest));
    }

    dout << "Buffer dimensions " << mWidth << "x" << mHeight << std::endl;

    float brightnesses[CAMERA_FRAME_BURST_COUNT];
    // Get each frame (metadata) and then the buffer. Calculate brightness.
    for (int i = 0; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        ALOGV("Reading capture request %d with exposure %lld", i, exposures[i]);
        ASSERT_EQ(OK, mDevice->waitForNextFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("Reading capture request-1 %d", i);
        CameraMetadata frameMetadata;
        ASSERT_EQ(OK, mDevice->getNextFrame(&frameMetadata));
        ALOGV("Reading capture request-2 %d", i);

        ASSERT_EQ(OK, mFrameListener->waitForFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("We got the frame now");

        CpuConsumer::LockedBuffer imgBuffer;
        ASSERT_EQ(OK, mCpuConsumer->lockNextBuffer(&imgBuffer));

        int underexposed, overexposed;
        long long brightness = TotalBrightness(imgBuffer, &underexposed,
                                               &overexposed);
        float avgBrightness = brightness * 1.0f /
                              (mWidth * mHeight - (underexposed + overexposed));
        ALOGV("Total brightness for frame %d was %lld (underexposed %d, "
              "overexposed %d), avg %f", i, brightness, underexposed,
              overexposed, avgBrightness);
        dout << "Average brightness (frame " << i << ") was " << avgBrightness
             << " (underexposed " << underexposed << ", overexposed "
             << overexposed << ")" << std::endl;

        ASSERT_EQ(OK, mCpuConsumer->unlockBuffer(imgBuffer));

        brightnesses[i] = avgBrightness;
    }

    // Calculate max consecutive frame exposure doubling
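    // With 3A disabled and the exposure time doubling on each request, average
    // brightness should also roughly double from frame to frame until the
    // sensor saturates; track the longest consecutive run of such doublings.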
    float prev = brightnesses[0];
    int doubling_count = 1;
    int max_doubling_count = 0;
    for (int i = 1; i < CAMERA_FRAME_BURST_COUNT; ++i) {
        if (fabs(brightnesses[i] - prev*CAMERA_EXPOSURE_DOUBLE)
            <= CAMERA_EXPOSURE_DOUBLING_THRESHOLD) {
            doubling_count++;
        }
        else {
            max_doubling_count = std::max(max_doubling_count, doubling_count);
            doubling_count = 1;
        }
        prev = brightnesses[i];
    }
    // Also account for a doubling run that continues through the final frame
    max_doubling_count = std::max(max_doubling_count, doubling_count);

    dout << "max doubling count: " << max_doubling_count << std::endl;

    /**
     * Make this check a warning only, since the brightness calculation is not
     * reliable and a separate test covers this case. It is also quite subtle to
     * get right without overcomplicating the test.
     */
    WARN_LE(CAMERA_EXPOSURE_DOUBLING_COUNT, max_doubling_count)
            << "average brightness should double at least "
            << CAMERA_EXPOSURE_DOUBLING_COUNT
            << " times over each consecutive frame as the exposure is doubled"
            << std::endl;
}

/**
 * This test varies exposure time, frame duration, and sensitivity for a
 * burst of captures. It picks values by default, but the selection can be
 * overridden with the environment variables
 *   CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES
 *   CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS
 *   CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES
 * each of which must be a comma-separated list of values, and all three lists
 * must be the same length.  In addition, if the environment variable
 *   CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES
 * is set to 1, then the YUV buffers are dumped into files named
 *   "/data/local/tmp/camera2_test_variable_burst_frame_NNN.yuv"
 *
 * For example:
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES 10000000,20000000
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS 40000000,40000000
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES 200,100
 *   $ setenv CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES 1
 *   $ /data/nativetest/camera2_test/camera2_test --gtest_filter="*VariableBurst"
 */
TEST_F(CameraBurstTest, VariableBurst) {

    TEST_EXTENSION_FORKING_INIT;

    // Bounds for checking frame duration is within range
    const nsecs_t DURATION_UPPER_BOUND = 10 * MSEC;
    const nsecs_t DURATION_LOWER_BOUND = 20 * MSEC;

    // Threshold for considering two captures to have equivalent exposure value,
    // as a ratio of the smaller EV to the larger EV.
    const float   EV_MATCH_BOUND = 0.95;
    // Bound for two captures with equivalent exp values to have the same
    // measured brightness, in 0-255 luminance.
    const float   BRIGHTNESS_MATCH_BOUND = 5;

    // Environment variables to look for to override test settings
    const char *expEnv         = "CAMERA2_TEST_VARIABLE_BURST_EXPOSURE_TIMES";
    const char *durationEnv    = "CAMERA2_TEST_VARIABLE_BURST_FRAME_DURATIONS";
    const char *sensitivityEnv = "CAMERA2_TEST_VARIABLE_BURST_SENSITIVITIES";
    const char *dumpFrameEnv   = "CAMERA2_TEST_VARIABLE_BURST_DUMP_FRAMES";

    // Range of valid exposure times, in nanoseconds
    int64_t minExp = 0, maxExp = 0;
    // List of valid sensor sensitivities
    Vector<int32_t> sensitivities;
    // Range of valid frame durations, in nanoseconds
    int64_t minDuration = 0, maxDuration = 0;

    {
        camera_metadata_ro_entry exposureTimeRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE);

        EXPECT_EQ(2u, exposureTimeRange.count) << "Bad exposure time range tag. "
                "Using default values";
        if (exposureTimeRange.count == 2) {
            minExp = exposureTimeRange.data.i64[0];
            maxExp = exposureTimeRange.data.i64[1];
        }

        EXPECT_LT(0, minExp) << "Minimum exposure time is 0";
        EXPECT_LT(0, maxExp) << "Maximum exposure time is 0";
        EXPECT_LE(minExp, maxExp) << "Minimum exposure is greater than maximum";

        if (minExp == 0) {
            minExp = 1 * MSEC; // Fallback minimum exposure time
        }

        if (maxExp == 0) {
            maxExp = 10 * SEC; // Fallback maximum exposure time
        }
    }

    camera_metadata_ro_entry hardwareLevel =
        GetStaticEntry(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL);
    ASSERT_EQ(1u, hardwareLevel.count);
    uint8_t level = hardwareLevel.data.u8[0];
    ASSERT_GE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED);
    ASSERT_LE(level, ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
    if (level == ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED) {
        const ::testing::TestInfo* const test_info =
            ::testing::UnitTest::GetInstance()->current_test_info();
        std::cerr << "Skipping test "
                  << test_info->test_case_name() << "."
                  << test_info->name()
                  << " because the HAL supported hardware level is LIMITED "
                  << std::endl;
        return;
    }

    dout << "Stream size is " << mWidth << " x " << mHeight << std::endl;
    dout << "Valid exposure range is: " <<
            minExp << " - " << maxExp << " ns " << std::endl;

    {
        camera_metadata_ro_entry sensitivityRange =
            GetStaticEntry(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE);
        EXPECT_EQ(2u, sensitivityRange.count) << "No sensitivity range listed. "
                "Falling back to default set.";
        int32_t minSensitivity = 100;
        int32_t maxSensitivity = 800;
        if (sensitivityRange.count == 2) {
            ASSERT_GT(sensitivityRange.data.i32[0], 0);
            ASSERT_GT(sensitivityRange.data.i32[1], 0);
            minSensitivity = sensitivityRange.data.i32[0];
            maxSensitivity = sensitivityRange.data.i32[1];
        }
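        // Sample the sensitivity range in ISO steps of 100, always including
        // both the minimum and the maximum values.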
        int32_t count = (maxSensitivity - minSensitivity + 99) / 100;
        sensitivities.push_back(minSensitivity);
        for (int i = 1; i < count; i++) {
            sensitivities.push_back(minSensitivity + i * 100);
        }
        sensitivities.push_back(maxSensitivity);
    }

    dout << "Available sensitivities: ";
    for (size_t i = 0; i < sensitivities.size(); i++) {
        dout << sensitivities[i] << " ";
    }
    dout << std::endl;

    {
        camera_metadata_ro_entry availableProcessedSizes =
                GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES);

        camera_metadata_ro_entry availableProcessedMinFrameDurations =
                GetStaticEntry(ANDROID_SCALER_AVAILABLE_PROCESSED_MIN_DURATIONS);

        EXPECT_EQ(availableProcessedSizes.count,
                availableProcessedMinFrameDurations.count * 2) <<
                "The number of minimum frame durations doesn't match the number of "
                "available sizes. Using fallback values";

        if (availableProcessedSizes.count ==
                availableProcessedMinFrameDurations.count * 2) {
            bool gotSize = false;
            for (size_t i = 0; i < availableProcessedSizes.count; i += 2) {
                if (availableProcessedSizes.data.i32[i] == mWidth &&
                        availableProcessedSizes.data.i32[i+1] == mHeight) {
                    gotSize = true;
                    minDuration = availableProcessedMinFrameDurations.data.i64[i/2];
                }
            }
            EXPECT_TRUE(gotSize) << "Can't find stream size in list of "
                    "available sizes: " << mWidth << ", " << mHeight;
        }
        if (minDuration == 0) {
            minDuration = 1 * SEC / 30; // Fall back to 30 fps as minimum duration
        }

        ASSERT_LT(0, minDuration);

        camera_metadata_ro_entry maxFrameDuration =
                GetStaticEntry(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION);

        EXPECT_EQ(1u, maxFrameDuration.count) << "No valid maximum frame duration";

        if (maxFrameDuration.count == 1) {
            maxDuration = maxFrameDuration.data.i64[0];
        }

        EXPECT_GT(maxDuration, 0) << "Max duration is 0 or not given, using fallback";

        if (maxDuration == 0) {
            maxDuration = 10 * SEC; // Fall back to 10 seconds as max duration
        }

    }
    dout << "Available frame duration range for configured stream size: "
         << minDuration << " - " << maxDuration << " ns" << std::endl;

    // Get environment variables if set
    const char *expVal = getenv(expEnv);
    const char *durationVal = getenv(durationEnv);
    const char *sensitivityVal = getenv(sensitivityEnv);

    bool gotExp = (expVal != NULL);
    bool gotDuration = (durationVal != NULL);
    bool gotSensitivity = (sensitivityVal != NULL);

    // All or none must be provided if using override envs
    ASSERT_TRUE( (gotDuration && gotExp && gotSensitivity) ||
            (!gotDuration && !gotExp && !gotSensitivity) ) <<
            "Incomplete set of environment variable overrides provided";

    Vector<int64_t> expList, durationList;
    Vector<int32_t> sensitivityList;
    if (gotExp) {
        ParseList(expVal, expList);
        ParseList(durationVal, durationList);
        ParseList(sensitivityVal, sensitivityList);

        ASSERT_TRUE(
            (expList.size() == durationList.size()) &&
            (durationList.size() == sensitivityList.size())) <<
                "Mismatched sizes in env lists, or parse error";

        dout << "Using burst list from environment with " << expList.size() <<
                " captures" << std::endl;
    } else {
        // Create a default set of controls based on the available ranges

        int64_t e;
        int64_t d;
        int32_t s;

        // Exposure ramp

        e = minExp;
        d = minDuration;
        s = sensitivities[0];
        while (e < maxExp) {
            expList.push_back(e);
            durationList.push_back(d);
            sensitivityList.push_back(s);
            e = e * 2;
        }
        e = maxExp;
        expList.push_back(e);
        durationList.push_back(d);
        sensitivityList.push_back(s);

        // Duration ramp

        e = 30 * MSEC;
        d = minDuration;
        s = sensitivities[0];
        while (d < maxDuration) {
            // make sure exposure <= frame duration
            expList.push_back(e > d ? d : e);
            durationList.push_back(d);
            sensitivityList.push_back(s);
            d = d * 2;
        }

        // Sensitivity ramp

        e = 30 * MSEC;
        d = 30 * MSEC;
        d = d > minDuration ? d : minDuration;
        for (size_t i = 0; i < sensitivities.size(); i++) {
            expList.push_back(e);
            durationList.push_back(d);
            sensitivityList.push_back(sensitivities[i]);
        }

        // Constant-EV ramp, duration == exposure
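        // Scaling exposure time down as sensitivity goes up keeps the EV
        // (exposure time * sensitivity) constant across this set of captures.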

        e = 30 * MSEC; // at ISO 100
        for (size_t i = 0; i < sensitivities.size(); i++) {
            int64_t e_adj = e * 100 / sensitivities[i];
            expList.push_back(e_adj);
            durationList.push_back(e_adj > minDuration ? e_adj : minDuration);
            sensitivityList.push_back(sensitivities[i]);
        }

        dout << "Default burst sequence created with " << expList.size() <<
                " entries" << std::endl;
    }

    // Validate the list, but warn only
    for (size_t i = 0; i < expList.size(); i++) {
        EXPECT_GE(maxExp, expList[i])
                << "Capture " << i << " exposure too long: " << expList[i];
        EXPECT_LE(minExp, expList[i])
                << "Capture " << i << " exposure too short: " << expList[i];
        EXPECT_GE(maxDuration, durationList[i])
                << "Capture " << i << " duration too long: " << durationList[i];
        EXPECT_LE(minDuration, durationList[i])
                << "Capture " << i << " duration too short: " << durationList[i];
        bool validSensitivity = false;
        for (size_t j = 0; j < sensitivities.size(); j++) {
            if (sensitivityList[i] == sensitivities[j]) {
                validSensitivity = true;
                break;
            }
        }
        EXPECT_TRUE(validSensitivity)
                << "Capture " << i << " sensitivity not in list: " << sensitivityList[i];
    }

    // Check if debug yuv dumps are requested

    bool dumpFrames = false;
    {
        const char *frameDumpVal = getenv(dumpFrameEnv);
        if (frameDumpVal != NULL) {
            if (frameDumpVal[0] == '1') dumpFrames = true;
        }
    }

    dout << "Dumping YUV frames " <<
            (dumpFrames ? "enabled, not checking timing" : "disabled") << std::endl;

    // Create a base preview request, turning off all 3A
    CameraMetadata previewRequest;
    ASSERT_EQ(OK, mDevice->createDefaultRequest(CAMERA2_TEMPLATE_PREVIEW,
                                                &previewRequest));
    {
        Vector<int32_t> outputStreamIds;
        outputStreamIds.push(mStreamId);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_OUTPUT_STREAMS,
                                            outputStreamIds));

        // Disable all 3A routines
        uint8_t cmOff = static_cast<uint8_t>(ANDROID_CONTROL_MODE_OFF);
        ASSERT_EQ(OK, previewRequest.update(ANDROID_CONTROL_MODE,
                                            &cmOff, 1));

        int requestId = 1;
        ASSERT_EQ(OK, previewRequest.update(ANDROID_REQUEST_ID,
                                            &requestId, 1));
    }

    // Submit capture requests

    for (size_t i = 0; i < expList.size(); ++i) {
        CameraMetadata tmpRequest = previewRequest;
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_EXPOSURE_TIME,
                                        &expList[i], 1));
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_FRAME_DURATION,
                                        &durationList[i], 1));
        ASSERT_EQ(OK, tmpRequest.update(ANDROID_SENSOR_SENSITIVITY,
                                        &sensitivityList[i], 1));
        ALOGV("Submitting capture %d with exposure %lld, frame duration %lld, sensitivity %d",
                i, expList[i], durationList[i], sensitivityList[i]);
        dout << "Capture request " << i <<
                ": exposure is " << (expList[i]/1e6f) << " ms" <<
                ", frame duration is " << (durationList[i]/1e6f) << " ms" <<
                ", sensitivity is " << sensitivityList[i] <<
                std::endl;
        ASSERT_EQ(OK, mDevice->capture(tmpRequest));
    }

    Vector<float> brightnesses;
    Vector<nsecs_t> captureTimes;
    brightnesses.setCapacity(expList.size());
    captureTimes.setCapacity(expList.size());

    // Get each frame (metadata) and then the buffer. Calculate brightness.
    for (size_t i = 0; i < expList.size(); ++i) {

        ALOGV("Reading request %d", i);
        dout << "Waiting for capture " << i << ": " <<
                " exposure " << (expList[i]/1e6f) << " ms," <<
                " frame duration " << (durationList[i]/1e6f) << " ms," <<
                " sensitivity " << sensitivityList[i] <<
                std::endl;

        // Set wait limit based on expected frame duration, or minimum timeout
        int64_t waitLimit = CAMERA_FRAME_TIMEOUT;
        if (expList[i] * 2 > waitLimit) waitLimit = expList[i] * 2;
        if (durationList[i] * 2 > waitLimit) waitLimit = durationList[i] * 2;

        ASSERT_EQ(OK, mDevice->waitForNextFrame(waitLimit));
        ALOGV("Reading capture request-1 %d", i);
        CameraMetadata frameMetadata;
        ASSERT_EQ(OK, mDevice->getNextFrame(&frameMetadata));
        ALOGV("Reading capture request-2 %d", i);

        ASSERT_EQ(OK, mFrameListener->waitForFrame(CAMERA_FRAME_TIMEOUT));
        ALOGV("We got the frame now");

        captureTimes.push_back(systemTime());

        CpuConsumer::LockedBuffer imgBuffer;
        ASSERT_EQ(OK, mCpuConsumer->lockNextBuffer(&imgBuffer));

        int underexposed, overexposed;
        float avgBrightness = 0;
        long long brightness = TotalBrightness(imgBuffer, &underexposed,
                                               &overexposed);
        int numValidPixels = mWidth * mHeight - (underexposed + overexposed);
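        // Average over pixels that were not clipped; if every pixel was clipped,
        // treat a mostly-overexposed frame as full brightness.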
        if (numValidPixels != 0) {
            avgBrightness = brightness * 1.0f / numValidPixels;
        } else if (underexposed < overexposed) {
            avgBrightness = 255;
        }

        ALOGV("Total brightness for frame %d was %lld (underexposed %d, "
              "overexposed %d), avg %f", i, brightness, underexposed,
              overexposed, avgBrightness);
        dout << "Average brightness (frame " << i << ") was " << avgBrightness
             << " (underexposed " << underexposed << ", overexposed "
             << overexposed << ")" << std::endl;
        brightnesses.push_back(avgBrightness);

        if (i != 0) {
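            // EV here is exposure time times sensitivity; consecutive captures
            // whose EVs match within EV_MATCH_BOUND should have similar
            // average brightness.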
            float prevEv = static_cast<float>(expList[i - 1]) * sensitivityList[i - 1];
            float currentEv = static_cast<float>(expList[i]) * sensitivityList[i];
            float evRatio = (prevEv > currentEv) ? (currentEv / prevEv) :
                    (prevEv / currentEv);
            if (evRatio > EV_MATCH_BOUND) {
                WARN_LT(fabs(brightnesses[i] - brightnesses[i - 1]),
                        BRIGHTNESS_MATCH_BOUND) <<
                        "Capture brightness different from previous, even though "
                        "they have the same EV value. Ev now: " << currentEv <<
                        ", previous: " << prevEv << ". Brightness now: " <<
                        brightnesses[i] << ", previous: " << brightnesses[i-1] <<
                        std::endl;
            }
            // Only check timing if not saving to disk, since that slows things
            // down substantially
            if (!dumpFrames) {
                nsecs_t timeDelta = captureTimes[i] - captureTimes[i-1];
                nsecs_t expectedDelta = expList[i] > durationList[i] ?
                        expList[i] : durationList[i];
                WARN_LT(timeDelta, expectedDelta + DURATION_UPPER_BOUND) <<
                        "Capture took " << timeDelta << " ns to receive, but expected"
                        " frame duration was " << expectedDelta << " ns." <<
                        std::endl;
                WARN_GT(timeDelta, expectedDelta - DURATION_LOWER_BOUND) <<
                        "Capture took " << timeDelta << " ns to receive, but expected"
                        " frame duration was " << expectedDelta << " ns." <<
                        std::endl;
                dout << "Time delta from previous frame: " << timeDelta / 1e6 <<
                        " ms.  Expected " << expectedDelta / 1e6 << " ms" << std::endl;
            }
        }

        if (dumpFrames) {
            String8 dumpName =
                    String8::format("/data/local/tmp/camera2_test_variable_burst_frame_%03d.yuv", i);
            dout << "  Writing YUV dump to " << dumpName << std::endl;
            DumpYuvToFile(dumpName, imgBuffer);
        }

        ASSERT_EQ(OK, mCpuConsumer->unlockBuffer(imgBuffer));
    }

}

} // namespace tests
} // namespace camera2
} // namespace android
